Mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2025-11-14 21:46:14 +00:00)

Compare commits: 22 commits, 3194-brows ... individual
Commits (SHA1):

6a5e6c7c8b
4f362385e1
a01d6169d2
9beda3911d
5ed596bfa9
99ca8787ab
8f1a6feb90
c0e229201b
66bc7fbc04
530bd40ca5
36004cf74b
c7374245e1
59df59e9cd
c0c2898b91
abac660bac
26de64d873
79d9a8ca28
5c391fbcad
d7e24f64a5
d6427d823f
47eb874f47
37019355fd
.github/workflows/test-container-build.yml (vendored), 40 changes
@@ -23,8 +23,28 @@ on:
  # Changes to requirements.txt packages and Dockerfile may or may not always be compatible with arm etc, so worth testing
  # @todo: some kind of path filter for requirements.txt and Dockerfile
jobs:
  test-container-build:
  builder:
    name: Build ${{ matrix.platform }} (${{ matrix.dockerfile == './Dockerfile' && 'main' || 'alpine' }})
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          # Main Dockerfile platforms
          - platform: linux/amd64
            dockerfile: ./Dockerfile
          - platform: linux/arm64
            dockerfile: ./Dockerfile
          - platform: linux/arm/v7
            dockerfile: ./Dockerfile
          - platform: linux/arm/v8
            dockerfile: ./Dockerfile
          - platform: linux/arm64/v8
            dockerfile: ./Dockerfile
          # Alpine Dockerfile platforms (musl via alpine check)
          - platform: linux/amd64
            dockerfile: ./.github/test/Dockerfile-alpine
          - platform: linux/arm64
            dockerfile: ./.github/test/Dockerfile-alpine
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.11
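
The matrix above expands into one independent job per `include` entry, replacing the two hard-coded build steps shown in the next hunk. A rough sketch of that expansion in plain Python (illustration only; GitHub Actions performs this natively):

    # Sketch of GitHub Actions matrix expansion (illustrative, not workflow code)
    matrix_include = [
        {"platform": "linux/amd64", "dockerfile": "./Dockerfile"},
        {"platform": "linux/arm64", "dockerfile": "./Dockerfile"},
        {"platform": "linux/arm/v7", "dockerfile": "./Dockerfile"},
        {"platform": "linux/arm/v8", "dockerfile": "./Dockerfile"},
        {"platform": "linux/arm64/v8", "dockerfile": "./Dockerfile"},
        {"platform": "linux/amd64", "dockerfile": "./.github/test/Dockerfile-alpine"},
        {"platform": "linux/arm64", "dockerfile": "./.github/test/Dockerfile-alpine"},
    ]

    for entry in matrix_include:
        variant = "main" if entry["dockerfile"] == "./Dockerfile" else "alpine"
        # One job per entry, named like the workflow's `name:` expression
        print(f"Build {entry['platform']} ({variant})")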
@@ -47,26 +67,14 @@ jobs:
          version: latest
          driver-opts: image=moby/buildkit:master

      # https://github.com/dgtlmoon/changedetection.io/pull/1067
      # Check we can still build under alpine/musl
      - name: Test that the docker containers can build (musl via alpine check)
        id: docker_build_musl
        uses: docker/build-push-action@v6
        with:
          context: ./
          file: ./.github/test/Dockerfile-alpine
          platforms: linux/amd64,linux/arm64
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Test that the docker containers can build
      - name: Test that the docker containers can build (${{ matrix.platform }} - ${{ matrix.dockerfile }})
        id: docker_build
        uses: docker/build-push-action@v6
        # https://github.com/docker/build-push-action#customizing
        with:
          context: ./
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8,linux/arm64/v8
          file: ${{ matrix.dockerfile }}
          platforms: ${{ matrix.platform }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

@@ -71,6 +71,7 @@ jobs:
          docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_watch_model'
          docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_jinja2_security'
          docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_semver'
          docker run test-changedetectionio bash -c 'python3 -m unittest changedetectionio.tests.unit.test_update_watch_deep_merge'

      - name: Test built container with Pytest (generally as requests/plaintext fetching)
        run: |

@@ -179,6 +180,26 @@ jobs:

          docker kill test-changedetectionio

      - name: Test HTTPS SSL mode
        run: |
          openssl req -x509 -newkey rsa:4096 -keyout privkey.pem -out cert.pem -days 365 -nodes -subj "/CN=localhost"
          docker run --name test-changedetectionio-ssl --rm -e SSL_CERT_FILE=cert.pem -e SSL_PRIVKEY_FILE=privkey.pem -p 5000:5000 -v ./cert.pem:/app/cert.pem -v ./privkey.pem:/app/privkey.pem -d test-changedetectionio
          sleep 3
          # Should return 0 (no error) when grep finds it
          # -k because it's self-signed
          curl --retry-connrefused --retry 6 -k https://localhost:5000 -v|grep -q checkbox-uuid

          docker kill test-changedetectionio-ssl

      - name: Test IPv6 Mode
        run: |
          # IPv6 - :: bind to all interfaces inside container (like 0.0.0.0), ::1 would be localhost only
          docker run --name test-changedetectionio-ipv6 --rm -p 5000:5000 -e LISTEN_HOST=:: -d test-changedetectionio
          sleep 3
          # Should return 0 (no error) when grep finds it on localhost
          curl --retry-connrefused --retry 6 http://[::1]:5000 -v|grep -q checkbox-uuid
          docker kill test-changedetectionio-ipv6

      - name: Test changedetection.io SIGTERM and SIGINT signal shutdown
        run: |
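
For illustration, the two curl probes above boil down to these checks; a minimal Python sketch under the same assumptions (self-signed certificate, app listening on port 5000, `requests` installed):

    import requests

    # HTTPS probe: verify=False mirrors curl's -k for the self-signed certificate
    r = requests.get("https://localhost:5000", verify=False, timeout=10)
    assert "checkbox-uuid" in r.text  # the watch list page rendered

    # IPv6 probe: bracketed literal address, same as curl http://[::1]:5000
    r = requests.get("http://[::1]:5000", timeout=10)
    assert "checkbox-uuid" in r.text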
@@ -2,7 +2,7 @@

# Read more https://github.com/dgtlmoon/changedetection.io/wiki

__version__ = '0.50.2'
__version__ = '0.50.3'

from changedetectionio.strtobool import strtobool
from json.decoder import JSONDecodeError

@@ -65,8 +65,7 @@ def main():

    datastore_path = None
    do_cleanup = False
    host = "0.0.0.0"
    ipv6_enabled = False
    host = os.environ.get("LISTEN_HOST", "0.0.0.0").strip()
    port = int(os.environ.get('PORT', 5000))
    ssl_mode = False

@@ -108,10 +107,6 @@ def main():
        if opt == '-d':
            datastore_path = arg

        if opt == '-6':
            logger.success("Enabling IPv6 listen support")
            ipv6_enabled = True

        # Cleanup (remove text files that aren't in the index)
        if opt == '-c':
            do_cleanup = True

@@ -123,6 +118,20 @@ def main():
        if opt == '-l':
            logger_level = int(arg) if arg.isdigit() else arg.upper()


    logger.success(f"changedetection.io version {get_version()} starting.")
    # Launch using SocketIO run method for proper integration (if enabled)
    ssl_cert_file = os.getenv("SSL_CERT_FILE", 'cert.pem')
    ssl_privkey_file = os.getenv("SSL_PRIVKEY_FILE", 'privkey.pem')
    if os.getenv("SSL_CERT_FILE") and os.getenv("SSL_PRIVKEY_FILE"):
        ssl_mode = True

    # SSL mode could have been set by -s too, therefore fall back to default values
    if ssl_mode:
        if not os.path.isfile(ssl_cert_file) or not os.path.isfile(ssl_privkey_file):
            logger.critical(f"Cannot start SSL/HTTPS mode, please be sure that '{ssl_cert_file}' and '{ssl_privkey_file}' exist in {os.getcwd()}")
            os._exit(2)

    # Without this, a logger will be duplicated
    logger.remove()
    try:

@@ -222,19 +231,19 @@ def main():

    # SocketIO instance is already initialized in flask_app.py

    # Launch using SocketIO run method for proper integration (if enabled)
    if socketio_server:
        if ssl_mode:
            socketio.run(app, host=host, port=int(port), debug=False,
                         certfile='cert.pem', keyfile='privkey.pem', allow_unsafe_werkzeug=True)
            logger.success(f"SSL mode enabled, attempting to start with '{ssl_cert_file}' and '{ssl_privkey_file}' in {os.getcwd()}")
            socketio.run(app, host=host, port=int(port), debug=False,
                         ssl_context=(ssl_cert_file, ssl_privkey_file), allow_unsafe_werkzeug=True)
        else:
            socketio.run(app, host=host, port=int(port), debug=False, allow_unsafe_werkzeug=True)
    else:
        # Run Flask app without Socket.IO if disabled
        logger.info("Starting Flask app without Socket.IO server")
        if ssl_mode:
            app.run(host=host, port=int(port), debug=False,
                    ssl_context=('cert.pem', 'privkey.pem'))
            logger.success(f"SSL mode enabled, attempting to start with '{ssl_cert_file}' and '{ssl_privkey_file}' in {os.getcwd()}")
            app.run(host=host, port=int(port), debug=False,
                    ssl_context=(ssl_cert_file, ssl_privkey_file))
        else:
            app.run(host=host, port=int(port), debug=False)

@@ -132,7 +132,7 @@ class steppable_browser_interface():

    # In case they request to go back to the start
    async def action_goto_site(self, selector=None, value=None):
        return await self.action_goto_url(value=self.start_url)
        return await self.action_goto_url(value=re.sub(r'^source:', '', self.start_url, flags=re.IGNORECASE))

    async def action_click_element_containing_text(self, selector=None, value=''):
        logger.debug("Clicking element containing text")

@@ -10,7 +10,7 @@
                <legend>Add a new organisational tag</legend>
                <div id="watch-add-wrapper-zone">
                    <div>
                        {{ render_simple_field(form.name, placeholder="watch label / tag") }}
                        {{ render_simple_field(form.name, placeholder="Watch group / tag") }}
                    </div>
                    <div>
                        {{ render_simple_field(form.save_button, title="Save" ) }}

@@ -159,12 +159,20 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, worker_handle
    def mark_all_viewed():
        # Save the current newest history as the most recently viewed
        with_errors = request.args.get('with_errors') == "1"
        tag_limit = request.args.get('tag')
        logger.debug(f"Limiting to tag {tag_limit}")
        now = int(time.time())
        for watch_uuid, watch in datastore.data['watching'].items():
            if with_errors and not watch.get('last_error'):
                continue
            datastore.set_last_viewed(watch_uuid, int(time.time()))

        return redirect(url_for('watchlist.index'))
            if tag_limit and ( not watch.get('tags') or tag_limit not in watch['tags'] ):
                logger.debug(f"Skipping watch {watch_uuid}")
                continue

            datastore.set_last_viewed(watch_uuid, now)

        return redirect(url_for('watchlist.index', tag=tag_limit))

    @ui_blueprint.route("/delete", methods=['GET'])
    @login_optionally_required

@@ -295,7 +303,7 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, worker_handle
            watch['ignore_text'] += datastore.data['settings']['application']['global_ignore_text']
            watch['subtractive_selectors'] += datastore.data['settings']['application']['global_subtractive_selectors']

            watch_json = json.dumps(watch)
            watch_json = json.dumps(dict(watch))

            try:
                r = requests.request(method="POST",
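
The `dict(watch)` cast matters because, as the model changes further down show, the watch object no longer subclasses `dict`, so `json.dumps` on it raises `TypeError`. A minimal sketch of the failure mode with a hypothetical stand-in mapping class:

    import json

    class MappingLike:
        # Stand-in for a dict-like object exposing keys()/__getitem__
        def __init__(self, data):
            self._data = data
        def keys(self):
            return self._data.keys()
        def __getitem__(self, key):
            return self._data[key]

    w = MappingLike({'url': 'http://example.com'})
    # json.dumps(w) -> TypeError: Object of type MappingLike is not JSON serializable
    print(json.dumps(dict(w)))  # dict() consumes keys()/__getitem__, so this works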
@@ -18,19 +18,20 @@ document.addEventListener('DOMContentLoaded', function() {
        transition: background-size 0.9s ease
    }
</style>
<div class="box">
<div class="box" id="form-quick-watch-add">

    <form class="pure-form" action="{{ url_for('ui.ui_views.form_quick_watch_add', tag=active_tag_uuid) }}" method="POST" id="new-watch-form">
        <input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
        <fieldset>
            <legend>Add a new change detection watch</legend>
            <legend>Add a new web page change detection watch</legend>
            <div id="watch-add-wrapper-zone">

                {{ render_nolabel_field(form.url, placeholder="https://...", required=true) }}
                {{ render_nolabel_field(form.tags, value=active_tag.title if active_tag_uuid else '', placeholder="watch label / tag") }}
                {{ render_nolabel_field(form.watch_submit_button, title="Watch this URL!" ) }}
                {{ render_nolabel_field(form.edit_and_watch_submit_button, title="Edit first then Watch") }}
            </div>
            <div id="watch-group-tag">
                {{ render_field(form.tags, value=active_tag.title if active_tag_uuid else '', placeholder="Watch group / tag", class="transparent-field") }}
            </div>
            <div id="quick-watch-processor-type">
                {{ render_simple_field(form.processor) }}
            </div>

@@ -38,7 +39,8 @@ document.addEventListener('DOMContentLoaded', function() {
        </fieldset>
        <span style="color:#eee; font-size: 80%;"><img alt="Create a shareable link" style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread-white.svg')}}" > Tip: You can also add 'shared' watches. <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Sharing-a-Watch">More info</a></span>
    </form>

</div>
<div class="box">
    <form class="pure-form" action="{{ url_for('ui.form_watch_list_checkbox_operations') }}" method="POST" id="watch-list-form">
        <input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
        <input type="hidden" id="op_extradata" name="op_extradata" value="" >

@@ -212,9 +214,14 @@ document.addEventListener('DOMContentLoaded', function() {
            <li id="post-list-mark-views" class="{%- if has_unviewed -%}has-unviewed{%- endif -%}" style="display: none;" >
                <a href="{{url_for('ui.mark_all_viewed',with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag " id="mark-all-viewed">Mark all viewed</a>
            </li>
            {%- if active_tag_uuid -%}
            <li id="post-list-mark-views-tag">
                <a href="{{url_for('ui.mark_all_viewed', tag=active_tag_uuid) }}" class="pure-button button-tag " id="mark-all-viewed">Mark all viewed in '{{active_tag.title}}'</a>
            </li>
            {%- endif -%}
            <li>
                <a href="{{ url_for('ui.form_watch_checknow', tag=active_tag_uuid, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag" id="recheck-all">Recheck
                all {%- if active_tag_uuid-%} in "{{active_tag.title}}"{%endif%}</a>
                all {% if active_tag_uuid %} in '{{active_tag.title}}'{%endif%}</a>
            </li>
            <li>
                <a href="{{ url_for('rss.feed', tag=active_tag_uuid, token=app_rss_token)}}"><img alt="RSS Feed" id="feed-icon" src="{{url_for('static_content', group='images', filename='generic_feed-icon.svg')}}" height="15"></a>

@@ -51,7 +51,15 @@ async def capture_full_page(page):
    await page.setViewport({'width': page.viewport['width'], 'height': step_size})

    while y < min(page_height, SCREENSHOT_MAX_TOTAL_HEIGHT):
        await page.evaluate(f"window.scrollTo(0, {y})")
        # better than scrollTo in case they override it in the page
        await page.evaluate(
            """(y) => {
                document.documentElement.scrollTop = y;
                document.body.scrollTop = y;
            }""",
            y
        )

        screenshot_chunks.append(await page.screenshot(type_='jpeg',
                                                       fullPage=False,
                                                       quality=int(os.getenv("SCREENSHOT_QUALITY", 72))))

@@ -149,7 +157,11 @@ class fetcher(Fetcher):
    ):
        import re
        self.delete_browser_steps_screenshots()
        extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay

        n = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
        extra_wait = min(n, 15)

        logger.debug(f"Extra wait set to {extra_wait}s, requested was {n}s.")

        from pyppeteer import Pyppeteer
        pyppeteer_instance = Pyppeteer()

@@ -165,7 +177,7 @@ class fetcher(Fetcher):
        except websockets.exceptions.InvalidURI:
            raise BrowserConnectError(msg=f"Error connecting to the browser, check your browser connection address (should be ws:// or wss://)")
        except Exception as e:
            raise BrowserConnectError(msg=f"Error connecting to the browser {str(e)}")
            raise BrowserConnectError(msg=f"Error connecting to the browser - Exception '{str(e)}'")

        # Better is to launch chrome with the URL as arg
        # non-headless - newPage() will launch an extra tab/window, .browser should already contain 1 page/tab

@@ -227,13 +239,35 @@ class fetcher(Fetcher):
        # browsersteps_interface = steppable_browser_interface()
        # browsersteps_interface.page = self.page

        response = await self.page.goto(url, waitUntil="load")
        async def handle_frame_navigation(event):
            logger.debug(f"Frame navigated: {event}")
            w = extra_wait - 2 if extra_wait > 4 else 2
            logger.debug(f"Waiting {w} seconds before calling Page.stopLoading...")
            await asyncio.sleep(w)
            logger.debug("Issuing stopLoading command...")
            await self.page._client.send('Page.stopLoading')
            logger.debug("stopLoading command sent!")

        if response is None:
            await self.page.close()
            await browser.close()
            logger.warning("Content Fetcher > Response object was none (as in, the response from the browser was empty, not just the content)")
            raise EmptyReply(url=url, status_code=None)
        self.page._client.on('Page.frameStartedNavigating', lambda event: asyncio.create_task(handle_frame_navigation(event)))
        self.page._client.on('Page.frameStartedLoading', lambda event: asyncio.create_task(handle_frame_navigation(event)))
        self.page._client.on('Page.frameStoppedLoading', lambda event: logger.debug(f"Frame stopped loading: {event}"))

        response = None
        attempt = 0
        while not response:
            logger.debug(f"Attempting page fetch {url} attempt {attempt}")
            response = await self.page.goto(url)
            await asyncio.sleep(1 + extra_wait)
            if response:
                break
            if not response:
                logger.warning("Page did not fetch! trying again!")
            if response is None and attempt >= 2:
                await self.page.close()
                await browser.close()
                logger.warning(f"Content Fetcher > Response object was none (as in, the response from the browser was empty, not just the content) exiting attempt {attempt}")
                raise EmptyReply(url=url, status_code=None)
            attempt += 1

        self.headers = response.headers
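
The new loop bounds the navigation retries instead of failing on the first empty response. The same pattern as a standalone sketch (hypothetical fetch_once coroutine, not the project's API):

    import asyncio

    async def fetch_with_retries(fetch_once, max_attempts=3, settle_seconds=1):
        # Retry until a truthy response or the attempt budget is exhausted
        for attempt in range(max_attempts):
            response = await fetch_once()
            await asyncio.sleep(settle_seconds)  # let late navigations settle
            if response:
                return response
        raise RuntimeError(f"No response after {max_attempts} attempts")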
@@ -276,7 +310,6 @@ class fetcher(Fetcher):
        # if self.browser_steps_get_valid_steps():
        #     self.iterate_browser_steps()

        await asyncio.sleep(1 + extra_wait)

        # So we can find an element on the page where its selector was entered manually (maybe not xPath etc)
        # Setup the xPath/VisualSelector scraper

@@ -18,6 +18,7 @@ async () => {
    'back-order or out of stock',
    'backordered',
    'benachrichtigt mich', // notify me
    'binnenkort leverbaar', // coming soon
    'brak na stanie',
    'brak w magazynie',
    'coming soon',

@@ -85,6 +86,7 @@ async () => {
    'tidak tersedia',
    'tijdelijk uitverkocht',
    'tiket tidak tersedia',
    'to subscribe to back in stock',
    'tükendi',
    'unavailable nearby',
    'unavailable tickets',

@@ -119,8 +121,7 @@ async () => {
        return text.toLowerCase().trim();
    }

    const negateOutOfStockRegex = new RegExp('^([0-9] in stock|add to cart|in stock)', 'ig');

    const negateOutOfStockRegex = new RegExp('^([0-9] in stock|add to cart|in stock|arrives approximately)', 'ig');
    // The out-of-stock or in-stock-text is generally always above-the-fold
    // and often below-the-fold is a list of related products that may or may not contain trigger text
    // so it's good to filter to just the 'above the fold' elements
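
The widened negation regex means text beginning with "arrives approximately ..." is no longer counted as an out-of-stock trigger. A quick check of the same pattern in Python (re.I standing in for the JS 'i' flag):

    import re

    # Same alternation as the JS negateOutOfStockRegex, anchored at the start
    negate = re.compile(r'^([0-9] in stock|add to cart|in stock|arrives approximately)', re.I)

    assert negate.match("In stock - ships today")
    assert negate.match("Arrives approximately 3 - 5 business days")
    assert not negate.match("Sold out")  # still allowed to count as out-of-stock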
@@ -3,6 +3,41 @@ import asyncio
from blinker import signal
from loguru import logger


class NotificationQueue(queue.Queue):
    """
    Extended Queue that sends a 'notification_event' signal when notifications are added.

    This class extends the standard Queue and adds a signal emission after a notification
    is put into the queue. The signal includes the watch UUID if available.
    """

    def __init__(self, maxsize=0):
        super().__init__(maxsize)
        try:
            self.notification_event_signal = signal('notification_event')
        except Exception as e:
            logger.critical(f"Exception creating notification_event signal: {e}")

    def put(self, item, block=True, timeout=None):
        # Call the parent's put method first
        super().put(item, block, timeout)

        # After putting the notification in the queue, emit signal with watch UUID
        try:
            if self.notification_event_signal and isinstance(item, dict):
                watch_uuid = item.get('uuid')
                if watch_uuid:
                    # Send the notification_event signal with the watch UUID
                    self.notification_event_signal.send(watch_uuid=watch_uuid)
                    logger.trace(f"NotificationQueue: Emitted notification_event signal for watch UUID {watch_uuid}")
                else:
                    # Send signal without UUID for system notifications
                    self.notification_event_signal.send()
                    logger.trace("NotificationQueue: Emitted notification_event signal for system notification")
        except Exception as e:
            logger.error(f"Exception emitting notification_event signal: {e}")

class SignalPriorityQueue(queue.PriorityQueue):
    """
    Extended PriorityQueue that sends a signal when items with a UUID are added.
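
Because the queue emits a plain blinker signal, any component can observe notification activity without holding a reference to the queue itself. A minimal receiver sketch (only blinker assumed):

    from blinker import signal

    # Subscribe to the same named signal NotificationQueue emits on put()
    notification_event = signal('notification_event')

    def on_notification(sender, watch_uuid=None, **kwargs):
        # watch_uuid is None for system notifications sent without a UUID
        print(f"notification queued for watch: {watch_uuid}")

    notification_event.connect(on_notification, weak=False)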
@@ -12,7 +12,7 @@ from blinker import signal

from changedetectionio.strtobool import strtobool
from threading import Event
from changedetectionio.custom_queue import SignalPriorityQueue, AsyncSignalPriorityQueue
from changedetectionio.custom_queue import SignalPriorityQueue, AsyncSignalPriorityQueue, NotificationQueue
from changedetectionio import worker_handler

from flask import (

@@ -52,7 +52,7 @@ extra_stylesheets = []

# Use async queue by default, keep sync for backward compatibility
update_q = AsyncSignalPriorityQueue() if worker_handler.USE_ASYNC_WORKERS else SignalPriorityQueue()
notification_q = queue.Queue()
notification_q = NotificationQueue()
MAX_QUEUE_SIZE = 2000

app = Flask(__name__,

@@ -100,7 +100,7 @@ watch_api = Api(app, decorators=[csrf.exempt])
def init_app_secret(datastore_path):
    secret = ""

    path = "{}/secret.txt".format(datastore_path)
    path = os.path.join(datastore_path, "secret.txt")

    try:
        with open(path, "r") as f:

@@ -738,7 +738,7 @@ class globalSettingsRequestForm(Form):
        return False

class globalSettingsApplicationUIForm(Form):
    open_diff_in_new_tab = BooleanField('Open diff page in a new tab', default=True, validators=[validators.Optional()])
    open_diff_in_new_tab = BooleanField("Open 'History' page in a new tab", default=True, validators=[validators.Optional()])
    socket_io_enabled = BooleanField('Realtime UI Updates Enabled', default=True, validators=[validators.Optional()])

    # datastore.data['settings']['application']..

@@ -43,14 +43,14 @@ class model(watch_base):
        self.__datastore_path = kw.get('datastore_path')
        if kw.get('datastore_path'):
            del kw['datastore_path']


        # Save default before passing to parent, since parent will delete it
        default_values = kw.get('default')

        super(model, self).__init__(*arg, **kw)
        if kw.get('default'):
            self.update(kw['default'])
            del kw['default']

        if self.get('default'):
            del self['default']

        if default_values:
            self.update(default_values)

        # Be sure the cached timestamp is ready
        bump = self.history

@@ -227,8 +227,8 @@ class model(watch_base):

    @property
    def has_history(self):
        fname = os.path.join(self.watch_data_dir, "history.txt")
        return os.path.isfile(fname)
        fname = self._get_data_file_path("history.txt")
        return fname and os.path.isfile(fname)

    @property
    def has_browser_steps(self):

@@ -405,16 +405,16 @@ class model(watch_base):
        return not local_lines.issubset(existing_history)

    def get_screenshot(self):
        fname = os.path.join(self.watch_data_dir, "last-screenshot.png")
        if os.path.isfile(fname):
        fname = self._get_data_file_path("last-screenshot.png")
        if fname and os.path.isfile(fname):
            return fname

        # False is not an option for AppRise, must be type None
        return None

    def __get_file_ctime(self, filename):
        fname = os.path.join(self.watch_data_dir, filename)
        if os.path.isfile(fname):
        fname = self._get_data_file_path(filename)
        if fname and os.path.isfile(fname):
            return int(os.path.getmtime(fname))
        return False

@@ -441,20 +441,28 @@ class model(watch_base):
    @property
    def watch_data_dir(self):
        # The base dir of the watch data
        return os.path.join(self.__datastore_path, self['uuid']) if self.__datastore_path else None
        if self.__datastore_path and self.get('uuid'):
            return os.path.join(self.__datastore_path, self['uuid'])
        return None

    def _get_data_file_path(self, filename):
        """Safely get the full path to a data file, returns None if watch_data_dir is None"""
        if self.watch_data_dir:
            return os.path.join(self.watch_data_dir, filename)
        return None

    def get_error_text(self):
        """Return the text saved from a previous request that resulted in a non-200 error"""
        fname = os.path.join(self.watch_data_dir, "last-error.txt")
        if os.path.isfile(fname):
        fname = self._get_data_file_path("last-error.txt")
        if fname and os.path.isfile(fname):
            with open(fname, 'r') as f:
                return f.read()
        return False

    def get_error_snapshot(self):
        """Return path to the screenshot that resulted in a non-200 error"""
        fname = os.path.join(self.watch_data_dir, "last-error-screenshot.png")
        if os.path.isfile(fname):
        fname = self._get_data_file_path("last-error-screenshot.png")
        if fname and os.path.isfile(fname):
            return fname
        return False

@@ -1,13 +1,14 @@
import os
import uuid
import json

from changedetectionio import strtobool
default_notification_format_for_watch = 'System default'

class watch_base(dict):
class watch_base:

    def __init__(self, *arg, **kw):
        self.update({
        self.__data = {
            # Custom notification content
            # Re #110, so then if this is set to None, we know to use the default value instead
            # Requires setting to None on submit if it's the same as the default

@@ -128,9 +129,78 @@ class watch_base(dict):
            'uuid': str(uuid.uuid4()),
            'webdriver_delay': None,
            'webdriver_js_execute_code': None,  # Run before change-detection
        })
        }

        if len(arg) == 1 and (isinstance(arg[0], dict) or hasattr(arg[0], 'keys')):
            self.__data.update(arg[0])
        if kw:
            self.__data.update(kw)

        super(watch_base, self).__init__(*arg, **kw)
        if self.__data.get('default'):
            del self.__data['default']

        if self.get('default'):
            del self['default']
    def __getitem__(self, key):
        return self.__data[key]

    def __setitem__(self, key, value):
        self.__data[key] = value

    def __delitem__(self, key):
        del self.__data[key]

    def __iter__(self):
        return iter(self.__data)

    def __len__(self):
        return len(self.__data)

    def __contains__(self, key):
        return key in self.__data

    def __repr__(self):
        return repr(self.__data)

    def __str__(self):
        return str(self.__data)

    def keys(self):
        return self.__data.keys()

    def values(self):
        return self.__data.values()

    def items(self):
        return self.__data.items()

    def get(self, key, default=None):
        return self.__data.get(key, default)

    def pop(self, key, *args):
        return self.__data.pop(key, *args)

    def popitem(self):
        return self.__data.popitem()

    def clear(self):
        self.__data.clear()

    def update(self, *args, **kwargs):
        self.__data.update(*args, **kwargs)

    def setdefault(self, key, default=None):
        return self.__data.setdefault(key, default)

    def copy(self):
        return self.__data.copy()

    def __deepcopy__(self, memo):
        from copy import deepcopy
        new_instance = self.__class__()
        new_instance.__data = deepcopy(self.__data, memo)
        return new_instance

    def __reduce__(self):
        return (self.__class__, (self.__data,))

    def to_dict(self):
        return dict(self.__data)
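
With the dict subclass gone, the explicit `__deepcopy__` and `__reduce__` hooks keep copying and pickling working against the private dict. An illustrative sketch, assuming the new watch_base wrapper above:

    import pickle
    from copy import deepcopy

    w = watch_base(url='http://example.com', paused=False)

    w2 = deepcopy(w)                    # routed through __deepcopy__
    w3 = pickle.loads(pickle.dumps(w))  # __reduce__ rebuilds from (cls, (data,))

    # dict() works via keys()/__getitem__, same as to_dict()
    assert dict(w) == dict(w2) == dict(w3)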
@@ -42,10 +42,10 @@ class Restock(dict):

        # Update with any provided positional arguments (dictionaries)
        if args:
            if len(args) == 1 and isinstance(args[0], dict):
            if len(args) == 1 and (isinstance(args[0], dict) or hasattr(args[0], 'keys')):
                self.update(args[0])
            else:
                raise ValueError("Only one positional argument of type 'dict' is allowed")
                raise ValueError("Only one positional argument of type 'dict' or dict-like is allowed")

    def __setitem__(self, key, value):
        # Custom logic to handle setting price and original_price

@@ -29,6 +29,11 @@ class SignalHandler:
        watch_delete_signal = signal('watch_deleted')
        watch_delete_signal.connect(self.handle_deleted_signal, weak=False)

        # Connect to the notification_event signal
        notification_event_signal = signal('notification_event')
        notification_event_signal.connect(self.handle_notification_event, weak=False)
        logger.info("SignalHandler: Connected to notification_event signal")

        # Create and start the queue update thread using standard threading
        import threading
        self.polling_emitter_thread = threading.Thread(

@@ -89,6 +94,23 @@ class SignalHandler:
        except Exception as e:
            logger.error(f"Socket.IO error in handle_queue_length: {str(e)}")

    def handle_notification_event(self, *args, **kwargs):
        """Handle notification_event signal and emit to all clients"""
        try:
            watch_uuid = kwargs.get('watch_uuid')
            logger.debug(f"SignalHandler: Notification event received for watch UUID: {watch_uuid}")

            # Emit the notification event to all connected clients
            self.socketio_instance.emit("notification_event", {
                "watch_uuid": watch_uuid,
                "event_timestamp": time.time()
            })

            logger.trace(f"Socket.IO: Emitted notification_event for watch UUID {watch_uuid}")

        except Exception as e:
            logger.error(f"Socket.IO error in handle_notification_event: {str(e)}")


    def polling_emit_running_or_queued_watches_threaded(self):
        """Threading version of polling for Windows compatibility"""

@@ -48,13 +48,14 @@ $(document).ready(function () {
    // Connect to Socket.IO on the same host/port, with path from template
    const socket = io({
        path: socketio_url, // This will be the path prefix like "/app/socket.io" from the template
        transports: ['polling', 'websocket'], // Try WebSocket but fall back to polling
        reconnectionDelay: 1000,
        reconnectionAttempts: 15
        transports: ['websocket', 'polling'],
        reconnectionDelay: 3000,
        reconnectionAttempts: 25
    });

    // Connection status logging
    socket.on('connect', function () {
        $('#realtime-conn-error').hide();
        console.log('Socket.IO connected with path:', socketio_url);
        console.log('Socket transport:', socket.io.engine.transport.name);
        bindSocketHandlerButtonsEvents(socket);

@@ -74,7 +75,8 @@ $(document).ready(function () {

    socket.on('disconnect', function (reason) {
        console.log('Socket.IO disconnected, reason:', reason);
        $('.ajax-op').off('.socketHandlerNamespace')
        $('.ajax-op').off('.socketHandlerNamespace');
        $('#realtime-conn-error').show();
    });

    socket.on('queue_size', function (data) {

@@ -92,6 +94,16 @@ $(document).ready(function () {
        }
    });

    socket.on('notification_event', function (data) {
        console.log(`Stub handler for notification_event ${data.watch_uuid}`);
    });

    socket.on('watch_deleted', function (data) {
        $('tr[data-watch-uuid="' + data.uuid + '"] td').fadeOut(500, function () {
            $(this).closest('tr').remove();
        });
    });

    // Listen for periodically emitted watch data
    console.log('Adding watch_update event listener');

@@ -16,6 +16,12 @@ $(function () {
        $('#op_extradata').val(prompt("Enter a tag name"));
    });


    $('.history-link').click(function (e) {
        // In case they click 'back' in the browser, it should be removed.
        $(this).closest('tr').removeClass('unviewed');
    });

    $('.with-share-link > *').click(function () {
        $("#copied-clipboard").remove();

@@ -71,6 +71,7 @@
    --color-text-watch-tag-list: rgba(231, 0, 105, 0.4);
    --color-background-new-watch-form: rgba(0, 0, 0, 0.05);
    --color-background-new-watch-input: var(--color-white);
    --color-background-new-watch-input-transparent: rgba(255, 255, 255, 0.1);
    --color-text-new-watch-input: var(--color-text);
    --color-border-input: var(--color-grey-500);
    --color-shadow-input: var(--color-grey-400);

@@ -97,6 +98,7 @@ html[data-darkmode="true"] {
    --color-background-gradient-second: #1e316c;
    --color-background-gradient-third: #4d2c64;
    --color-background-new-watch-input: var(--color-grey-100);
    --color-background-new-watch-input-transparent: var(--color-grey-100);
    --color-text-new-watch-input: var(--color-text);
    --color-background-table-thead: var(--color-grey-200);
    --color-table-background: var(--color-grey-300);

@@ -78,6 +78,7 @@
    --color-text-watch-tag-list: rgba(231, 0, 105, 0.4);
    --color-background-new-watch-form: rgba(0, 0, 0, 0.05);
    --color-background-new-watch-input: var(--color-white);
    --color-background-new-watch-input-transparent: rgba(255, 255, 255, 0.1);
    --color-text-new-watch-input: var(--color-text);

    --color-border-input: var(--color-grey-500);

@@ -112,6 +113,7 @@ html[data-darkmode="true"] {
    --color-background-gradient-third: #4d2c64;

    --color-background-new-watch-input: var(--color-grey-100);
    --color-background-new-watch-input-transparent: var(--color-grey-100);
    --color-text-new-watch-input: var(--color-text);
    --color-background-table-thead: var(--color-grey-200);
    --color-table-background: var(--color-grey-300);

@@ -17,11 +17,13 @@
    &.title-col {
        word-break: break-all;
        white-space: normal;
        a::after {
            content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==);
            margin: 0 3px 0 5px;
        }
    }

    a.external::after {
        content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==);
        margin: 0 3px 0 5px;
    }

}

@@ -185,7 +185,8 @@ code {
}

.box {
    max-width: 80%;
    max-width: 100%;
    margin: 0 1em;
    flex-direction: column;
    display: flex;
    justify-content: center;

@@ -279,7 +280,7 @@ a.pure-button-selected {
    font-size: 65%;
    border-bottom-left-radius: initial;
    border-bottom-right-radius: initial;

    margin-right: 4px;
    &.active {
        background: var(--color-background-button-tag-active);
        font-weight: bold;

@@ -372,11 +373,32 @@ label {
    }
}

// Some field colouring for transparent field
.pure-form input[type=text].transparent-field {
    background-color: var(--color-background-new-watch-input-transparent) !important;
    color: var(--color-white) !important;
    border: 1px solid rgba(255, 255, 255, 0.2) !important;
    box-shadow: none !important;
    -webkit-box-shadow: none !important;
    &::placeholder {
        opacity: 0.5;
        color: rgba(255, 255, 255, 0.7);
        font-weight: lighter;
    }
}

#new-watch-form {
    background: var(--color-background-new-watch-form);
    padding: 1em;
    border-radius: 10px;
    margin-bottom: 1em;
    max-width: 100%;

    #url {
        &::placeholder {
            font-weight: bold;
        }
    }

    input {
        display: inline-block;

@@ -397,12 +419,13 @@ label {
        font-weight: bold;
    }

    #watch-add-wrapper-zone {

    #watch-add-wrapper-zone {
        @media only screen and (min-width: 760px) {
            display: flex;
            gap: 0.3rem;
            flex-direction: row;
            min-width: 70vw;
        }
        /* URL field grows always, other stay static in width */
        > span {

@@ -424,6 +447,22 @@ label {
            }
        }
    }

    #watch-group-tag {
        font-size: 0.9rem;
        padding: 0.3rem;
        display: flex;
        align-items: center;
        gap: 0.5rem;
        color: var(--color-white);
        label, input {
            margin: 0;
        }

        input {
            flex: 1;
        }
    }
}

@@ -620,10 +659,6 @@ footer {

@media only screen and (max-width: 760px),
(min-device-width: 768px) and (max-device-width: 1024px) {
    .box {
        max-width: 95%
    }

    .edit-form {
        padding: 0.5em;
        margin: 0;

@@ -1143,16 +1178,14 @@ ul {
    color: #fff;
    ul {
        padding: 0.3rem;

        li {
            list-style: none;
            font-size: 0.8rem;
            font-size: 0.9rem;
            > * {
                display: inline-block;
            }
        }
    }

}

.restock-label {

@@ -1190,3 +1223,12 @@ ul {
    vertical-align: middle;
}

#realtime-conn-error {
    position: absolute;
    bottom: 0;
    left: 30px;
    background: var(--color-warning);
    padding: 10px;
    font-size: 0.8rem;
    color: #fff;
}

@@ -322,6 +322,7 @@ ul#requests-extra_browsers {
    --color-text-watch-tag-list: rgba(231, 0, 105, 0.4);
    --color-background-new-watch-form: rgba(0, 0, 0, 0.05);
    --color-background-new-watch-input: var(--color-white);
    --color-background-new-watch-input-transparent: rgba(255, 255, 255, 0.1);
    --color-text-new-watch-input: var(--color-text);
    --color-border-input: var(--color-grey-500);
    --color-shadow-input: var(--color-grey-400);

@@ -348,6 +349,7 @@ html[data-darkmode="true"] {
    --color-background-gradient-second: #1e316c;
    --color-background-gradient-third: #4d2c64;
    --color-background-new-watch-input: var(--color-grey-100);
    --color-background-new-watch-input-transparent: var(--color-grey-100);
    --color-text-new-watch-input: var(--color-text);
    --color-background-table-thead: var(--color-grey-200);
    --color-table-background: var(--color-grey-300);

@@ -537,9 +539,9 @@ body.preview-text-enabled {
.watch-table td.title-col {
  word-break: break-all;
  white-space: normal; }
.watch-table td.title-col a::after {
  content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==);
  margin: 0 3px 0 5px; }
.watch-table td a.external::after {
  content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==);
  margin: 0 3px 0 5px; }
.watch-table th {
  white-space: nowrap; }
.watch-table th a {

@@ -826,7 +828,8 @@ code {
  background: var(--color-text-watch-tag-list); }

.box {
  max-width: 80%;
  max-width: 100%;
  margin: 0 1em;
  flex-direction: column;
  display: flex;
  justify-content: center; }

@@ -899,7 +902,8 @@ a.pure-button-selected {
  color: var(--color-text-button);
  font-size: 65%;
  border-bottom-left-radius: initial;
  border-bottom-right-radius: initial; }
  border-bottom-right-radius: initial;
  margin-right: 4px; }
.button-tag.active {
  background: var(--color-background-button-tag-active);
  font-weight: bold; }

@@ -962,11 +966,25 @@ label:hover {
#token-table.pure-table th {
  font-size: 80%; }

.pure-form input[type=text].transparent-field {
  background-color: var(--color-background-new-watch-input-transparent) !important;
  color: var(--color-white) !important;
  border: 1px solid rgba(255, 255, 255, 0.2) !important;
  box-shadow: none !important;
  -webkit-box-shadow: none !important; }
.pure-form input[type=text].transparent-field::placeholder {
  opacity: 0.5;
  color: rgba(255, 255, 255, 0.7);
  font-weight: lighter; }

#new-watch-form {
  background: var(--color-background-new-watch-form);
  padding: 1em;
  border-radius: 10px;
  margin-bottom: 1em; }
  margin-bottom: 1em;
  max-width: 100%; }
#new-watch-form #url::placeholder {
  font-weight: bold; }
#new-watch-form input {
  display: inline-block;
  margin-bottom: 5px; }

@@ -984,7 +1002,8 @@ label:hover {
#new-watch-form #watch-add-wrapper-zone {
  display: flex;
  gap: 0.3rem;
  flex-direction: row; } }
  flex-direction: row;
  min-width: 70vw; } }
#new-watch-form #watch-add-wrapper-zone > span {
  flex-grow: 0; }
#new-watch-form #watch-add-wrapper-zone > span input {

@@ -995,6 +1014,17 @@ label:hover {
@media only screen and (max-width: 760px) {
  #new-watch-form #watch-add-wrapper-zone #url {
    width: 100%; } }
#new-watch-form #watch-group-tag {
  font-size: 0.9rem;
  padding: 0.3rem;
  display: flex;
  align-items: center;
  gap: 0.5rem;
  color: var(--color-white); }
#new-watch-form #watch-group-tag label, #new-watch-form #watch-group-tag input {
  margin: 0; }
#new-watch-form #watch-group-tag input {
  flex: 1; }

#diff-col {
  padding-left: 40px; }

@@ -1129,8 +1159,6 @@ footer {
    gap: 1em; }

@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 1024px) {
  .box {
    max-width: 95%; }
  .edit-form {
    padding: 0.5em;
    margin: 0; }

@@ -1506,7 +1534,7 @@ ul {
    padding: 0.3rem; }
  #quick-watch-processor-type ul li {
    list-style: none;
    font-size: 0.8rem; }
    font-size: 0.9rem; }
  #quick-watch-processor-type ul li > * {
    display: inline-block; }

@@ -1535,3 +1563,12 @@ ul {
  height: 21px;
  padding: 2px;
  vertical-align: middle; }

#realtime-conn-error {
  position: absolute;
  bottom: 0;
  left: 30px;
  background: var(--color-warning);
  padding: 10px;
  font-size: 0.8rem;
  color: #fff; }

@@ -13,6 +13,7 @@ import json
import os
import re
import secrets
import sys
import threading
import time
import uuid as uuid_builder

@@ -22,6 +23,13 @@ from blinker import signal
from .processors import get_custom_watch_obj_for_processor
from .processors.restock_diff import Restock

class WatchEncoder(json.JSONEncoder):
    def default(self, obj):
        from .model import watch_base
        if isinstance(obj, watch_base):
            return dict(obj)
        return super().default(obj)

# Because the server will run as a daemon and won't know the URL for notification links when firing off a notification
BASE_URL_NOT_SET_TEXT = '("Base URL" not set - see settings - notifications)'
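
A short sketch of what the encoder buys: `json.dumps` can now serialize watch objects nested anywhere in a structure, without a manual `dict()` cast at each call site (illustrative values; `watch` stands for any watch_base instance):

    import json

    snapshot = {'watching': {'some-uuid': watch}}  # watch: a watch_base instance

    # Without cls=, json.dumps raises TypeError on the watch object;
    # WatchEncoder.default() converts any watch_base to a plain dict first.
    print(json.dumps(snapshot, cls=WatchEncoder, indent=2))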
@@ -45,14 +53,11 @@ class ChangeDetectionStore:
        # logging.basicConfig(filename='/dev/stdout', level=logging.INFO)
        self.__data = App.model()
        self.datastore_path = datastore_path
        self.json_store_path = "{}/url-watches.json".format(self.datastore_path)
        self.json_store_path = os.path.join(self.datastore_path, "url-watches.json")
        logger.info(f"Datastore path is '{self.json_store_path}'")
        self.needs_write = False
        self.start_time = time.time()
        self.stop_thread = False
        # Base definition for all watchers
        # deepcopy part of #569 - not sure why it's needed exactly
        self.generic_definition = deepcopy(Watch.model(datastore_path = datastore_path, default={}))

        if path.isfile('changedetectionio/source.txt'):
            with open('changedetectionio/source.txt') as f:

@@ -118,14 +123,12 @@ class ChangeDetectionStore:
            test_list = self.proxy_list

        # Helper to remove password protection
        password_reset_lockfile = "{}/removepassword.lock".format(self.datastore_path)
        password_reset_lockfile = os.path.join(self.datastore_path, "removepassword.lock")
        if path.isfile(password_reset_lockfile):
            self.__data['settings']['application']['password'] = False
            unlink(password_reset_lockfile)

        if not 'app_guid' in self.__data:
            import os
            import sys
            if "pytest" in sys.modules or "PYTEST_CURRENT_TEST" in os.environ:
                self.__data['app_guid'] = "test-" + str(uuid_builder.uuid4())
            else:

@@ -175,6 +178,14 @@ class ChangeDetectionStore:
            self.__data['settings']['application']['password'] = False
            self.needs_write = True

    def _deep_merge(self, target, source):
        """Recursively merge source dict into target dict"""
        for key, value in source.items():
            if key in target and isinstance(target[key], dict) and isinstance(value, dict):
                self._deep_merge(target[key], value)
            else:
                target[key] = value

    def update_watch(self, uuid, update_obj):

        # It's possible that the watch could be deleted before update
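
The difference from a shallow `dict.update` shows up on nested keys; a minimal demonstration of the merge semantics:

    # Shallow update replaces the whole nested dict, losing sibling keys:
    target = {'time_between_check': {'days': 1, 'hours': 6}}
    target.update({'time_between_check': {'hours': 2}})
    assert target == {'time_between_check': {'hours': 2}}  # 'days' is gone

    # The recursive merge only overwrites the leaves that were supplied:
    def deep_merge(target, source):
        for key, value in source.items():
            if key in target and isinstance(target[key], dict) and isinstance(value, dict):
                deep_merge(target[key], value)
            else:
                target[key] = value

    target = {'time_between_check': {'days': 1, 'hours': 6}}
    deep_merge(target, {'time_between_check': {'hours': 2}})
    assert target == {'time_between_check': {'days': 1, 'hours': 2}}  # 'days' kept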
@@ -182,15 +193,8 @@ class ChangeDetectionStore:
            return

        with self.lock:

            # In python 3.9 we have the |= dict operator, but that still will lose data on nested structures...
            for dict_key, d in self.generic_definition.items():
                if isinstance(d, dict):
                    if update_obj is not None and dict_key in update_obj:
                        self.__data['watching'][uuid][dict_key].update(update_obj[dict_key])
                        del (update_obj[dict_key])

            self.__data['watching'][uuid].update(update_obj)
            # Use recursive merge to handle nested dictionaries properly
            self._deep_merge(self.__data['watching'][uuid], update_obj)
            self.needs_write = True

    @property

@@ -386,14 +390,59 @@ class ChangeDetectionStore:
        return new_uuid

    def visualselector_data_is_ready(self, watch_uuid):
        output_path = "{}/{}".format(self.datastore_path, watch_uuid)
        screenshot_filename = "{}/last-screenshot.png".format(output_path)
        elements_index_filename = "{}/elements.deflate".format(output_path)
        output_path = os.path.join(self.datastore_path, watch_uuid)
        screenshot_filename = os.path.join(output_path, "last-screenshot.png")
        elements_index_filename = os.path.join(output_path, "elements.deflate")
        if path.isfile(screenshot_filename) and path.isfile(elements_index_filename) :
            return True

        return False

    import json
    import os
    import tempfile
    from pathlib import Path  # just for nicer paths

    JSON_INDENT = 2  # or None in production
    ENCODER = WatchEncoder  # your custom encoder

    def save_json_atomic(self, save_path: str | os.PathLike, data) -> None:
        """
        Atomically (re)write *path* with *data* encoded as JSON.
        The original file is left untouched if anything fails.
        """
        import tempfile
        from pathlib import Path  # just for nicer paths

        JSON_INDENT = 2  # or None in production
        ENCODER = WatchEncoder  # your custom encoder

        datapath = Path(save_path)
        directory = datapath.parent

        # 1. create a unique temp file in the same directory
        fd, tmp_name = tempfile.mkstemp(
            dir=directory,
            prefix=f"{datapath.name}.",
            suffix=".tmp",
        )
        try:
            with os.fdopen(fd, "w", encoding="utf-8") as tmp:
                json.dump(data, tmp, indent=JSON_INDENT, cls=ENCODER)
                if os.getenv('JSON_SAVE_FORCE_FLUSH'):
                    tmp.flush()  # push Python buffers
                    os.fsync(tmp.fileno())  # force kernel to write to disk
            os.replace(tmp_name, datapath)

        except Exception as e:
            logger.critical(f"Failed to write JSON to {datapath} - {str(e)}")
            # if anything above blew up, ensure we don't leave junk lying around
            try:
                os.unlink(tmp_name)
            finally:
                raise


    def sync_to_json(self):
        logger.info("Saving JSON..")
        try:
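
The temp-file-plus-`os.replace` dance is what makes the save crash-safe: `os.replace` is atomic on POSIX when source and destination are on the same filesystem, so readers observe either the complete old file or the complete new one, never a torn write. The same pattern as a self-contained sketch:

    import json
    import os
    import tempfile

    def atomic_json_write(path, data):
        # Temp file must live in the same directory so os.replace stays on one filesystem
        fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path) or ".", suffix=".tmp")
        try:
            with os.fdopen(fd, "w", encoding="utf-8") as f:
                json.dump(data, f)
            os.replace(tmp, path)  # atomic swap; old file intact until this succeeds
        except Exception:
            try:
                os.unlink(tmp)
            except FileNotFoundError:
                pass
            raise

    atomic_json_write("url-watches.json", {"watching": {}})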
@@ -405,22 +454,7 @@ class ChangeDetectionStore:
                    self.sync_to_json()
                    return
                else:

                    try:
                        # Re #286 - First write to a temp file, then confirm it looks OK and rename it
                        # This is a fairly basic strategy to deal with the case that the file is corrupted,
                        # system was out of memory, out of RAM etc
                        with open(self.json_store_path+".tmp", 'w') as json_file:
                            # Use compact JSON in production for better performance
                            debug_mode = os.environ.get('CHANGEDETECTION_DEBUG', 'false').lower() == 'true'
                            if debug_mode:
                                json.dump(data, json_file, indent=4)
                            else:
                                json.dump(data, json_file, separators=(',', ':'))
                        os.replace(self.json_store_path+".tmp", self.json_store_path)
                    except Exception as e:
                        logger.error(f"Error writing JSON!! (Main JSON file save was skipped) : {str(e)}")

                    self.save_json_atomic(save_path=self.json_store_path, data=data)
                    self.needs_write = False
                    self.needs_write_urgent = False

@@ -478,7 +512,7 @@ class ChangeDetectionStore:

        # Load from external config file
        if path.isfile(proxy_list_file):
            with open("{}/proxies.json".format(self.datastore_path)) as f:
            with open(os.path.join(self.datastore_path, "proxies.json")) as f:
                proxy_list = json.load(f)

        # Mapping from UI config if available

@@ -736,10 +770,10 @@ class ChangeDetectionStore:
            logger.critical(f"Applying update_{update_n}")
            # Won't exist on fresh installs
            if os.path.exists(self.json_store_path):
                shutil.copyfile(self.json_store_path, self.datastore_path+"/url-watches-before-{}.json".format(update_n))
                shutil.copyfile(self.json_store_path, os.path.join(self.datastore_path, f"url-watches-before-{update_n}.json"))

            try:
                update_method = getattr(self, "update_{}".format(update_n))()
                update_method = getattr(self, f"update_{update_n}")()
            except Exception as e:
                logger.error(f"Error while trying update_{update_n}")
                logger.error(e)

@@ -74,7 +74,7 @@
    </tr>
    <tr>
        <td><code>{{ '{{watch_tag}}' }}</code></td>
        <td>The watch label / tag</td>
        <td>The watch group / tag</td>
    </tr>
    <tr>
        <td><code>{{ '{{preview_url}}' }}</code></td>

@@ -236,6 +236,7 @@
    <script src="{{url_for('static_content', group='js', filename='toggle-theme.js')}}" defer></script>

    <div id="checking-now-fixed-tab" style="display: none;"><span class="spinner"></span><span> Checking now</span></div>
    <div id="realtime-conn-error" style="display:none">Offline</div>
</body>

</html>

@@ -236,39 +236,41 @@ def test_group_tag_notification(client, live_server, measure_memory_usage):
    assert b'Deleted' in res.data

def test_limit_tag_ui(client, live_server, measure_memory_usage):


    test_url = url_for('test_endpoint', _external=True)
    urls = []
    test_url = url_for('test_random_content_endpoint', _external=True)

    for i in range(20):
        urls.append(test_url+"?x="+str(i)+" test-tag")

    for i in range(20):
        urls.append(test_url+"?non-grouped="+str(i))

    res = client.post(
    # A space can label the tag, only the first one will have a tag
    client.post(
        url_for("imports.import_page"),
        data={"urls": "\r\n".join(urls)},
        data={"urls": f"{test_url} test-tag\r\n{test_url}"},
        follow_redirects=True
    )

    assert b"40 Imported" in res.data
    tag_uuid = get_UUID_for_tag_name(client, name="test-tag")
    assert tag_uuid

    res = client.get(url_for("watchlist.index"))
    assert b'test-tag' in res.data
    client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)
    client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)

    # All should be here
    assert res.data.count(b'processor-text_json_diff') == 40
    # Should be both unviewed
    res = client.get(url_for("watchlist.index"))
    assert res.data.count(b' unviewed ') == 2

    tag_uuid = get_UUID_for_tag_name(client, name="test-tag")

    res = client.get(url_for("watchlist.index", tag=tag_uuid))
    # Now we recheck only the tag
    client.get(url_for('ui.mark_all_viewed', tag=tag_uuid), follow_redirects=True)
    wait_for_all_checks(client)

    with open('/tmp/fuck.html', 'wb') as f:
        f.write(res.data)
    # Should be only 1 unviewed
    res = client.get(url_for("watchlist.index"))
    assert res.data.count(b' unviewed ') == 1


    # Just a subset should be here
    assert b'test-tag' in res.data
    assert res.data.count(b'processor-text_json_diff') == 20
    assert b"object at" not in res.data
    res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data
    res = client.get(url_for("tags.delete_all"), follow_redirects=True)
changedetectionio/tests/unit/test_update_watch_deep_merge.py (new file, 291 lines)
@@ -0,0 +1,291 @@
#!/usr/bin/env python3

# run from dir above changedetectionio/ dir
# python3 -m unittest changedetectionio.tests.unit.test_update_watch_deep_merge

import unittest
import os
import tempfile
import shutil
from unittest.mock import patch

from changedetectionio import store


class TestUpdateWatchDeepMerge(unittest.TestCase):

    def setUp(self):
        # Create a temporary directory for test data
        self.test_datastore_path = tempfile.mkdtemp()
        self.datastore = store.ChangeDetectionStore(datastore_path=self.test_datastore_path, include_default_watches=False)

        # Create a test watch with known nested structure
        self.test_uuid = self.datastore.add_watch(url='http://example.com')

        # Set up known initial nested structure
        initial_data = {
            'time_between_check': {'weeks': None, 'days': 1, 'hours': 6, 'minutes': 30, 'seconds': None},
            'headers': {'user-agent': 'test-browser', 'accept': 'text/html'},
            'time_schedule_limit': {
                'enabled': True,
                'monday': {
                    'enabled': True,
                    'start_time': '09:00',
                    'duration': {'hours': '8', 'minutes': '00'}
                },
                'tuesday': {
                    'enabled': False,
                    'start_time': '10:00',
                    'duration': {'hours': '6', 'minutes': '30'}
                }
            }
        }
        self.datastore.update_watch(self.test_uuid, initial_data)

    def tearDown(self):
        self.datastore.stop_thread = True
        # Clean up the temporary directory
        shutil.rmtree(self.test_datastore_path, ignore_errors=True)

    def test_simple_flat_update(self):
        """Test that simple flat updates work as before"""
        update_obj = {'url': 'http://newexample.com', 'paused': True}
        self.datastore.update_watch(self.test_uuid, update_obj)

        watch = self.datastore.data['watching'][self.test_uuid]
        self.assertEqual(watch['url'], 'http://newexample.com')
        self.assertEqual(watch['paused'], True)

    def test_time_between_check_partial_update(self):
        """Test partial update of time_between_check preserves existing keys"""
        # Update only hours, should preserve other existing values
        update_obj = {'time_between_check': {'hours': 2}}
        self.datastore.update_watch(self.test_uuid, update_obj)

        watch = self.datastore.data['watching'][self.test_uuid]
        time_check = watch['time_between_check']

        # Updated value
        self.assertEqual(time_check['hours'], 2)
        # Preserved existing values
        self.assertEqual(time_check['days'], 1)
        self.assertEqual(time_check['minutes'], 30)
        self.assertEqual(time_check['weeks'], None)
        self.assertEqual(time_check['seconds'], None)

    def test_time_between_check_multiple_partial_updates(self):
        """Test multiple partial updates to time_between_check"""
        # First update
        update_obj1 = {'time_between_check': {'minutes': 45}}
        self.datastore.update_watch(self.test_uuid, update_obj1)

        # Second update
        update_obj2 = {'time_between_check': {'seconds': 15}}
        self.datastore.update_watch(self.test_uuid, update_obj2)

        watch = self.datastore.data['watching'][self.test_uuid]
        time_check = watch['time_between_check']

        # Both updates should be preserved
        self.assertEqual(time_check['minutes'], 45)
        self.assertEqual(time_check['seconds'], 15)
        # Original values should be preserved
        self.assertEqual(time_check['days'], 1)
        self.assertEqual(time_check['hours'], 6)

    def test_headers_partial_update(self):
        """Test partial update of headers preserves existing headers"""
        update_obj = {'headers': {'authorization': 'Bearer token123'}}
        self.datastore.update_watch(self.test_uuid, update_obj)

        watch = self.datastore.data['watching'][self.test_uuid]
        headers = watch['headers']

        # New header added
        self.assertEqual(headers['authorization'], 'Bearer token123')
        # Existing headers preserved
        self.assertEqual(headers['user-agent'], 'test-browser')
        self.assertEqual(headers['accept'], 'text/html')

    def test_headers_update_existing_key(self):
        """Test updating an existing header key"""
        update_obj = {'headers': {'user-agent': 'new-browser'}}
        self.datastore.update_watch(self.test_uuid, update_obj)

        watch = self.datastore.data['watching'][self.test_uuid]
        headers = watch['headers']

        # Updated existing header
        self.assertEqual(headers['user-agent'], 'new-browser')
        # Other headers preserved
        self.assertEqual(headers['accept'], 'text/html')

    def test_time_schedule_limit_deep_nested_update(self):
        """Test deep nested update of time_schedule_limit structure"""
        update_obj = {
            'time_schedule_limit': {
                'monday': {
                    'duration': {'hours': '10'}  # Only update hours, preserve minutes
                }
            }
        }
        self.datastore.update_watch(self.test_uuid, update_obj)

        watch = self.datastore.data['watching'][self.test_uuid]
        schedule = watch['time_schedule_limit']

        # Deep nested update applied
        self.assertEqual(schedule['monday']['duration']['hours'], '10')
        # Existing nested values preserved
        self.assertEqual(schedule['monday']['duration']['minutes'], '00')
        self.assertEqual(schedule['monday']['start_time'], '09:00')
        self.assertEqual(schedule['monday']['enabled'], True)
        # Other days preserved
        self.assertEqual(schedule['tuesday']['enabled'], False)
        self.assertEqual(schedule['enabled'], True)

    def test_mixed_flat_and_nested_update(self):
        """Test update with both flat and nested properties"""
        update_obj = {
            'url': 'http://mixed-update.com',
            'paused': False,
            'time_between_check': {'days': 2, 'minutes': 15},
            'headers': {'cookie': 'session=abc123'}
        }
        self.datastore.update_watch(self.test_uuid, update_obj)

        watch = self.datastore.data['watching'][self.test_uuid]

        # Flat updates
        self.assertEqual(watch['url'], 'http://mixed-update.com')
        self.assertEqual(watch['paused'], False)

        # Nested updates
        time_check = watch['time_between_check']
        self.assertEqual(time_check['days'], 2)
        self.assertEqual(time_check['minutes'], 15)
        self.assertEqual(time_check['hours'], 6)  # preserved

        headers = watch['headers']
        self.assertEqual(headers['cookie'], 'session=abc123')
        self.assertEqual(headers['user-agent'], 'test-browser')  # preserved

    def test_overwrite_nested_with_flat(self):
        """Test that providing a non-dict value overwrites the entire nested structure"""
        update_obj = {'time_between_check': 'invalid_value'}
        self.datastore.update_watch(self.test_uuid, update_obj)

        watch = self.datastore.data['watching'][self.test_uuid]
        # Should completely replace the nested dict with the string
        self.assertEqual(watch['time_between_check'], 'invalid_value')

    def test_add_new_nested_structure(self):
        """Test adding a completely new nested dictionary"""
        update_obj = {
            'custom_config': {
                'option1': 'value1',
                'nested': {
                    'suboption': 'subvalue'
                }
            }
        }
        self.datastore.update_watch(self.test_uuid, update_obj)

        watch = self.datastore.data['watching'][self.test_uuid]
        self.assertEqual(watch['custom_config']['option1'], 'value1')
        self.assertEqual(watch['custom_config']['nested']['suboption'], 'subvalue')

    def test_empty_dict_update(self):
        """Test updating with empty dictionaries"""
        update_obj = {'headers': {}}
        self.datastore.update_watch(self.test_uuid, update_obj)

        watch = self.datastore.data['watching'][self.test_uuid]
        # Empty dict should preserve existing headers (no keys to merge)
        self.assertEqual(watch['headers']['user-agent'], 'test-browser')
        self.assertEqual(watch['headers']['accept'], 'text/html')

    def test_none_values_in_nested_update(self):
        """Test handling None values in nested updates"""
        update_obj = {
            'time_between_check': {
                'hours': None,
                'days': 3
            }
        }
        self.datastore.update_watch(self.test_uuid, update_obj)

        watch = self.datastore.data['watching'][self.test_uuid]
        time_check = watch['time_between_check']

        self.assertEqual(time_check['hours'], None)
        self.assertEqual(time_check['days'], 3)
        self.assertEqual(time_check['minutes'], 30)  # preserved

    def test_real_world_api_update_scenario(self):
        """Test a real-world API update scenario from the codebase analysis"""
        # Based on actual API call patterns found in the codebase
        update_obj = {
            "title": "Updated API Watch",
            'time_between_check': {'minutes': 60},
            'headers': {'authorization': 'Bearer api-token', 'user-agent': 'api-client'},
            'notification_urls': ['https://webhook.example.com']
        }
        self.datastore.update_watch(self.test_uuid, update_obj)

        watch = self.datastore.data['watching'][self.test_uuid]

        # Verify all updates
        self.assertEqual(watch['title'], 'Updated API Watch')
        self.assertEqual(watch['time_between_check']['minutes'], 60)
        self.assertEqual(watch['time_between_check']['days'], 1)  # preserved
        self.assertEqual(watch['headers']['authorization'], 'Bearer api-token')
        self.assertEqual(watch['headers']['user-agent'], 'api-client')  # overwrote existing
        self.assertEqual(watch['headers']['accept'], 'text/html')  # preserved
        self.assertEqual(watch['notification_urls'], ['https://webhook.example.com'])

    def test_watch_not_found(self):
        """Test update_watch with non-existent UUID"""
        # Should not raise an error, just return silently
        fake_uuid = 'non-existent-uuid'
        update_obj = {'url': 'http://should-not-update.com'}

        # Should not raise an exception
        self.datastore.update_watch(fake_uuid, update_obj)

        # Verify no changes were made to existing watch
        watch = self.datastore.data['watching'][self.test_uuid]
        self.assertNotEqual(watch['url'], 'http://should-not-update.com')

    def test_processor_style_update(self):
        """Test the type of updates made by processors during check operations"""
        # Based on async_update_worker.py patterns
        update_obj = {
            'last_notification_error': False,
            'last_error': False,
            'previous_md5': 'abc123def456',
            'content-type': 'application/json',
            'consecutive_filter_failures': 0,
            'fetch_time': 1.234,
            'check_count': 42
        }
        self.datastore.update_watch(self.test_uuid, update_obj)

        watch = self.datastore.data['watching'][self.test_uuid]

        # Verify processor updates
        self.assertEqual(watch['last_notification_error'], False)
        self.assertEqual(watch['last_error'], False)
        self.assertEqual(watch['previous_md5'], 'abc123def456')
        self.assertEqual(watch['content-type'], 'application/json')
        self.assertEqual(watch['consecutive_filter_failures'], 0)
        self.assertEqual(watch['fetch_time'], 1.234)
        self.assertEqual(watch['check_count'], 42)

        # Verify nested structures weren't affected
        self.assertEqual(watch['time_between_check']['days'], 1)
        self.assertEqual(watch['headers']['user-agent'], 'test-browser')


if __name__ == '__main__':
    unittest.main()
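Taken together, these tests pin down recursive merge semantics for update_watch: nested dicts are merged key-by-key, while any non-dict value (including None) simply overwrites. A minimal sketch of that rule (an illustrative helper, not the store's actual implementation):

def deep_merge(target: dict, update: dict) -> None:
    """Recursively merge `update` into `target` in place (illustrative only)."""
    for key, value in update.items():
        if isinstance(value, dict) and isinstance(target.get(key), dict):
            # Both sides are dicts: recurse so untouched sibling keys survive
            deep_merge(target[key], value)
        else:
            # Flat values, None, and dict-over-non-dict all overwrite outright
            target[key] = value

Against the setUp fixture, deep_merge(watch, {'time_between_check': {'hours': 2}}) leaves 'days', 'minutes', 'weeks' and 'seconds' untouched, exactly what test_time_between_check_partial_update asserts; {'headers': {}} iterates zero keys and so changes nothing, matching test_empty_dict_update.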
@@ -69,6 +69,16 @@ services:
      # Maximum height of screenshots, default is 16000 px, screenshots will be clipped to this if exceeded.
      # RAM usage will be higher if you increase this.
      # - SCREENSHOT_MAX_HEIGHT=16000
      #
      # HTTPS SSL mode for the webserver; both of these are unset by default. You may need to volume-mount the files as well,
      # e.g. ./cert.pem:/app/cert.pem and ./privkey.pem:/app/privkey.pem
      # - SSL_CERT_FILE=cert.pem
      # - SSL_PRIVKEY_FILE=privkey.pem
      #
      # LISTEN_HOST / "host", same as -h
      # - LISTEN_HOST=::
      # - LISTEN_HOST=0.0.0.0

    # Comment out ports: when running behind a reverse proxy, enable networks: etc.
    ports: