Mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2025-11-02 15:47:19 +00:00)

Compare commits: 40 commits, 0.50.1 ... 3270-histo
| SHA1 |
|---|
| c5433dd0ed |
| 1a496add5b |
| fd650955a8 |
| 27743ba818 |
| df2e7ef034 |
| afe252126c |
| 342e6119f1 |
| e4ff87e970 |
| e45a544f15 |
| 9a5abaa17a |
| b8ecfff861 |
| 58e2a41c95 |
| a7214db9c3 |
| b9da4af64f |
| b77105be7b |
| 3d5a544ea6 |
| 4f362385e1 |
| a01d6169d2 |
| 9beda3911d |
| 5ed596bfa9 |
| 99ca8787ab |
| 8f1a6feb90 |
| c0e229201b |
| 66bc7fbc04 |
| 530bd40ca5 |
| 36004cf74b |
| c7374245e1 |
| 59df59e9cd |
| c0c2898b91 |
| abac660bac |
| 26de64d873 |
| 79d9a8ca28 |
| 5c391fbcad |
| d7e24f64a5 |
| d6427d823f |
| 47eb874f47 |
| 37019355fd |
| a8e7f8236e |
| 2414b61fcb |
| a63ffa89b1 |
14  .github/workflows/containers.yml (vendored)

@@ -103,6 +103,13 @@ jobs:
# provenance: false

# A new tagged release is required, which builds :tag and :latest
- name: Debug release info
if: github.event_name == 'release' && startsWith(github.event.release.tag_name, '0.')
run: |
echo "Release tag: ${{ github.event.release.tag_name }}"
echo "Github ref: ${{ github.ref }}"
echo "Github ref name: ${{ github.ref_name }}"

- name: Docker meta :tag
if: github.event_name == 'release' && startsWith(github.event.release.tag_name, '0.')
uses: docker/metadata-action@v5
@@ -112,9 +119,10 @@ jobs:
${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io
ghcr.io/dgtlmoon/changedetection.io
tags: |
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=semver,pattern={{version}},value=${{ github.event.release.tag_name }}
type=semver,pattern={{major}}.{{minor}},value=${{ github.event.release.tag_name }}
type=semver,pattern={{major}},value=${{ github.event.release.tag_name }}
type=raw,value=latest

- name: Build and push :tag
id: docker_build_tag_release
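Note on the tagging change above: with value=${{ github.event.release.tag_name }} each semver pattern is expanded from the release tag itself rather than the checked-out ref. A rough sketch of that expansion in plain Python (the helper name and example tag are illustrative only, not part of the workflow):

    # Sketch: expand a release tag such as "0.50.5" into the Docker tags that the
    # metadata-action patterns above would generate (illustrative helper only).
    def docker_tags_for_release(tag_name: str) -> list[str]:
        major, minor, _patch = tag_name.split(".")
        return [
            tag_name,               # type=semver,pattern={{version}}
            f"{major}.{minor}",     # type=semver,pattern={{major}}.{{minor}}
            major,                  # type=semver,pattern={{major}}
            "latest",               # type=raw,value=latest
        ]

    print(docker_tags_for_release("0.50.5"))  # ['0.50.5', '0.50', '0', 'latest']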
40  .github/workflows/test-container-build.yml (vendored)

@@ -23,8 +23,28 @@ on:
# Changes to requirements.txt packages and Dockerfile may or may not always be compatible with arm etc, so worth testing
# @todo: some kind of path filter for requirements.txt and Dockerfile
jobs:
test-container-build:
builder:
name: Build ${{ matrix.platform }} (${{ matrix.dockerfile == './Dockerfile' && 'main' || 'alpine' }})
runs-on: ubuntu-latest
strategy:
matrix:
include:
# Main Dockerfile platforms
- platform: linux/amd64
dockerfile: ./Dockerfile
- platform: linux/arm64
dockerfile: ./Dockerfile
- platform: linux/arm/v7
dockerfile: ./Dockerfile
- platform: linux/arm/v8
dockerfile: ./Dockerfile
- platform: linux/arm64/v8
dockerfile: ./Dockerfile
# Alpine Dockerfile platforms (musl via alpine check)
- platform: linux/amd64
dockerfile: ./.github/test/Dockerfile-alpine
- platform: linux/arm64
dockerfile: ./.github/test/Dockerfile-alpine
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.11
@@ -47,26 +67,14 @@ jobs:
version: latest
driver-opts: image=moby/buildkit:master

# https://github.com/dgtlmoon/changedetection.io/pull/1067
# Check we can still build under alpine/musl
- name: Test that the docker containers can build (musl via alpine check)
id: docker_build_musl
uses: docker/build-push-action@v6
with:
context: ./
file: ./.github/test/Dockerfile-alpine
platforms: linux/amd64,linux/arm64
cache-from: type=gha
cache-to: type=gha,mode=max

- name: Test that the docker containers can build
- name: Test that the docker containers can build (${{ matrix.platform }} - ${{ matrix.dockerfile }})
id: docker_build
uses: docker/build-push-action@v6
# https://github.com/docker/build-push-action#customizing
with:
context: ./
file: ./Dockerfile
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v8,linux/arm64/v8
file: ${{ matrix.dockerfile }}
platforms: ${{ matrix.platform }}
cache-from: type=gha
cache-to: type=gha,mode=max


@@ -179,6 +179,26 @@ jobs:

docker kill test-changedetectionio

- name: Test HTTPS SSL mode
run: |
openssl req -x509 -newkey rsa:4096 -keyout privkey.pem -out cert.pem -days 365 -nodes -subj "/CN=localhost"
docker run --name test-changedetectionio-ssl --rm -e SSL_CERT_FILE=cert.pem -e SSL_PRIVKEY_FILE=privkey.pem -p 5000:5000 -v ./cert.pem:/app/cert.pem -v ./privkey.pem:/app/privkey.pem -d test-changedetectionio
sleep 3
# Should return 0 (no error) when grep finds it
# -k because its self-signed
curl --retry-connrefused --retry 6 -k https://localhost:5000 -v|grep -q checkbox-uuid

docker kill test-changedetectionio-ssl

- name: Test IPv6 Mode
run: |
# IPv6 - :: bind to all interfaces inside container (like 0.0.0.0), ::1 would be localhost only
docker run --name test-changedetectionio-ipv6 --rm -p 5000:5000 -e LISTEN_HOST=:: -d test-changedetectionio
sleep 3
# Should return 0 (no error) when grep finds it on localhost
curl --retry-connrefused --retry 6 http://[::1]:5000 -v|grep -q checkbox-uuid
docker kill test-changedetectionio-ipv6

- name: Test changedetection.io SIGTERM and SIGINT signal shutdown
run: |
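The two new CI steps above only smoke-test reachability: self-signed HTTPS (hence curl -k) and an IPv6 bind via LISTEN_HOST=::. A hedged Python equivalent of those curl checks, assuming a container is already running exactly as in the steps above (the helper name is invented):

    # Sketch: the same reachability checks the curl commands above perform,
    # using the requests library (illustrative; assumes the container is up).
    import requests

    def app_answers(url: str, verify_tls: bool = True) -> bool:
        # The workflow greps for "checkbox-uuid" to prove the UI rendered.
        r = requests.get(url, verify=verify_tls, timeout=10)
        return "checkbox-uuid" in r.text

    print(app_answers("https://localhost:5000", verify_tls=False))  # -k equivalent: self-signed cert
    print(app_answers("http://[::1]:5000"))                         # IPv6 loopback, LISTEN_HOST=::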
2  LICENSE

@@ -186,7 +186,7 @@
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]
Copyright 2025 Web Technologies s.r.o.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -1,11 +1,21 @@
## Web Site Change Detection, Monitoring and Notification.
# Monitor website changes

Live your data-life pro-actively, track website content changes and receive notifications via Discord, Email, Slack, Telegram and 70+ more
Detect WebPage Changes Automatically — Monitor Web Page Changes in Real Time

Monitor websites for updates — get notified via Discord, Email, Slack, Telegram, Webhook and many more.

Detect web page content changes and get instant alerts.


[Changedetection.io is the best tool to monitor web-pages for changes](https://changedetection.io) Track website content changes and receive notifications via Discord, Email, Slack, Telegram and 90+ more

Ideal for monitoring price changes, content edits, conditional changes and more.

[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring, list of websites with changes" title="Self-hosted web page change monitoring, list of websites with changes" />](https://changedetection.io)


[**Don't have time? Let us host it for you! try our extremely affordable subscription use our proxies and support!**](https://changedetection.io)
[**Don't have time? Try our extremely affordable subscription use our proxies and support!**](https://changedetection.io)


### Target specific parts of the webpage using the Visual Selector tool.
15  README.md

@@ -1,11 +1,13 @@
## Web Site Change Detection, Restock monitoring and notifications.
# Detect Website Changes Automatically — Monitor Web Page Changes in Real Time

**_Detect website content changes and perform meaningful actions - trigger notifications via Discord, Email, Slack, Telegram, API calls and many more._**
Monitor websites for updates — get notified via Discord, Email, Slack, Telegram, Webhook and many more.

_Live your data-life pro-actively._
**Detect web page content changes and get instant alerts.**

Ideal for monitoring price changes, content edits, conditional changes and more.


[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web site page change monitoring" title="Self-hosted web site page change monitoring" />](https://changedetection.io?src=github)
[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Web site page change monitoring" title="Web site page change monitoring" />](https://changedetection.io?src=github)

[![Release Version][release-shield]][release-link] [![Docker Pulls][docker-pulls]][docker-link] [![License][license-shield]](LICENSE.md)

@@ -13,6 +15,7 @@ _Live your data-life pro-actively._

[**Get started with website page change monitoring straight away. Don't have time? Try our $8.99/month subscription, use our proxies and support!**](https://changedetection.io) , _half the price of other website change monitoring services!_


- Chrome browser included.
- Nothing to install, access via browser login after signup.
- Super fast, no registration needed setup.
@@ -99,9 +102,7 @@ _Need an actual Chrome runner with Javascript support? We support fetching via W
- Configurable [proxy per watch](https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration)
- Send a screenshot with the notification when a change is detected in the web page

We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) global proxy services, Bright Data will match any first deposit up to $100 using our signup link.

[Oxylabs](https://oxylabs.go2cloud.org/SH2d) is also an excellent proxy provider and well worth using, they offer Residental, ISP, Rotating and many other proxy types to suit your project.
We [recommend and use Bright Data](https://brightdata.grsm.io/n0r16zf7eivq) global proxy services, Bright Data will match any first deposit up to $150 using our signup link.

Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/
@@ -2,7 +2,7 @@

# Read more https://github.com/dgtlmoon/changedetection.io/wiki

__version__ = '0.50.01'
__version__ = '0.50.5'

from changedetectionio.strtobool import strtobool
from json.decoder import JSONDecodeError
@@ -65,8 +65,7 @@ def main():

datastore_path = None
do_cleanup = False
host = "0.0.0.0"
ipv6_enabled = False
host = os.environ.get("LISTEN_HOST", "0.0.0.0").strip()
port = int(os.environ.get('PORT', 5000))
ssl_mode = False

@@ -108,10 +107,6 @@ def main():
if opt == '-d':
datastore_path = arg

if opt == '-6':
logger.success("Enabling IPv6 listen support")
ipv6_enabled = True

# Cleanup (remove text files that arent in the index)
if opt == '-c':
do_cleanup = True
@@ -123,6 +118,20 @@ def main():
if opt == '-l':
logger_level = int(arg) if arg.isdigit() else arg.upper()


logger.success(f"changedetection.io version {get_version()} starting.")
# Launch using SocketIO run method for proper integration (if enabled)
ssl_cert_file = os.getenv("SSL_CERT_FILE", 'cert.pem')
ssl_privkey_file = os.getenv("SSL_PRIVKEY_FILE", 'privkey.pem')
if os.getenv("SSL_CERT_FILE") and os.getenv("SSL_PRIVKEY_FILE"):
ssl_mode = True

# SSL mode could have been set by -s too, therefor fallback to default values
if ssl_mode:
if not os.path.isfile(ssl_cert_file) or not os.path.isfile(ssl_privkey_file):
logger.critical(f"Cannot start SSL/HTTPS mode, Please be sure that {ssl_cert_file}' and '{ssl_privkey_file}' exist in in {os.getcwd()}")
os._exit(2)

# Without this, a logger will be duplicated
logger.remove()
try:
@@ -222,19 +231,19 @@ def main():


# SocketIO instance is already initialized in flask_app.py

# Launch using SocketIO run method for proper integration (if enabled)
if socketio_server:
if ssl_mode:
socketio.run(app, host=host, port=int(port), debug=False,
certfile='cert.pem', keyfile='privkey.pem', allow_unsafe_werkzeug=True)
logger.success(f"SSL mode enabled, attempting to start with '{ssl_cert_file}' and '{ssl_privkey_file}' in {os.getcwd()}")
socketio.run(app, host=host, port=int(port), debug=False,
ssl_context=(ssl_cert_file, ssl_privkey_file), allow_unsafe_werkzeug=True)
else:
socketio.run(app, host=host, port=int(port), debug=False, allow_unsafe_werkzeug=True)
else:
# Run Flask app without Socket.IO if disabled
logger.info("Starting Flask app without Socket.IO server")
if ssl_mode:
app.run(host=host, port=int(port), debug=False,
ssl_context=('cert.pem', 'privkey.pem'))
logger.success(f"SSL mode enabled, attempting to start with '{ssl_cert_file}' and '{ssl_privkey_file}' in {os.getcwd()}")
app.run(host=host, port=int(port), debug=False,
ssl_context=(ssl_cert_file, ssl_privkey_file))
else:
app.run(host=host, port=int(port), debug=False)
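The startup rewrite above drives both launch paths from a single ssl_context tuple built from SSL_CERT_FILE / SSL_PRIVKEY_FILE. A minimal standalone sketch of that pattern (bare Flask app for illustration; the defaults mirror the code above, not the project's actual entry point):

    # Sketch: SSL_CERT_FILE / SSL_PRIVKEY_FILE -> Flask/SocketIO ssl_context tuple
    # (standalone illustration only).
    import os
    from flask import Flask

    app = Flask(__name__)

    cert = os.getenv("SSL_CERT_FILE", "cert.pem")
    key = os.getenv("SSL_PRIVKEY_FILE", "privkey.pem")
    ssl_mode = bool(os.getenv("SSL_CERT_FILE") and os.getenv("SSL_PRIVKEY_FILE"))

    if ssl_mode and os.path.isfile(cert) and os.path.isfile(key):
        app.run(host=os.getenv("LISTEN_HOST", "0.0.0.0"), port=5000, ssl_context=(cert, key))
    else:
        app.run(host=os.getenv("LISTEN_HOST", "0.0.0.0"), port=5000)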
@@ -228,10 +228,8 @@ def construct_blueprint(datastore: ChangeDetectionStore):
watch.save_screenshot(screenshot=screenshot)
watch.save_xpath_data(data=xpath_data)

except playwright._impl._api_types.Error as e:
return make_response("Browser session ran out of time :( Please reload this page."+str(e), 401)
except Exception as e:
return make_response("Error fetching screenshot and element data - " + str(e), 401)
return make_response(f"Error fetching screenshot and element data - {str(e)}", 401)

# SEND THIS BACK TO THE BROWSER
output = {
@@ -1,8 +1,6 @@
import os
import time
import re
import sys
import traceback
from random import randint
from loguru import logger

@@ -92,8 +90,32 @@ class steppable_browser_interface():
if optional_value and ('{%' in optional_value or '{{' in optional_value):
optional_value = jinja_render(template_str=optional_value)

# Trigger click and cautiously handle potential navigation
# This means the page redirects/reloads/changes JS etc etc
if call_action_name.startswith('click_'):
try:
# Set up navigation expectation before the click (like sync version)
async with self.page.expect_event("framenavigated", timeout=3000) as navigation_info:
await action_handler(selector, optional_value)

# Check if navigation actually occurred
try:
await navigation_info.value # This waits for the navigation promise
logger.debug(f"Navigation occurred on {call_action_name}.")
except Exception:
logger.debug(f"No navigation occurred within timeout when calling {call_action_name}, that's OK, continuing.")

except Exception as e:
# If expect_event itself times out, that means no navigation occurred - that's OK
if "framenavigated" in str(e) and "exceeded" in str(e):
logger.debug(f"No navigation occurred within timeout when calling {call_action_name}, that's OK, continuing.")
else:
raise e
else:
# Some other action that probably a navigation is not expected
await action_handler(selector, optional_value)


await action_handler(selector, optional_value)
# Safely wait for timeout
await self.page.wait_for_timeout(1.5 * 1000)
logger.debug(f"Call action done in {time.time()-now:.2f}s")
@@ -110,7 +132,7 @@ class steppable_browser_interface():

# Incase they request to go back to the start
async def action_goto_site(self, selector=None, value=None):
return await self.action_goto_url(value=self.start_url)
return await self.action_goto_url(value=re.sub(r'^source:', '', self.start_url, flags=re.IGNORECASE))

async def action_click_element_containing_text(self, selector=None, value=''):
logger.debug("Clicking element containing text")
@@ -428,6 +450,9 @@ class browsersteps_live_ui(steppable_browser_interface):
try:
# Get screenshot first
screenshot = await capture_full_page_async(page=self.page)
if not screenshot:
logger.error("No screenshot was retrieved :((")

logger.debug(f"Time to get screenshot from browser {time.time() - now:.2f}s")

# Then get interactive elements
@@ -450,6 +475,12 @@ class browsersteps_live_ui(steppable_browser_interface):

except Exception as e:
logger.error(f"Error getting current state: {str(e)}")
# If the page has navigated (common with logins) then the context is destroyed on navigation, continue
# I'm not sure that this is required anymore because we have the "expect navigation wrapper" at the top
if "Execution context was destroyed" in str(e):
logger.debug("Execution context was destroyed, most likely because of navigation, continuing...")
pass

# Attempt recovery - force garbage collection
try:
await self.page.request_gc()
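The click handling added above wraps every click_ action in Playwright's expect_event("framenavigated") so a click that triggers navigation is waited on, while a click that does not is simply allowed to time out. The pattern condensed on its own (async Playwright API; the URL and selector are placeholders, not project code):

    # Sketch: "maybe it navigates, maybe it doesn't" click handling with
    # Playwright's expect_event (placeholder URL/selector).
    import asyncio
    from playwright.async_api import async_playwright

    async def click_and_maybe_navigate(page, selector):
        try:
            async with page.expect_event("framenavigated", timeout=3000) as nav_info:
                await page.click(selector)
            await nav_info.value   # resolves only if a navigation actually happened
            print("navigation occurred")
        except Exception:
            print("no navigation within timeout - that's OK, continue")

    async def main():
        async with async_playwright() as p:
            browser = await p.chromium.launch()
            page = await browser.new_page()
            await page.goto("https://example.com")
            await click_and_maybe_navigate(page, "a")
            await browser.close()

    asyncio.run(main())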
@@ -10,7 +10,7 @@
<legend>Add a new organisational tag</legend>
<div id="watch-add-wrapper-zone">
<div>
{{ render_simple_field(form.name, placeholder="watch label / tag") }}
{{ render_simple_field(form.name, placeholder="Watch group / tag") }}
</div>
<div>
{{ render_simple_field(form.save_button, title="Save" ) }}
@@ -159,12 +159,20 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, worker_handle
def mark_all_viewed():
# Save the current newest history as the most recently viewed
with_errors = request.args.get('with_errors') == "1"
tag_limit = request.args.get('tag')
logger.debug(f"Limiting to tag {tag_limit}")
now = int(time.time())
for watch_uuid, watch in datastore.data['watching'].items():
if with_errors and not watch.get('last_error'):
continue
datastore.set_last_viewed(watch_uuid, int(time.time()))

return redirect(url_for('watchlist.index'))
if tag_limit and ( not watch.get('tags') or tag_limit not in watch['tags'] ):
logger.debug(f"Skipping watch {watch_uuid}")
continue

datastore.set_last_viewed(watch_uuid, now)

return redirect(url_for('watchlist.index', tag=tag_limit))

@ui_blueprint.route("/delete", methods=['GET'])
@login_optionally_required
@@ -1,8 +1,7 @@
from flask import Blueprint, request, redirect, url_for, flash, render_template, make_response, send_from_directory, abort
from flask_login import current_user
import os
import time
from copy import deepcopy
from loguru import logger

from changedetectionio.store import ChangeDetectionStore
from changedetectionio.auth_decorator import login_optionally_required
@@ -78,7 +77,42 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe

return output

@views_blueprint.route("/diff/<string:uuid>", methods=['GET', 'POST'])
@views_blueprint.route("/diff/<string:uuid>", methods=['POST'])
@login_optionally_required
def diff_history_page_build_report(uuid):
from changedetectionio import forms

# More for testing, possible to return the first/only
if uuid == 'first':
uuid = list(datastore.data['watching'].keys()).pop()

try:
watch = datastore.data['watching'][uuid]
except KeyError:
flash("No history found for the specified link, bad link?", "error")
return redirect(url_for('watchlist.index'))

# For submission of requesting an extract
extract_form = forms.extractDataForm(request.form)
if not extract_form.validate():
flash("An error occurred, please see below.", "error")

else:
extract_regex = request.form.get('extract_regex').strip()
output = watch.extract_regex_from_all_history(extract_regex)
if output:
watch_dir = os.path.join(datastore.datastore_path, uuid)
response = make_response(send_from_directory(directory=watch_dir, path=output, as_attachment=True))
response.headers['Content-type'] = 'text/csv'
response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
response.headers['Pragma'] = 'no-cache'
response.headers['Expires'] = "0"
return response

flash('Nothing matches that RegEx', 'error')
redirect(url_for('ui_views.diff_history_page', uuid=uuid) + '#extract')

@views_blueprint.route("/diff/<string:uuid>", methods=['GET'])
@login_optionally_required
def diff_history_page(uuid):
from changedetectionio import forms
@@ -96,60 +130,31 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe

# For submission of requesting an extract
extract_form = forms.extractDataForm(request.form)
if request.method == 'POST':
if not extract_form.validate():
flash("An error occurred, please see below.", "error")

else:
extract_regex = request.form.get('extract_regex').strip()
output = watch.extract_regex_from_all_history(extract_regex)
if output:
watch_dir = os.path.join(datastore.datastore_path, uuid)
response = make_response(send_from_directory(directory=watch_dir, path=output, as_attachment=True))
response.headers['Content-type'] = 'text/csv'
response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
response.headers['Pragma'] = 'no-cache'
response.headers['Expires'] = 0
return response

flash('Nothing matches that RegEx', 'error')
redirect(url_for('ui_views.diff_history_page', uuid=uuid)+'#extract')

history = watch.history
dates = list(history.keys())

if len(dates) < 2:
flash("Not enough saved change detection snapshots to produce a report.", "error")
return redirect(url_for('watchlist.index'))
# If a "from_version" was requested, then find it (or the closest one)
# Also set "from version" to be the closest version to the one that was last viewed.

# Save the current newest history as the most recently viewed
datastore.set_last_viewed(uuid, time.time())
best_last_viewed_timestamp = watch.get_from_version_based_on_last_viewed
from_version_timestamp = best_last_viewed_timestamp if best_last_viewed_timestamp else dates[-2]
from_version = request.args.get('from_version', from_version_timestamp )

# Read as binary and force decode as UTF-8
# Windows may fail decode in python if we just use 'r' mode (chardet decode exception)
from_version = request.args.get('from_version')
from_version_index = -2 # second newest
if from_version and from_version in dates:
from_version_index = dates.index(from_version)
else:
from_version = dates[from_version_index]
# Use the current one if nothing was specified
to_version = request.args.get('to_version', str(dates[-1]))

try:
from_version_file_contents = watch.get_history_snapshot(dates[from_version_index])
to_version_file_contents = watch.get_history_snapshot(timestamp=to_version)
except Exception as e:
from_version_file_contents = f"Unable to read to-version at index {dates[from_version_index]}.\n"

to_version = request.args.get('to_version')
to_version_index = -1
if to_version and to_version in dates:
to_version_index = dates.index(to_version)
else:
to_version = dates[to_version_index]
logger.error(f"Unable to read watch history to-version for version {to_version}: {str(e)}")
to_version_file_contents = f"Unable to read to-version at {to_version}.\n"

try:
to_version_file_contents = watch.get_history_snapshot(dates[to_version_index])
from_version_file_contents = watch.get_history_snapshot(timestamp=from_version)
except Exception as e:
to_version_file_contents = "Unable to read to-version at index{}.\n".format(dates[to_version_index])
logger.error(f"Unable to read watch history from-version for version {from_version}: {str(e)}")
from_version_file_contents = f"Unable to read to-version {from_version}.\n"

screenshot_url = watch.get_screenshot()

@@ -163,6 +168,8 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
if datastore.data['settings']['application'].get('password') or os.getenv("SALTED_PASS", False):
password_enabled_and_share_is_off = not datastore.data['settings']['application'].get('shared_diff_access')

datastore.set_last_viewed(uuid, time.time())

output = render_template("diff.html",
current_diff_url=watch['url'],
from_version=str(from_version),
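The rewritten diff_history_page above resolves from_version and to_version against the ordered list of snapshot timestamps, falling back to the second-newest and newest entries when the requested value is missing. That selection logic as a small pure function (hypothetical helper, useful mainly for reasoning about the fallbacks; "dates" stands for a watch's ordered snapshot keys):

    # Sketch: the from/to fallback used above, isolated as a pure function.
    def resolve_versions(dates, from_version=None, to_version=None):
        from_index = dates.index(from_version) if from_version in dates else -2  # second newest
        to_index = dates.index(to_version) if to_version in dates else -1        # newest
        return dates[from_index], dates[to_index]

    dates = ["1700000000", "1700000100", "1700000200"]
    print(resolve_versions(dates))                              # ('1700000100', '1700000200')
    print(resolve_versions(dates, from_version="1700000000"))   # ('1700000000', '1700000200')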
@@ -18,19 +18,20 @@ document.addEventListener('DOMContentLoaded', function() {
transition: background-size 0.9s ease
}
</style>
<div class="box">
<div class="box" id="form-quick-watch-add">

<form class="pure-form" action="{{ url_for('ui.ui_views.form_quick_watch_add', tag=active_tag_uuid) }}" method="POST" id="new-watch-form">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
<fieldset>
<legend>Add a new change detection watch</legend>
<legend>Add a new web page change detection watch</legend>
<div id="watch-add-wrapper-zone">

{{ render_nolabel_field(form.url, placeholder="https://...", required=true) }}
{{ render_nolabel_field(form.tags, value=active_tag.title if active_tag_uuid else '', placeholder="watch label / tag") }}
{{ render_nolabel_field(form.watch_submit_button, title="Watch this URL!" ) }}
{{ render_nolabel_field(form.edit_and_watch_submit_button, title="Edit first then Watch") }}
</div>
<div id="watch-group-tag">
{{ render_field(form.tags, value=active_tag.title if active_tag_uuid else '', placeholder="Watch group / tag", class="transparent-field") }}
</div>
<div id="quick-watch-processor-type">
{{ render_simple_field(form.processor) }}
</div>
@@ -38,7 +39,8 @@ document.addEventListener('DOMContentLoaded', function() {
</fieldset>
<span style="color:#eee; font-size: 80%;"><img alt="Create a shareable link" style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread-white.svg')}}" > Tip: You can also add 'shared' watches. <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Sharing-a-Watch">More info</a></span>
</form>

</div>
<div class="box">
<form class="pure-form" action="{{ url_for('ui.form_watch_list_checkbox_operations') }}" method="POST" id="watch-list-form">
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}" >
<input type="hidden" id="op_extradata" name="op_extradata" value="" >
@@ -144,7 +146,7 @@ document.addEventListener('DOMContentLoaded', function() {
{%- if watch.is_pdf -%}<img class="status-icon" src="{{url_for('static_content', group='images', filename='pdf-icon.svg')}}" alt="Converting PDF to text" >{%- endif -%}
{%- if watch.has_browser_steps -%}<img class="status-icon status-browsersteps" src="{{url_for('static_content', group='images', filename='steps.svg')}}" alt="Browser Steps is enabled" >{%- endif -%}

<div class="error-text" style="display:none;">{{ watch.compile_error_texts(has_proxies=datastore.proxy_list)|safe }}</div>
<div class="error-text" style="display:none;">{{ watch.compile_error_texts(has_proxies=datastore.proxy_list) }}</div>

{%- if watch['processor'] == 'text_json_diff' -%}
{%- if watch['has_ldjson_price_data'] and not watch['track_ldjson_price_data'] -%}
@@ -212,9 +214,14 @@ document.addEventListener('DOMContentLoaded', function() {
<li id="post-list-mark-views" class="{%- if has_unviewed -%}has-unviewed{%- endif -%}" style="display: none;" >
<a href="{{url_for('ui.mark_all_viewed',with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag " id="mark-all-viewed">Mark all viewed</a>
</li>
{%- if active_tag_uuid -%}
<li id="post-list-mark-views-tag">
<a href="{{url_for('ui.mark_all_viewed', tag=active_tag_uuid) }}" class="pure-button button-tag " id="mark-all-viewed">Mark all viewed in '{{active_tag.title}}'</a>
</li>
{%- endif -%}
<li>
<a href="{{ url_for('ui.form_watch_checknow', tag=active_tag_uuid, with_errors=request.args.get('with_errors',0)) }}" class="pure-button button-tag" id="recheck-all">Recheck
all {%- if active_tag_uuid-%} in "{{active_tag.title}}"{%endif%}</a>
all {% if active_tag_uuid %} in '{{active_tag.title}}'{%endif%}</a>
</li>
<li>
<a href="{{ url_for('rss.feed', tag=active_tag_uuid, token=app_rss_token)}}"><img alt="RSS Feed" id="feed-icon" src="{{url_for('static_content', group='images', filename='generic_feed-icon.svg')}}" height="15"></a>
@@ -16,7 +16,6 @@ operator_choices = [
("==", "Equals"),
("!=", "Not Equals"),
("in", "Contains"),
("!in", "Does Not Contain"),
]

# Fields available in the rules

@@ -21,17 +21,21 @@ def register_operators():
def length_max(_, text, strlen):
return len(text) <= int(strlen)

# ✅ Custom function for case-insensitive regex matching
# Custom function for case-insensitive regex matching
def contains_regex(_, text, pattern):
"""Returns True if `text` contains `pattern` (case-insensitive regex match)."""
return bool(re.search(pattern, str(text), re.IGNORECASE))

# ✅ Custom function for NOT matching case-insensitive regex
# Custom function for NOT matching case-insensitive regex
def not_contains_regex(_, text, pattern):
"""Returns True if `text` does NOT contain `pattern` (case-insensitive regex match)."""
return not bool(re.search(pattern, str(text), re.IGNORECASE))

def not_contains(_, text, pattern):
return not pattern in text

return {
"!in": not_contains,
"!contains_regex": not_contains_regex,
"contains_regex": contains_regex,
"ends_with": ends_with,
@@ -43,6 +47,7 @@ def register_operators():
@hookimpl
def register_operator_choices():
return [
("!in", "Does NOT Contain"),
("starts_with", "Text Starts With"),
("ends_with", "Text Ends With"),
("length_min", "Length minimum"),
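The operator changes above move "!in" out of the static choices and register it as a plain not_contains alongside the case-insensitive regex operators. A short sketch of how such an operator table is applied to scraped text (the rule shape here is illustrative, not the plugin's exact schema):

    # Sketch: applying the registered operators to a value (illustrative only).
    import re

    def contains_regex(_, text, pattern):
        return bool(re.search(pattern, str(text), re.IGNORECASE))

    def not_contains(_, text, pattern):
        return pattern not in text

    OPERATORS = {"contains_regex": contains_regex, "!in": not_contains}

    page_text = "Price: 19.99 EUR - in stock"
    print(OPERATORS["contains_regex"](None, page_text, r"\d+\.\d+ eur"))  # True
    print(OPERATORS["!in"](None, page_text, "sold out"))                  # True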
@@ -51,7 +51,15 @@ async def capture_full_page(page):
await page.setViewport({'width': page.viewport['width'], 'height': step_size})

while y < min(page_height, SCREENSHOT_MAX_TOTAL_HEIGHT):
await page.evaluate(f"window.scrollTo(0, {y})")
# better than scrollTo incase they override it in the page
await page.evaluate(
"""(y) => {
document.documentElement.scrollTop = y;
document.body.scrollTop = y;
}""",
y
)

screenshot_chunks.append(await page.screenshot(type_='jpeg',
fullPage=False,
quality=int(os.getenv("SCREENSHOT_QUALITY", 72))))
@@ -149,7 +157,11 @@ class fetcher(Fetcher):
):
import re
self.delete_browser_steps_screenshots()
extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay

n = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
extra_wait = min(n, 15)

logger.debug(f"Extra wait set to {extra_wait}s, requested was {n}s.")

from pyppeteer import Pyppeteer
pyppeteer_instance = Pyppeteer()
@@ -165,7 +177,7 @@ class fetcher(Fetcher):
except websockets.exceptions.InvalidURI:
raise BrowserConnectError(msg=f"Error connecting to the browser, check your browser connection address (should be ws:// or wss://")
except Exception as e:
raise BrowserConnectError(msg=f"Error connecting to the browser {str(e)}")
raise BrowserConnectError(msg=f"Error connecting to the browser - Exception '{str(e)}'")

# Better is to launch chrome with the URL as arg
# non-headless - newPage() will launch an extra tab/window, .browser should already contain 1 page/tab
@@ -227,13 +239,35 @@ class fetcher(Fetcher):
# browsersteps_interface = steppable_browser_interface()
# browsersteps_interface.page = self.page

response = await self.page.goto(url, waitUntil="load")
async def handle_frame_navigation(event):
logger.debug(f"Frame navigated: {event}")
w = extra_wait - 2 if extra_wait > 4 else 2
logger.debug(f"Waiting {w} seconds before calling Page.stopLoading...")
await asyncio.sleep(w)
logger.debug("Issuing stopLoading command...")
await self.page._client.send('Page.stopLoading')
logger.debug("stopLoading command sent!")

if response is None:
await self.page.close()
await browser.close()
logger.warning("Content Fetcher > Response object was none (as in, the response from the browser was empty, not just the content)")
raise EmptyReply(url=url, status_code=None)
self.page._client.on('Page.frameStartedNavigating', lambda event: asyncio.create_task(handle_frame_navigation(event)))
self.page._client.on('Page.frameStartedLoading', lambda event: asyncio.create_task(handle_frame_navigation(event)))
self.page._client.on('Page.frameStoppedLoading', lambda event: logger.debug(f"Frame stopped loading: {event}"))

response = None
attempt=0
while not response:
logger.debug(f"Attempting page fetch {url} attempt {attempt}")
response = await self.page.goto(url)
await asyncio.sleep(1 + extra_wait)
if response:
break
if not response:
logger.warning("Page did not fetch! trying again!")
if response is None and attempt>=2:
await self.page.close()
await browser.close()
logger.warning(f"Content Fetcher > Response object was none (as in, the response from the browser was empty, not just the content) exiting attmpt {attempt}")
raise EmptyReply(url=url, status_code=None)
attempt+=1

self.headers = response.headers

@@ -276,7 +310,6 @@ class fetcher(Fetcher):
# if self.browser_steps_get_valid_steps():
# self.iterate_browser_steps()

await asyncio.sleep(1 + extra_wait)

# So we can find an element on the page where its selector was entered manually (maybe not xPath etc)
# Setup the xPath/VisualSelector scraper
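Two behavioural changes above are easy to miss: the extra wait derived from WEBDRIVER_DELAY_BEFORE_CONTENT_READY is now capped at 15 seconds, and page.goto() is retried a few times before raising EmptyReply. The capping on its own, with example numbers (render_extract_delay would normally come from the watch settings):

    # Sketch: the wait cap introduced above - the requested delay is clamped to 15s
    # (example values only, not the fetcher class itself).
    import os

    render_extract_delay = 8
    requested = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + render_extract_delay
    extra_wait = min(requested, 15)
    print(f"Extra wait set to {extra_wait}s, requested was {requested}s.")  # 13s with the defaults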
@@ -18,6 +18,7 @@ async () => {
'back-order or out of stock',
'backordered',
'benachrichtigt mich', // notify me
'binnenkort leverbaar', // coming soon
'brak na stanie',
'brak w magazynie',
'coming soon',
@@ -85,6 +86,7 @@ async () => {
'tidak tersedia',
'tijdelijk uitverkocht',
'tiket tidak tersedia',
'to subscribe to back in stock',
'tükendi',
'unavailable nearby',
'unavailable tickets',
@@ -119,8 +121,7 @@ async () => {
return text.toLowerCase().trim();
}

const negateOutOfStockRegex = new RegExp('^([0-9] in stock|add to cart|in stock)', 'ig');

const negateOutOfStockRegex = new RegExp('^([0-9] in stock|add to cart|in stock|arrives approximately)', 'ig');
// The out-of-stock or in-stock-text is generally always above-the-fold
// and often below-the-fold is a list of related products that may or may not contain trigger text
// so it's good to filter to just the 'above the fold' elements
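The negation list above gains 'arrives approximately', so pages showing that phrase are treated as in stock. The same anchored, case-insensitive test expressed in Python to make the behaviour explicit (the sample strings are invented):

    # Sketch: the negate-out-of-stock test from the JS above, in Python
    # (sample strings invented; '^' means the phrase must start the element text).
    import re

    negate_out_of_stock = re.compile(r"^([0-9] in stock|add to cart|in stock|arrives approximately)", re.IGNORECASE)

    for text in ("In stock - ships today", "Arrives approximately 3-5 days", "Out of stock"):
        print(text, "->", bool(negate_out_of_stock.match(text)))
    # True, True, False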
@@ -3,6 +3,41 @@ import asyncio
from blinker import signal
from loguru import logger


class NotificationQueue(queue.Queue):
"""
Extended Queue that sends a 'notification_event' signal when notifications are added.

This class extends the standard Queue and adds a signal emission after a notification
is put into the queue. The signal includes the watch UUID if available.
"""

def __init__(self, maxsize=0):
super().__init__(maxsize)
try:
self.notification_event_signal = signal('notification_event')
except Exception as e:
logger.critical(f"Exception creating notification_event signal: {e}")

def put(self, item, block=True, timeout=None):
# Call the parent's put method first
super().put(item, block, timeout)

# After putting the notification in the queue, emit signal with watch UUID
try:
if self.notification_event_signal and isinstance(item, dict):
watch_uuid = item.get('uuid')
if watch_uuid:
# Send the notification_event signal with the watch UUID
self.notification_event_signal.send(watch_uuid=watch_uuid)
logger.trace(f"NotificationQueue: Emitted notification_event signal for watch UUID {watch_uuid}")
else:
# Send signal without UUID for system notifications
self.notification_event_signal.send()
logger.trace("NotificationQueue: Emitted notification_event signal for system notification")
except Exception as e:
logger.error(f"Exception emitting notification_event signal: {e}")

class SignalPriorityQueue(queue.PriorityQueue):
"""
Extended PriorityQueue that sends a signal when items with a UUID are added.
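NotificationQueue above emits a blinker 'notification_event' signal on every put(). A minimal sketch of the receiving side of that signal (the receiver below is hypothetical; inside the app the SignalHandler shown further down plays this role):

    # Sketch: subscribing to the 'notification_event' signal emitted by
    # NotificationQueue.put() (hypothetical receiver, for illustration).
    from blinker import signal

    def on_notification_event(sender, watch_uuid=None, **kwargs):
        print(f"notification queued for watch {watch_uuid}")

    signal('notification_event').connect(on_notification_event, weak=False)

    # Putting a dict that carries a 'uuid' key onto the queue now fires the receiver:
    # notification_q.put({'uuid': 'some-watch-uuid', 'notification_urls': [...]})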
@@ -12,7 +12,7 @@ from blinker import signal

from changedetectionio.strtobool import strtobool
from threading import Event
from changedetectionio.custom_queue import SignalPriorityQueue, AsyncSignalPriorityQueue
from changedetectionio.custom_queue import SignalPriorityQueue, AsyncSignalPriorityQueue, NotificationQueue
from changedetectionio import worker_handler

from flask import (
@@ -52,7 +52,7 @@ extra_stylesheets = []

# Use async queue by default, keep sync for backward compatibility
update_q = AsyncSignalPriorityQueue() if worker_handler.USE_ASYNC_WORKERS else SignalPriorityQueue()
notification_q = queue.Queue()
notification_q = NotificationQueue()
MAX_QUEUE_SIZE = 2000

app = Flask(__name__,
@@ -100,7 +100,7 @@ watch_api = Api(app, decorators=[csrf.exempt])
def init_app_secret(datastore_path):
secret = ""

path = "{}/secret.txt".format(datastore_path)
path = os.path.join(datastore_path, "secret.txt")

try:
with open(path, "r") as f:
@@ -738,7 +738,7 @@ class globalSettingsRequestForm(Form):
return False

class globalSettingsApplicationUIForm(Form):
open_diff_in_new_tab = BooleanField('Open diff page in a new tab', default=True, validators=[validators.Optional()])
open_diff_in_new_tab = BooleanField("Open 'History' page in a new tab", default=True, validators=[validators.Optional()])
socket_io_enabled = BooleanField('Realtime UI Updates Enabled', default=True, validators=[validators.Optional()])

# datastore.data['settings']['application']..
@@ -8,6 +8,7 @@ import re
from pathlib import Path
from loguru import logger

from .. import safe_jinja
from ..html_tools import TRANSLATE_WHITESPACE_TABLE

# Allowable protocols, protects against javascript: etc
@@ -691,11 +692,11 @@ class model(watch_base):
output.append(str(Markup(f"<div class=\"notification-error\"><a href=\"{url_for('settings.notification_logs')}\">{ self.get('last_notification_error') }</a></div>")))

else:
# Lo_Fi version
# Lo_Fi version - no app context, cant rely on Jinja2 Markup
if last_error:
output.append(str(Markup(last_error)))
output.append(safe_jinja.render_fully_escaped(last_error))
if self.get('last_notification_error'):
output.append(str(Markup(self.get('last_notification_error'))))
output.append(safe_jinja.render_fully_escaped(self.get('last_notification_error')))

res = "\n".join(output)
return res
@@ -29,6 +29,11 @@ class SignalHandler:
watch_delete_signal = signal('watch_deleted')
watch_delete_signal.connect(self.handle_deleted_signal, weak=False)

# Connect to the notification_event signal
notification_event_signal = signal('notification_event')
notification_event_signal.connect(self.handle_notification_event, weak=False)
logger.info("SignalHandler: Connected to notification_event signal")

# Create and start the queue update thread using standard threading
import threading
self.polling_emitter_thread = threading.Thread(
@@ -89,6 +94,23 @@ class SignalHandler:
except Exception as e:
logger.error(f"Socket.IO error in handle_queue_length: {str(e)}")

def handle_notification_event(self, *args, **kwargs):
"""Handle notification_event signal and emit to all clients"""
try:
watch_uuid = kwargs.get('watch_uuid')
logger.debug(f"SignalHandler: Notification event received for watch UUID: {watch_uuid}")

# Emit the notification event to all connected clients
self.socketio_instance.emit("notification_event", {
"watch_uuid": watch_uuid,
"event_timestamp": time.time()
})

logger.trace(f"Socket.IO: Emitted notification_event for watch UUID {watch_uuid}")

except Exception as e:
logger.error(f"Socket.IO error in handle_notification_event: {str(e)}")


def polling_emit_running_or_queued_watches_threaded(self):
"""Threading version of polling for Windows compatibility"""
@@ -10,9 +10,15 @@ import os

JINJA2_MAX_RETURN_PAYLOAD_SIZE = 1024 * int(os.getenv("JINJA2_MAX_RETURN_PAYLOAD_SIZE_KB", 1024 * 10))


# This is used for notifications etc, so actually it's OK to send custom HTML such as <a href> etc, but it should limit what data is available.
# (Which also limits available functions that could be called)
def render(template_str, **args: t.Any) -> str:
jinja2_env = jinja2.sandbox.ImmutableSandboxedEnvironment(extensions=['jinja2_time.TimeExtension'])
output = jinja2_env.from_string(template_str).render(args)
return output[:JINJA2_MAX_RETURN_PAYLOAD_SIZE]

def render_fully_escaped(content):
env = jinja2.sandbox.ImmutableSandboxedEnvironment(autoescape=True)
template = env.from_string("{{ some_html|e }}")
return template.render(some_html=content)
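render_fully_escaped above is what the watch model now uses for its 'Lo-Fi' error output, so raw error strings can no longer smuggle markup into the watch list. Its effect in isolation (a standalone copy of the function added above, for illustration):

    # Sketch: what render_fully_escaped does to untrusted error text.
    import jinja2.sandbox

    def render_fully_escaped(content):
        env = jinja2.sandbox.ImmutableSandboxedEnvironment(autoescape=True)
        template = env.from_string("{{ some_html|e }}")
        return template.render(some_html=content)

    print(render_fully_escaped('<img src=x onerror="alert(1)"> timed out'))
    # &lt;img src=x onerror=&#34;alert(1)&#34;&gt; timed out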
@@ -48,13 +48,14 @@ $(document).ready(function () {
// Connect to Socket.IO on the same host/port, with path from template
const socket = io({
path: socketio_url, // This will be the path prefix like "/app/socket.io" from the template
transports: ['polling', 'websocket'], // Try WebSocket but fall back to polling
reconnectionDelay: 1000,
reconnectionAttempts: 15
transports: ['websocket', 'polling'],
reconnectionDelay: 3000,
reconnectionAttempts: 25
});

// Connection status logging
socket.on('connect', function () {
$('#realtime-conn-error').hide();
console.log('Socket.IO connected with path:', socketio_url);
console.log('Socket transport:', socket.io.engine.transport.name);
bindSocketHandlerButtonsEvents(socket);
@@ -74,7 +75,8 @@ $(document).ready(function () {

socket.on('disconnect', function (reason) {
console.log('Socket.IO disconnected, reason:', reason);
$('.ajax-op').off('.socketHandlerNamespace')
$('.ajax-op').off('.socketHandlerNamespace');
$('#realtime-conn-error').show();
});

socket.on('queue_size', function (data) {
@@ -92,6 +94,16 @@ $(document).ready(function () {
}
});

socket.on('notification_event', function (data) {
console.log(`Stub handler for notification_event ${data.watch_uuid}`)
});

socket.on('watch_deleted', function (data) {
$('tr[data-watch-uuid="' + data.uuid + '"] td').fadeOut(500, function () {
$(this).closest('tr').remove();
});
});

// Listen for periodically emitted watch data
console.log('Adding watch_update event listener');


@@ -16,6 +16,12 @@ $(function () {
$('#op_extradata').val(prompt("Enter a tag name"));
});


$('.history-link').click(function (e) {
// Incase they click 'back' in the browser, it should be removed.
$(this).closest('tr').removeClass('unviewed');
});

$('.with-share-link > *').click(function () {
$("#copied-clipboard").remove();


@@ -71,6 +71,7 @@
--color-text-watch-tag-list: rgba(231, 0, 105, 0.4);
--color-background-new-watch-form: rgba(0, 0, 0, 0.05);
--color-background-new-watch-input: var(--color-white);
--color-background-new-watch-input-transparent: rgba(255, 255, 255, 0.1);
--color-text-new-watch-input: var(--color-text);
--color-border-input: var(--color-grey-500);
--color-shadow-input: var(--color-grey-400);
@@ -97,6 +98,7 @@ html[data-darkmode="true"] {
--color-background-gradient-second: #1e316c;
--color-background-gradient-third: #4d2c64;
--color-background-new-watch-input: var(--color-grey-100);
--color-background-new-watch-input-transparent: var(--color-grey-100);
--color-text-new-watch-input: var(--color-text);
--color-background-table-thead: var(--color-grey-200);
--color-table-background: var(--color-grey-300);

@@ -78,6 +78,7 @@
--color-text-watch-tag-list: rgba(231, 0, 105, 0.4);
--color-background-new-watch-form: rgba(0, 0, 0, 0.05);
--color-background-new-watch-input: var(--color-white);
--color-background-new-watch-input-transparent: rgba(255, 255, 255, 0.1);
--color-text-new-watch-input: var(--color-text);

--color-border-input: var(--color-grey-500);
@@ -112,6 +113,7 @@ html[data-darkmode="true"] {
--color-background-gradient-third: #4d2c64;

--color-background-new-watch-input: var(--color-grey-100);
--color-background-new-watch-input-transparent: var(--color-grey-100);
--color-text-new-watch-input: var(--color-text);
--color-background-table-thead: var(--color-grey-200);
--color-table-background: var(--color-grey-300);

@@ -17,11 +17,13 @@
&.title-col {
word-break: break-all;
white-space: normal;
a::after {
content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==);
margin: 0 3px 0 5px;
}
}

a.external::after {
content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==);
margin: 0 3px 0 5px;
}

}


@@ -185,7 +185,8 @@ code {
}

.box {
max-width: 80%;
max-width: 100%;
margin: 0 1em;
flex-direction: column;
display: flex;
justify-content: center;
@@ -279,7 +280,7 @@ a.pure-button-selected {
font-size: 65%;
border-bottom-left-radius: initial;
border-bottom-right-radius: initial;

margin-right: 4px;
&.active {
background: var(--color-background-button-tag-active);
font-weight: bold;
@@ -372,11 +373,32 @@ label {
}
}

// Some field colouring for transperant field
.pure-form input[type=text].transparent-field {
background-color: var(--color-background-new-watch-input-transparent) !important;
color: var(--color-white) !important;
border: 1px solid rgba(255, 255, 255, 0.2) !important;
box-shadow: none !important;
-webkit-box-shadow: none !important;
&::placeholder {
opacity: 0.5;
color: rgba(255, 255, 255, 0.7);
font-weight: lighter;
}
}

#new-watch-form {
background: var(--color-background-new-watch-form);
padding: 1em;
border-radius: 10px;
margin-bottom: 1em;
max-width: 100%;

#url {
&::placeholder {
font-weight: bold;
}
}

input {
display: inline-block;
@@ -397,12 +419,13 @@ label {
font-weight: bold;
}

#watch-add-wrapper-zone {

#watch-add-wrapper-zone {
@media only screen and (min-width: 760px) {
display: flex;
gap: 0.3rem;
flex-direction: row;
min-width: 70vw;
}
/* URL field grows always, other stay static in width */
> span {
@@ -424,6 +447,22 @@ label {
}
}
}

#watch-group-tag {
font-size: 0.9rem;
padding: 0.3rem;
display: flex;
align-items: center;
gap: 0.5rem;
color: var(--color-white);
label, input {
margin: 0;
}

input {
flex: 1;
}
}
}


@@ -620,10 +659,6 @@ footer {

@media only screen and (max-width: 760px),
(min-device-width: 768px) and (max-device-width: 1024px) {
.box {
max-width: 95%
}

.edit-form {
padding: 0.5em;
margin: 0;
@@ -1143,16 +1178,14 @@ ul {
color: #fff;
ul {
padding: 0.3rem;

li {
list-style: none;
font-size: 0.8rem;
font-size: 0.9rem;
> * {
display: inline-block;
}
}
}

}

.restock-label {
@@ -1190,3 +1223,12 @@ ul {
vertical-align: middle;
}

#realtime-conn-error {
position: absolute;
bottom: 0;
left: 30px;
background: var(--color-warning);
padding: 10px;
font-size: 0.8rem;
color: #fff;
}

@@ -322,6 +322,7 @@ ul#requests-extra_browsers {
--color-text-watch-tag-list: rgba(231, 0, 105, 0.4);
--color-background-new-watch-form: rgba(0, 0, 0, 0.05);
--color-background-new-watch-input: var(--color-white);
--color-background-new-watch-input-transparent: rgba(255, 255, 255, 0.1);
--color-text-new-watch-input: var(--color-text);
--color-border-input: var(--color-grey-500);
--color-shadow-input: var(--color-grey-400);
@@ -348,6 +349,7 @@ html[data-darkmode="true"] {
--color-background-gradient-second: #1e316c;
--color-background-gradient-third: #4d2c64;
--color-background-new-watch-input: var(--color-grey-100);
--color-background-new-watch-input-transparent: var(--color-grey-100);
--color-text-new-watch-input: var(--color-text);
--color-background-table-thead: var(--color-grey-200);
--color-table-background: var(--color-grey-300);
@@ -537,9 +539,9 @@ body.preview-text-enabled {
.watch-table td.title-col {
word-break: break-all;
white-space: normal; }
.watch-table td.title-col a::after {
content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==);
margin: 0 3px 0 5px; }
.watch-table td a.external::after {
content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==);
margin: 0 3px 0 5px; }
.watch-table th {
white-space: nowrap; }
.watch-table th a {
@@ -826,7 +828,8 @@ code {
background: var(--color-text-watch-tag-list); }

.box {
max-width: 80%;
max-width: 100%;
margin: 0 1em;
flex-direction: column;
display: flex;
justify-content: center; }
@@ -899,7 +902,8 @@ a.pure-button-selected {
color: var(--color-text-button);
font-size: 65%;
border-bottom-left-radius: initial;
border-bottom-right-radius: initial; }
border-bottom-right-radius: initial;
margin-right: 4px; }
.button-tag.active {
background: var(--color-background-button-tag-active);
font-weight: bold; }
@@ -962,11 +966,25 @@ label:hover {
#token-table.pure-table th {
font-size: 80%; }

.pure-form input[type=text].transparent-field {
background-color: var(--color-background-new-watch-input-transparent) !important;
color: var(--color-white) !important;
border: 1px solid rgba(255, 255, 255, 0.2) !important;
box-shadow: none !important;
-webkit-box-shadow: none !important; }
.pure-form input[type=text].transparent-field::placeholder {
opacity: 0.5;
color: rgba(255, 255, 255, 0.7);
font-weight: lighter; }

#new-watch-form {
background: var(--color-background-new-watch-form);
padding: 1em;
border-radius: 10px;
margin-bottom: 1em; }
margin-bottom: 1em;
max-width: 100%; }
#new-watch-form #url::placeholder {
font-weight: bold; }
#new-watch-form input {
display: inline-block;
margin-bottom: 5px; }
@@ -984,7 +1002,8 @@ label:hover {
#new-watch-form #watch-add-wrapper-zone {
display: flex;
gap: 0.3rem;
flex-direction: row; } }
flex-direction: row;
min-width: 70vw; } }
#new-watch-form #watch-add-wrapper-zone > span {
flex-grow: 0; }
#new-watch-form #watch-add-wrapper-zone > span input {
@@ -995,6 +1014,17 @@ label:hover {
@media only screen and (max-width: 760px) {
#new-watch-form #watch-add-wrapper-zone #url {
width: 100%; } }
#new-watch-form #watch-group-tag {
font-size: 0.9rem;
padding: 0.3rem;
display: flex;
align-items: center;
gap: 0.5rem;
color: var(--color-white); }
#new-watch-form #watch-group-tag label, #new-watch-form #watch-group-tag input {
margin: 0; }
#new-watch-form #watch-group-tag input {
flex: 1; }

#diff-col {
padding-left: 40px; }
@@ -1129,8 +1159,6 @@ footer {
gap: 1em; }

@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 1024px) {
.box {
max-width: 95%; }
.edit-form {
padding: 0.5em;
margin: 0; }
@@ -1506,7 +1534,7 @@ ul {
padding: 0.3rem; }
#quick-watch-processor-type ul li {
list-style: none;
font-size: 0.8rem; }
font-size: 0.9rem; }
#quick-watch-processor-type ul li > * {
display: inline-block; }

@@ -1535,3 +1563,12 @@ ul {
height: 21px;
padding: 2px;
vertical-align: middle; }

#realtime-conn-error {
position: absolute;
bottom: 0;
left: 30px;
background: var(--color-warning);
padding: 10px;
font-size: 0.8rem;
color: #fff; }
@@ -13,6 +13,7 @@ import json
import os
import re
import secrets
import sys
import threading
import time
import uuid as uuid_builder
@@ -45,7 +46,7 @@ class ChangeDetectionStore:
        # logging.basicConfig(filename='/dev/stdout', level=logging.INFO)
        self.__data = App.model()
        self.datastore_path = datastore_path
        self.json_store_path = "{}/url-watches.json".format(self.datastore_path)
        self.json_store_path = os.path.join(self.datastore_path, "url-watches.json")
        logger.info(f"Datastore path is '{self.json_store_path}'")
        self.needs_write = False
        self.start_time = time.time()
@@ -118,14 +119,12 @@ class ChangeDetectionStore:
            test_list = self.proxy_list

        # Helper to remove password protection
        password_reset_lockfile = "{}/removepassword.lock".format(self.datastore_path)
        password_reset_lockfile = os.path.join(self.datastore_path, "removepassword.lock")
        if path.isfile(password_reset_lockfile):
            self.__data['settings']['application']['password'] = False
            unlink(password_reset_lockfile)

        if not 'app_guid' in self.__data:
            import os
            import sys
            if "pytest" in sys.modules or "PYTEST_CURRENT_TEST" in os.environ:
                self.__data['app_guid'] = "test-" + str(uuid_builder.uuid4())
            else:
@@ -386,9 +385,9 @@ class ChangeDetectionStore:
        return new_uuid

    def visualselector_data_is_ready(self, watch_uuid):
        output_path = "{}/{}".format(self.datastore_path, watch_uuid)
        screenshot_filename = "{}/last-screenshot.png".format(output_path)
        elements_index_filename = "{}/elements.deflate".format(output_path)
        output_path = os.path.join(self.datastore_path, watch_uuid)
        screenshot_filename = os.path.join(output_path, "last-screenshot.png")
        elements_index_filename = os.path.join(output_path, "elements.deflate")
        if path.isfile(screenshot_filename) and path.isfile(elements_index_filename) :
            return True

@@ -412,11 +411,7 @@ class ChangeDetectionStore:
            # system was out of memory, out of RAM etc
            with open(self.json_store_path+".tmp", 'w') as json_file:
                # Use compact JSON in production for better performance
                debug_mode = os.environ.get('CHANGEDETECTION_DEBUG', 'false').lower() == 'true'
                if debug_mode:
                    json.dump(data, json_file, indent=4)
                else:
                    json.dump(data, json_file, separators=(',', ':'))
                json.dump(data, json_file, indent=2)
            os.replace(self.json_store_path+".tmp", self.json_store_path)
        except Exception as e:
            logger.error(f"Error writing JSON!! (Main JSON file save was skipped) : {str(e)}")
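
The hunk above keeps the existing write-then-replace save: the datastore is serialised to url-watches.json.tmp and then swapped into place with os.replace(). A minimal standalone sketch of that pattern follows; the function and file names are illustrative only, not ChangeDetectionStore's API.

# Minimal sketch of the write-then-replace save pattern shown above.
# Illustrative only: the function and file names are not changedetection.io's API.
import json
import os

def save_json_atomically(data: dict, json_store_path: str, indent: int = 2) -> None:
    tmp_path = json_store_path + ".tmp"
    # Write the full document to a temporary file first, so a crash or
    # out-of-memory condition mid-write cannot truncate the real datastore.
    with open(tmp_path, 'w') as json_file:
        json.dump(data, json_file, indent=indent)
    # os.replace() is atomic on the same filesystem, so readers only ever
    # see the old complete file or the new complete file.
    os.replace(tmp_path, json_store_path)

if __name__ == "__main__":
    save_json_atomically({"watching": {}}, "/tmp/url-watches-example.json")
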
@@ -478,7 +473,7 @@ class ChangeDetectionStore:

        # Load from external config file
        if path.isfile(proxy_list_file):
            with open("{}/proxies.json".format(self.datastore_path)) as f:
            with open(os.path.join(self.datastore_path, "proxies.json")) as f:
                proxy_list = json.load(f)

        # Mapping from UI config if available
@@ -736,10 +731,10 @@ class ChangeDetectionStore:
            logger.critical(f"Applying update_{update_n}")
            # Won't exist on fresh installs
            if os.path.exists(self.json_store_path):
                shutil.copyfile(self.json_store_path, self.datastore_path+"/url-watches-before-{}.json".format(update_n))
                shutil.copyfile(self.json_store_path, os.path.join(self.datastore_path, f"url-watches-before-{update_n}.json"))

            try:
                update_method = getattr(self, "update_{}".format(update_n))()
                update_method = getattr(self, f"update_{update_n}")()
            except Exception as e:
                logger.error(f"Error while trying update_{update_n}")
                logger.error(e)

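The migration hunk above first backs up url-watches.json and then resolves each numbered update_N method with getattr() before calling it. A compact sketch of that numbered-migration pattern, using hypothetical class and method names rather than the real ChangeDetectionStore:

# Sketch of the numbered schema-migration pattern used above.
# Class and method names are illustrative, not changedetection.io's code.
class TinyStore:
    def __init__(self):
        self.schema_version = 0
        self.data = {"watching": {}}

    # Each migration is a method named update_<n>; adding update_3, update_4, ...
    # is all that is needed to extend the chain.
    def update_1(self):
        self.data.setdefault("settings", {})

    def update_2(self):
        for watch in self.data["watching"].values():
            watch.setdefault("paused", False)

    def run_updates(self):
        n = self.schema_version + 1
        while hasattr(self, f"update_{n}"):
            # A real store would back up the JSON file here, as the diff does
            # with shutil.copyfile(..., ".../url-watches-before-<n>.json").
            getattr(self, f"update_{n}")()
            self.schema_version = n
            n += 1

TinyStore().run_updates()
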
@@ -74,7 +74,7 @@
                </tr>
                <tr>
                    <td><code>{{ '{{watch_tag}}' }}</code></td>
                    <td>The watch label / tag</td>
                    <td>The watch group / tag</td>
                </tr>
                <tr>
                    <td><code>{{ '{{preview_url}}' }}</code></td>

@@ -236,6 +236,7 @@
    <script src="{{url_for('static_content', group='js', filename='toggle-theme.js')}}" defer></script>

    <div id="checking-now-fixed-tab" style="display: none;"><span class="spinner"></span><span> Checking now</span></div>
    <div id="realtime-conn-error" style="display:none">Offline</div>
  </body>

</html>

@@ -1,72 +0,0 @@
import asyncio
import socketio
from aiohttp import web

SOCKETIO_URL = 'ws://localhost.localdomain:5005'
SOCKETIO_PATH = "/socket.io"
NUM_CLIENTS = 1

clients = []
shutdown_event = asyncio.Event()

class WatchClient:
    def __init__(self, client_id: int):
        self.client_id = client_id
        self.i_got_watch_update_event = False
        self.sio = socketio.AsyncClient(reconnection_attempts=50, reconnection_delay=1)

        @self.sio.event
        async def connect():
            print(f"[Client {self.client_id}] Connected")

        @self.sio.event
        async def disconnect():
            print(f"[Client {self.client_id}] Disconnected")

        @self.sio.on("watch_update")
        async def on_watch_update(watch):
            self.i_got_watch_update_event = True
            print(f"[Client {self.client_id}] Received update: {watch}")

    async def run(self):
        try:
            await self.sio.connect(SOCKETIO_URL, socketio_path=SOCKETIO_PATH, transports=["websocket", "polling"])
            await self.sio.wait()
        except Exception as e:
            print(f"[Client {self.client_id}] Connection error: {e}")

async def handle_check(request):
    all_received = all(c.i_got_watch_update_event for c in clients)
    result = "yes" if all_received else "no"
    print(f"Received HTTP check — returning '{result}'")
    shutdown_event.set()  # Signal shutdown
    return web.Response(text=result)

async def start_http_server():
    app = web.Application()
    app.add_routes([web.get('/did_all_clients_get_watch_update', handle_check)])
    runner = web.AppRunner(app)
    await runner.setup()
    site = web.TCPSite(runner, '0.0.0.0', 6666)
    await site.start()

async def main():
    #await start_http_server()

    for i in range(NUM_CLIENTS):
        client = WatchClient(i)
        clients.append(client)
        asyncio.create_task(client.run())

    await shutdown_event.wait()

    print("Shutting down...")
    # Graceful disconnect
    for c in clients:
        await c.sio.disconnect()

if __name__ == "__main__":
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        print("Interrupted")
@@ -236,39 +236,41 @@ def test_group_tag_notification(client, live_server, measure_memory_usage):
    assert b'Deleted' in res.data

def test_limit_tag_ui(client, live_server, measure_memory_usage):


    test_url = url_for('test_endpoint', _external=True)
    urls=[]
    test_url = url_for('test_random_content_endpoint', _external=True)

    for i in range(20):
        urls.append(test_url+"?x="+str(i)+" test-tag")

    for i in range(20):
        urls.append(test_url+"?non-grouped="+str(i))

    res = client.post(
    # A space can label the tag, only the first one will have a tag
    client.post(
        url_for("imports.import_page"),
        data={"urls": "\r\n".join(urls)},
        data={"urls": f"{test_url} test-tag\r\n{test_url}"},
        follow_redirects=True
    )

    assert b"40 Imported" in res.data
    tag_uuid = get_UUID_for_tag_name(client, name="test-tag")
    assert tag_uuid

    res = client.get(url_for("watchlist.index"))
    assert b'test-tag' in res.data
    client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)
    client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)

    # All should be here
    assert res.data.count(b'processor-text_json_diff') == 40
    # Should be both unviewed
    res = client.get(url_for("watchlist.index"))
    assert res.data.count(b' unviewed ') == 2

    tag_uuid = get_UUID_for_tag_name(client, name="test-tag")

    res = client.get(url_for("watchlist.index", tag=tag_uuid))
    # Now we recheck only the tag
    client.get(url_for('ui.mark_all_viewed', tag=tag_uuid), follow_redirects=True)
    wait_for_all_checks(client)

    # Should be only 1 unviewed
    res = client.get(url_for("watchlist.index"))
    assert res.data.count(b' unviewed ') == 1


    # Just a subset should be here
    assert b'test-tag' in res.data
    assert res.data.count(b'processor-text_json_diff') == 20
    assert b"object at" not in res.data
    res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data
    res = client.get(url_for("tags.delete_all"), follow_redirects=True)

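As the comment in the rewritten test notes, the import page treats anything after the first space on a line as the watch's group / tag name. A small illustration of that payload format; the URLs and tag names here are hypothetical:

# One URL per line; text after the first space becomes the group / tag
# (assumption drawn from the test above, example values are made up).
urls_payload = "\r\n".join([
    "https://example.com/page-1 electronics",  # imported with tag "electronics"
    "https://example.com/page-2",              # imported with no tag
])
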
@@ -79,3 +79,48 @@ def test_consistent_history(client, live_server, measure_memory_usage):
    json_db_file = os.path.join(live_server.app.config['DATASTORE'].datastore_path, 'url-watches.json')
    with open(json_db_file, 'r') as f:
        assert '"default"' not in f.read(), "'default' probably shouldnt be here, it came from when the 'default' Watch vars were accidently being saved"


def test_check_text_history_view(client, live_server):

    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write("<html>test-one</html>")

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("imports.import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data

    # Give the thread time to pick it up
    wait_for_all_checks(client)

    # Set second version, Make a change
    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write("<html>test-two</html>")

    client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)

    res = client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
    assert b'test-one' in res.data
    assert b'test-two' in res.data

    # Set third version, Make a change
    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write("<html>test-three</html>")

    client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)

    # It should remember the last viewed time, so the first difference is not shown
    res = client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
    assert b'test-three' in res.data
    assert b'test-two' in res.data
    assert b'test-one' not in res.data

    res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

@@ -419,13 +419,20 @@ def check_json_ext_filter(json_filter, client, live_server):
    res = client.get(url_for("watchlist.index"))
    assert b'unviewed' in res.data

    res = client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
    res = client.get(url_for("ui.ui_views.preview_page", uuid="first"))

    # We should never see 'ForSale' because we are selecting on 'Sold' in the rule,
    # But we should know it triggered ('unviewed' assert above)
    assert b'ForSale' not in res.data
    assert b'Sold' in res.data


    # And the difference should have both?

    res = client.get(url_for("ui.ui_views.diff_history_page", uuid="first"))
    assert b'ForSale' in res.data
    assert b'Sold' in res.data

    res = client.get(url_for("ui.form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data


@@ -5,8 +5,22 @@ from .util import live_server_setup, wait_for_all_checks
from .. import strtobool


# def test_setup(client, live_server, measure_memory_usage):
#    live_server_setup(live_server) # Setup on conftest per function
def set_original_response():
    test_return_data = """<html>
     <head><title>head title</title></head>
     <body>
     Some initial text<br>
     <p>Which is across multiple lines</p>
     <br>
     So let's see what happens. <br>
     <span class="foobar-detection" style='display:none'></span>
     </body>
     </html>
    """

    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write(test_return_data)
    return None

def test_bad_access(client, live_server, measure_memory_usage):

@@ -118,3 +132,33 @@ def test_xss(client, live_server, measure_memory_usage):
    assert b"<img src=x onerror=alert(" not in res.data
    assert b"<img" in res.data


def test_xss_watch_last_error(client, live_server, measure_memory_usage):
    set_original_response()
    # Add our URL to the import page
    res = client.post(
        url_for("imports.import_page"),
        data={"urls": url_for('test_endpoint', _external=True)},
        follow_redirects=True
    )

    assert b"1 Imported" in res.data

    wait_for_all_checks(client)
    res = client.post(
        url_for("ui.ui_edit.edit_page", uuid="first"),
        data={
            "include_filters": '<a href="https://foobar"></a><script>alert(123);</script>',
            "url": url_for('test_endpoint', _external=True),
            'fetch_backend': "html_requests"
        },
        follow_redirects=True
    )
    assert b"Updated watch." in res.data
    wait_for_all_checks(client)
    res = client.get(url_for("watchlist.index"))

assert b"<script>alert(123);</script>" not in res.data # this text should be there
|
||||
assert b'<a href="https://foobar"></a><script>alert(123);</script>' in res.data
|
||||
assert b"https://foobar" in res.data # this text should be there
|
||||
|
||||
|
||||
@@ -51,6 +51,9 @@ class TestJinja2SSTI(unittest.TestCase):
        for attempt in attempt_list:
            self.assertEqual(len(safe_jinja.render(attempt)), 0, f"string test '{attempt}' is correctly empty")

    def test_jinja2_escaped_html(self):
        x = safe_jinja.render_fully_escaped('woo <a href="https://google.com">dfdfd</a>')
        self.assertEqual(x, "woo &lt;a href=&#34;https://google.com&#34;&gt;dfdfd&lt;/a&gt;")


if __name__ == '__main__':

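The new unit test above expects render_fully_escaped() to return HTML-escaped text rather than live markup. A standalone illustration of that kind of full escaping using markupsafe; treating markupsafe as the underlying escaper is an assumption (safe_jinja may differ), and note that markupsafe encodes double quotes as &#34;.

# Illustration of full HTML escaping with markupsafe; this is an assumption
# about what safe_jinja.render_fully_escaped does, not its actual source.
from markupsafe import escape

raw = 'woo <a href="https://google.com">dfdfd</a>'
escaped = str(escape(raw))

# markupsafe encodes < > & " and ' as entities, so no tag survives:
print(escaped)  # woo &lt;a href=&#34;https://google.com&#34;&gt;dfdfd&lt;/a&gt;
assert "<a" not in escaped
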
@@ -69,6 +69,16 @@ services:
        # Maximum height of screenshots, default is 16000 px, screenshots will be clipped to this if exceeded.
        # RAM usage will be higher if you increase this.
        # - SCREENSHOT_MAX_HEIGHT=16000
        #
        # HTTPS SSL Mode for webserver, unset both of these, you may need to volume mount these files also.
        # ./cert.pem:/app/cert.pem and ./privkey.pem:/app/privkey.pem
        # - SSL_CERT_FILE=cert.pem
        # - SSL_PRIVKEY_FILE=privkey.pem
        #
        # LISTEN_HOST / "host", Same as -h
        # - LISTEN_HOST=::
        # - LISTEN_HOST=0.0.0.0


    # Comment out ports: when using behind a reverse proxy , enable networks: etc.
    ports:
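
The commented variables above are plain environment settings read by the application at startup. A generic sketch of how such values are typically consumed follows; it is illustrative only and is not changedetection.io's actual startup code.

# Generic sketch of consuming the environment variables documented above.
# Illustrative only; not changedetection.io's real startup logic.
import os

listen_host = os.environ.get("LISTEN_HOST", "0.0.0.0")
ssl_cert = os.environ.get("SSL_CERT_FILE")
ssl_key = os.environ.get("SSL_PRIVKEY_FILE")

# Only enable TLS when both the certificate and the private key are provided.
ssl_context = (ssl_cert, ssl_key) if ssl_cert and ssl_key else None
print(f"Would listen on {listen_host}, TLS {'enabled' if ssl_context else 'disabled'}")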