Compare commits

..

1 Commit

Author | SHA1 | Message | Date
dgtlmoon | c0381b8c60 | Adding patch for armv7 | 2025-07-11 19:27:37 +02:00
36 changed files with 2037 additions and 465 deletions

View File

@@ -18,7 +18,6 @@ RUN \
libxslt-dev \
openssl-dev \
python3-dev \
file \
zip \
zlib-dev && \
apk add --update --no-cache \

View File

@@ -54,8 +54,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
locales \
# For pdftohtml
poppler-utils \
# favicon type detection and other uses
file \
zlib1g \
&& apt-get clean && rm -rf /var/lib/apt/lists/*

View File

@@ -2,7 +2,7 @@
# Read more https://github.com/dgtlmoon/changedetection.io/wiki
__version__ = '0.50.7'
__version__ = '0.50.5'
from changedetectionio.strtobool import strtobool
from json.decoder import JSONDecodeError

View File

@@ -214,17 +214,8 @@ class WatchFavicon(Resource):
favicon_filename = watch.get_favicon_filename()
if favicon_filename:
try:
import magic
mime = magic.from_file(
os.path.join(watch.watch_data_dir, favicon_filename),
mime=True
)
except ImportError:
# Fallback, no python-magic
import mimetypes
mime, encoding = mimetypes.guess_type(favicon_filename)
import mimetypes
mime, encoding = mimetypes.guess_type(favicon_filename)
response = make_response(send_from_directory(watch.watch_data_dir, favicon_filename))
response.headers['Content-type'] = mime
response.headers['Cache-Control'] = 'max-age=300, must-revalidate' # Cache for 5 minutes, then revalidate
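
For reference, the longer side of this hunk sniffs the favicon's MIME type with python-magic and falls back to the standard-library mimetypes module when that library cannot be imported; a minimal standalone sketch of the pattern (the helper name is illustrative):

import mimetypes
import os


def guess_favicon_mime(watch_data_dir, favicon_filename):
    """Prefer python-magic (content sniffing), fall back to extension-based guessing."""
    try:
        import magic  # optional dependency; may be absent on some builds (e.g. armv7 without libmagic)
        return magic.from_file(os.path.join(watch_data_dir, favicon_filename), mime=True)
    except ImportError:
        # No python-magic available - guess from the filename extension instead
        mime, _encoding = mimetypes.guess_type(favicon_filename)
        return mime

The endpoint would then set the returned value as the Content-type header before sending the file.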

View File

@@ -1,7 +1,7 @@
{% extends 'base.html' %}
{% block content %}
{% from '_helpers.html' import render_field, render_checkbox_field, render_simple_field, render_button, render_time_schedule_form %}
{% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form %}
{% from '_common_fields.html' import render_common_settings_form %}
<script>
const notification_base_url="{{url_for('ui.ui_notification.ajax_callback_send_notification_test', mode="global-settings")}}";
@@ -23,7 +23,6 @@
<li class="tab"><a href="#fetching">Fetching</a></li>
<li class="tab"><a href="#filters">Global Filters</a></li>
<li class="tab"><a href="#ui-options">UI Options</a></li>
<li class="tab"><a href="#ai-options"><i data-feather="aperture" style="width: 14px; height: 14px; margin-right: 4px;"></i> AI</a></li>
<li class="tab"><a href="#api">API</a></li>
<li class="tab"><a href="#timedate">Time &amp Date</a></li>
<li class="tab"><a href="#proxies">CAPTCHA &amp; Proxies</a></li>
@@ -257,29 +256,6 @@ nav
{{ render_checkbox_field(form.application.form.ui.form.socket_io_enabled, class="socket_io_enabled") }}
<span class="pure-form-message-inline">Realtime UI Updates Enabled - (Restart required if this is changed)</span>
</div>
<div class="pure-control-group">
{{ render_checkbox_field(form.application.form.ui.form.favicons_enabled, class="") }}
<span class="pure-form-message-inline">Enable or Disable Favicons next to the watch list</span>
</div>
</div>
<div class="tab-pane-inner" id="ai-options">
<p><strong>New:</strong> click here (link to changedetection.io tutorial page) find out how to setup and example</p>
<br>
key fields should be some password type field so you can see its set but doesnt contain the key on view and doesnt lose it on save<br>
<div class="pure-control-group inline-radio">
{{ render_simple_field(form.application.form.ai.form.LLM_backend) }}
<span class="pure-form-message-inline">Preferred LLM connection</span>
</div>
<div class="pure-control-group">
{{ render_checkbox_field(form.application.form.ai.form.API_keys.form.openai) }}
<span class="pure-form-message-inline">Go here to read more about OpenAI integration</span>
</div>
<div class="pure-control-group">
{{ render_checkbox_field(form.application.form.ai.form.API_keys.form.gemini) }}
<span class="pure-form-message-inline">Go here to read more about Google Gemini integration</span>
</div>
</div>
<div class="tab-pane-inner" id="proxies">
<div id="recommended-proxy">

View File

@@ -25,7 +25,7 @@
<div class="tabs collapsable">
<ul>
<li class="tab" id=""><a href="#general">General</a></li>
<li class="tab"><a href="#filters-and-triggers">AI, Filters &amp; Triggers</a></li>
<li class="tab"><a href="#filters-and-triggers">Filters &amp; Triggers</a></li>
{% if extra_tab_content %}
<li class="tab"><a href="#extras_tab">{{ extra_tab_content }}</a></li>
{% endif %}

View File

@@ -312,27 +312,8 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
'''For when viewing the "preview" of the rendered text from inside of Edit'''
from flask import jsonify
from changedetectionio.processors.text_json_diff import prepare_filter_prevew
watch = datastore.data["watching"].get(uuid)
if not watch:
return jsonify({
"error": "Watch not found",
"code": 400
}), 400
if not watch.history_n:
return jsonify({
"error": "Watch has empty history, at least one fetch of the page is required.",
"code": 400
}), 400
#
try:
result = prepare_filter_prevew(watch_uuid=uuid, form_data=request.form, datastore=datastore)
return jsonify(result)
except Exception as e:
return abort(500, str(e))
result = prepare_filter_prevew(watch_uuid=uuid, form_data=request.form, datastore=datastore)
return jsonify(result)
@edit_blueprint.route("/highlight_submit_ignore_url", methods=['POST'])
@login_optionally_required
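
The longer side of this hunk validates the watch before rendering the preview and converts processing failures into an HTTP 500, while keeping missing-watch and empty-history cases as 400 responses; a condensed sketch of that flow as a standalone function (names follow the hunk, the blueprint wiring is omitted):

from flask import abort, jsonify, request


def preview_rendered_text(datastore, uuid):
    # Condensed sketch of the guarded flow; in the project this body sits inside
    # the edit blueprint's route handler, within a request context.
    from changedetectionio.processors.text_json_diff import prepare_filter_prevew

    watch = datastore.data["watching"].get(uuid)
    if not watch:
        return jsonify({"error": "Watch not found", "code": 400}), 400
    if not watch.history_n:
        return jsonify({"error": "Watch has empty history, at least one fetch of the page is required.", "code": 400}), 400

    try:
        # Client errors were handled above; anything raised here is a server-side failure
        return jsonify(prepare_filter_prevew(watch_uuid=uuid, form_data=request.form, datastore=datastore))
    except Exception as e:
        return abort(500, str(e))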

View File

@@ -212,14 +212,7 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe
add_paused = request.form.get('edit_and_watch_submit_button') != None
processor = request.form.get('processor', 'text_json_diff')
extras = {'paused': add_paused, 'processor': processor}
LLM_prompt = request.form.get('LLM_prompt', '').strip()
if LLM_prompt:
extras['LLM_prompt'] = LLM_prompt
extras['LLM_send_type'] = request.form.get('LLM_send_type', 'text')
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tags').strip(), extras=extras)
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tags').strip(), extras={'paused': add_paused, 'processor': processor})
if new_uuid:
if add_paused:

View File

@@ -5,7 +5,12 @@
<script src="{{url_for('static_content', group='js', filename='watch-overview.js')}}" defer></script>
<script>let nowtimeserver={{ now_time_server }};</script>
<script>let favicon_baseURL="{{ url_for('static_content', group='favicon', filename="PLACEHOLDER")}}";</script>
<script>
// Initialize Feather icons after the page loads
document.addEventListener('DOMContentLoaded', function() {
feather.replace();
});
</script>
<style>
.checking-now .last-checked {
background-image: linear-gradient(to bottom, transparent 0%, rgba(0,0,0,0.05) 40%, rgba(0,0,0,0.1) 100%);
@@ -26,12 +31,8 @@
{{ render_nolabel_field(form.edit_and_watch_submit_button, title="Edit first then Watch") }}
</div>
<div id="watch-group-tag">
<i data-feather="tag" style="width: 14px; height: 14px; stroke: white; margin-right: 4px;"></i>
{{ render_field(form.tags, value=active_tag.title if active_tag_uuid else '', placeholder="Watch group / tag", class="transparent-field") }}
</div>
{%- include 'edit/llm_prompt.html' -%}
<div id="quick-watch-processor-type">
{{ render_simple_field(form.processor) }}
</div>
@@ -80,13 +81,10 @@
{%- if any_has_restock_price_processor -%}
{%- set cols_required = cols_required + 1 -%}
{%- endif -%}
{%- set ui_settings = datastore.data['settings']['application']['ui'] -%}
<div id="watch-table-wrapper">
{%- set table_classes = [
'favicon-enabled' if 'favicons_enabled' not in ui_settings or ui_settings['favicons_enabled'] else 'favicon-not-enabled',
] -%}
<table class="pure-table pure-table-striped watch-table {{ table_classes | reject('equalto', '') | join(' ') }}">
<table class="pure-table pure-table-striped watch-table">
<thead>
<tr>
{%- set link_order = "desc" if sort_order == 'asc' else "asc" -%}
@@ -116,7 +114,7 @@
{%- for watch in (watches|sort(attribute=sort_attribute, reverse=sort_order == 'asc'))|pagination_slice(skip=pagination.skip) -%}
{%- set checking_now = is_checking_now(watch) -%}
{%- set history_n = watch.history_n -%}
{%- set favicon = watch.get_favicon_filename() -%}
{%- set has_favicon = watch.get_favicon_filename() -%}
{# Mirror in changedetectionio/static/js/realtime.js for the frontend #}
{%- set row_classes = [
loop.cycle('pure-table-odd', 'pure-table-even'),
@@ -125,7 +123,7 @@
'paused' if watch.paused is defined and watch.paused != False else '',
'unviewed' if watch.has_unviewed else '',
'has-restock-info' if watch.has_restock_info else 'no-restock-info',
'has-favicon' if favicon else '',
'has-favicon' if has_favicon else '',
'in-stock' if watch.has_restock_info and watch['restock']['in_stock'] else '',
'not-in-stock' if watch.has_restock_info and not watch['restock']['in_stock'] else '',
'queued' if watch.uuid in queued_uuids else '',
@@ -147,11 +145,9 @@
<td class="title-col inline">
<div class="flex-wrapper">
{% if 'favicons_enabled' not in ui_settings or ui_settings['favicons_enabled'] %}
<div>{# A page might have hundreds of these images, set IMG options for lazy loading, don't set SRC if we dont have it so it doesnt fetch the placeholder' #}
<img alt="Favicon thumbnail" class="favicon" loading="lazy" decoding="async" fetchpriority="low" {% if favicon %} src="{{url_for('static_content', group='favicon', filename=watch.uuid)}}" {% else %} src='data:image/svg+xml;utf8,%3Csvg xmlns="http://www.w3.org/2000/svg" width="7.087" height="7.087" viewBox="0 0 7.087 7.087"%3E%3Ccircle cx="3.543" cy="3.543" r="3.279" stroke="%23e1e1e1" stroke-width="0.45" fill="none" opacity="0.74"/%3E%3C/svg%3E' {% endif %} />
<img alt="Favicon thumbnail" style="display: none;" class="favicon" loading="lazy" decoding="async" fetchpriority="low" {% if has_favicon %} src="{{url_for('static_content', group='favicon', filename=watch.uuid)}}" {% else %} src="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///ywAAAAAAQABAAACAUwAOw=="{% endif %} />
</div>
{% endif %}
<div>
<span class="watch-title">
{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}}&nbsp;<a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}">&nbsp;</a>

View File

@@ -438,17 +438,8 @@ def changedetection_app(config=None, datastore_o=None):
favicon_filename = watch.get_favicon_filename()
if favicon_filename:
try:
import magic
mime = magic.from_file(
os.path.join(watch.watch_data_dir, favicon_filename),
mime=True
)
except ImportError:
# Fallback, no python-magic
import mimetypes
mime, encoding = mimetypes.guess_type(favicon_filename)
import mimetypes
mime, encoding = mimetypes.guess_type(favicon_filename)
response = make_response(send_from_directory(watch.watch_data_dir, favicon_filename))
response.headers['Content-type'] = mime
response.headers['Cache-Control'] = 'max-age=300, must-revalidate' # Cache for 5 minutes, then revalidate

View File

@@ -55,18 +55,6 @@ valid_method = {
default_method = 'GET'
allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
LLM_example_texts = ['Tell me simply "Price, In stock"',
'Give me a list of all products for sale in this text',
'Tell me simply "Yes" "No" or "Maybe" if you think the weather outlook is good for a 4-day small camping trip',
'Look at this restaurant menu and only give me list of meals you think are good for type 2 diabetics, if nothing is found just say "nothing"',
]
LLM_send_type_choices = [('text', 'Plain text after filters'),
('above_fold_text', 'Text above the fold'),
('Screenshot', 'Screenshot / Selection'),
('HTML', 'HTML Source')
]
class StringListField(StringField):
widget = widgets.TextArea()
@@ -527,15 +515,11 @@ class ValidateCSSJSONXPATHInput(object):
class quickWatchForm(Form):
from . import processors
import random
url = fields.URLField('URL', validators=[validateURL()])
tags = StringTagUUID('Group tag', [validators.Optional()])
watch_submit_button = SubmitField('Watch', render_kw={"class": "pure-button pure-button-primary"})
processor = RadioField(u'Processor', choices=processors.available_processors(), default="text_json_diff")
LLM_prompt = TextAreaField(u'AI Prompt', [validators.Optional()], render_kw={"placeholder": f'Example, "{random.choice(LLM_example_texts)}"'})
LLM_send_type = RadioField(u'LLM Send', choices=LLM_send_type_choices, default="text")
edit_and_watch_submit_button = SubmitField('Edit > Watch', render_kw={"class": "pure-button pure-button-primary"})
@@ -543,7 +527,6 @@ class quickWatchForm(Form):
# Common to a single watch and the global settings
class commonSettingsForm(Form):
from . import processors
import random
def __init__(self, formdata=None, obj=None, prefix="", data=None, meta=None, **kwargs):
super().__init__(formdata, obj, prefix, data, meta, **kwargs)
@@ -561,8 +544,6 @@ class commonSettingsForm(Form):
timezone = StringField("Timezone for watch schedule", render_kw={"list": "timezones"}, validators=[validateTimeZoneName()])
webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1, message="Should contain one or more seconds")])
LLM_prompt = TextAreaField(u'AI Prompt', [validators.Optional()], render_kw={"placeholder": f'Example, "{random.choice(LLM_example_texts)}"'})
LLM_send_type = RadioField(u'LLM Send', choices=LLM_send_type_choices, default="text")
class importForm(Form):
from . import processors
@@ -759,30 +740,6 @@ class globalSettingsRequestForm(Form):
class globalSettingsApplicationUIForm(Form):
open_diff_in_new_tab = BooleanField("Open 'History' page in a new tab", default=True, validators=[validators.Optional()])
socket_io_enabled = BooleanField('Realtime UI Updates Enabled', default=True, validators=[validators.Optional()])
favicons_enabled = BooleanField('Favicons Enabled', default=True, validators=[validators.Optional()])
class globalSettingsApplicationAIKeysForm(Form):
openai = StringField('OpenAI Key',
validators=[validators.Optional()],
render_kw={"placeholder": 'xxxxxxxxx'}
)
gemini = StringField('Google Gemini Key',
validators=[validators.Optional()],
render_kw={"placeholder": 'ooooooooo'}
)
class globalSettingsApplicationAIForm(Form):
#@todo use only configured types?
LLM_backend = RadioField(u'LLM Backend',
choices=[('openai', 'Open AI'), ('gemini', 'Gemini')],
default="text")
# So that we can pass this to our LLM/__init__.py as a keys dict
API_keys = FormField(globalSettingsApplicationAIKeysForm)
# datastore.data['settings']['application']..
class globalSettingsApplicationForm(commonSettingsForm):
@@ -816,8 +773,6 @@ class globalSettingsApplicationForm(commonSettingsForm):
message="Should contain zero or more attempts")])
ui = FormField(globalSettingsApplicationUIForm)
ai = FormField(globalSettingsApplicationAIForm)
class globalSettingsForm(Form):
# Define these as FormFields/"sub forms", this way it matches the JSON storage
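
The settings template earlier in this compare reaches fields as form.application.form.ai.form.API_keys.form.openai; that path is produced by nesting FormFields. A stripped-down sketch of the structure this hunk defines (most fields omitted; note the hunk gives LLM_backend default="text", which is not one of its declared choices, so 'openai' is used here instead):

from wtforms import Form, FormField, RadioField, StringField, validators


class globalSettingsApplicationAIKeysForm(Form):
    openai = StringField('OpenAI Key', validators=[validators.Optional()])
    gemini = StringField('Google Gemini Key', validators=[validators.Optional()])


class globalSettingsApplicationAIForm(Form):
    LLM_backend = RadioField('LLM Backend',
                             choices=[('openai', 'Open AI'), ('gemini', 'Gemini')],
                             default='openai')
    # Nested form so the keys round-trip as one dict that can be handed to LLM_integrate(api_keys=...)
    API_keys = FormField(globalSettingsApplicationAIKeysForm)


class globalSettingsApplicationForm(Form):
    # In the project this subclasses commonSettingsForm and carries many more fields
    ai = FormField(globalSettingsApplicationAIForm)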

View File

@@ -63,12 +63,7 @@ class model(dict):
'ui': {
'open_diff_in_new_tab': True,
'socket_io_enabled': True,
'favicons_enabled': True
},
'ai': {
'openai_key': None,
'gemini_key': None
}
}
}
}

View File

@@ -38,9 +38,6 @@ class watch_base(dict):
'last_error': False,
'last_notification_error': None,
'last_viewed': 0, # history key value of the last viewed via the [diff] link
'LLM_prompt': None,
'LLM_send_type': None,
'LLM_backend': None,
'method': 'GET',
'notification_alert_count': 0,
'notification_body': None,

View File

@@ -1,64 +0,0 @@
import importlib
from langchain_core.messages import SystemMessage, HumanMessage
SYSTEM_MESSAGE = (
"You are a text analyser who will attempt to give the most concise information "
"to the request, the information should be returned in a way that if I ask you again "
"I should get the same answer if the outcome is the same. The goal is to cut down "
"or reduce the text changes from you when i ask the same question about similar content "
"Always list items in exactly the same order and wording as found in the source text. "
)
class LLM_integrate:
PROVIDER_MAP = {
"openai": ("langchain_openai", "ChatOpenAI"),
"azure": ("langchain_community.chat_models", "AzureChatOpenAI"),
"gemini": ("langchain_google_genai", "ChatGoogleGenerativeAI")
}
def __init__(self, api_keys: dict):
"""
api_keys = {
"openai": "sk-xxx",
"azure": "AZURE_KEY",
"gemini": "GEMINI_KEY"
}
"""
self.api_keys = api_keys
def run(self, provider: str, model: str, message: str):
module_name, class_name = self.PROVIDER_MAP[provider]
# Import the class dynamically
module = importlib.import_module(module_name)
LLMClass = getattr(module, class_name)
# Create the LLM object
llm_kwargs = {}
if provider == "openai":
llm_kwargs = dict(api_key=self.api_keys.get("openai", ''),
model=model,
# https://api.python.langchain.com/en/latest/chat_models/langchain_openai.chat_models.base.ChatOpenAI.html#langchain_openai.chat_models.base.ChatOpenAI.temperature
temperature=0 # most deterministic,
)
elif provider == "azure":
llm_kwargs = dict(
api_key=self.api_keys["azure"],
azure_endpoint="https://<your-endpoint>.openai.azure.com",
deployment_name=model
)
elif provider == "gemini":
llm_kwargs = dict(api_key=self.api_keys.get("gemini"), model=model)
llm = LLMClass(**llm_kwargs)
# Build your messages
messages = [
SystemMessage(content=SYSTEM_MESSAGE),
HumanMessage(content=message)
]
# Run the model asynchronously
result = llm.invoke(messages)
return result.content
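
A hypothetical call matching how the text_json_diff processor further down in this compare drives the class (the key, model name and prompt are placeholders):

# Key and prompt are placeholders; provider/model mirror the processor hunk below.
integrator = LLM_integrate(api_keys={"openai": "sk-..."})
answer = integrator.run(
    provider="openai",
    model="gpt-4.1",
    message='Tell me simply "Price, In stock"\n----------- Content follows-----------\n\n<filtered page text here>',
)
print(answer)  # the model's reply as plain text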

View File

@@ -1,6 +1,5 @@
from abc import abstractmethod
from changedetectionio.content_fetchers.base import Fetcher
from changedetectionio.processors.LLM import LLM_integrate
from changedetectionio.strtobool import strtobool
from copy import deepcopy
from loguru import logger

View File

@@ -7,7 +7,7 @@ import re
import urllib3
from changedetectionio.conditions import execute_ruleset_against_all_plugins
from changedetectionio.processors import difference_detection_processor, LLM_integrate
from changedetectionio.processors import difference_detection_processor
from changedetectionio.html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text, TRANSLATE_WHITESPACE_TABLE
from changedetectionio import html_tools, content_fetchers
from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT
@@ -293,30 +293,6 @@ class perform_site_check(difference_detection_processor):
# we end up with 'Some text\n\n', sorting will add all those extra \n at the start, so we remove them here.
stripped_text_from_html = stripped_text_from_html.replace("\n\n", "\n")
stripped_text_from_html = '\n'.join(sorted(stripped_text_from_html.splitlines(), key=lambda x: x.lower()))
### OPENAI?
# And here we run LLM integration based on the content we received
LLM_keys = self.datastore.data['settings']['application']['ai'].get('API_keys', {})
if watch.get('LLM_prompt') and stripped_text_from_html and LLM_keys:
response = ""
try:
integrator = LLM_integrate(api_keys=LLM_keys)
response = integrator.run(
provider="openai",
model="gpt-4.1", #gpt-4-turbo
message=f"{watch.get('LLM_prompt')}\n----------- Content follows-----------\n\n{stripped_text_from_html}"
)
except Exception as e:
logger.critical(f"Error running LLM integration {str(e)} (type etc)")
raise(e)
x = 1
# todo is there something special when tokens are used up etc?
else:
stripped_text_from_html = response
# logger.trace("LLM done")
finally:
logger.debug("LLM request done (type etc)")
### CALCULATE MD5
# If there's text to ignore
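
The LLM block in this hunk loses its indentation in this view, and the x = 1 line after raise(e) is unreachable as written; read as an ordinary try/except/else/finally, its apparent intent is the following sketch (names follow the hunk, wrapped as a standalone helper):

from loguru import logger


def run_llm_over_filtered_text(LLM_keys, llm_prompt, stripped_text_from_html):
    """Apparent intent of the block: replace the filtered page text with the LLM's answer."""
    from changedetectionio.processors.LLM import LLM_integrate

    try:
        integrator = LLM_integrate(api_keys=LLM_keys)
        response = integrator.run(
            provider="openai",
            model="gpt-4.1",
            message=f"{llm_prompt}\n----------- Content follows-----------\n\n{stripped_text_from_html}",
        )
    except Exception as e:
        # The hunk's todo: exhausted tokens etc. probably deserve their own handling; here they are logged and re-raised
        logger.critical(f"Error running LLM integration {str(e)}")
        raise
    else:
        # The model's answer becomes the text that is hashed and diffed downstream
        stripped_text_from_html = response
    finally:
        logger.debug("LLM request done")

    return stripped_text_from_html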

Binary file not shown. Before: 15 KiB.

View File

@@ -21,7 +21,6 @@ function request_textpreview_update() {
namespace: 'watchEdit'
}).done(function (data) {
console.debug(data['duration'])
$('#error-text').text(data['duration']);
$('#filters-and-triggers #text-preview-before-inner').text(data['before_filter']);
$('#filters-and-triggers #text-preview-inner')
.text(data['after_filter'])
@@ -38,8 +37,9 @@ function request_textpreview_update() {
}).fail(function (error) {
if (error.statusText === 'abort') {
console.log('Request was aborted due to a new request being fired.');
} else {
$('#filters-and-triggers #text-preview-inner').text('There was an error communicating with the server.');
}
$('#error-text').text(error.responseJSON['error']);
})
}

File diff suppressed because one or more lines are too long

View File

@@ -3,16 +3,15 @@
"version": "0.0.3",
"description": "",
"main": "index.js",
"engines": {
"node": ">=18.0.0"
},
"scripts": {
"watch": "sass --watch scss:. --style=compressed --no-source-map",
"build": "sass scss:. --style=compressed --no-source-map"
"watch": "node-sass -w scss -o .",
"build": "node-sass scss -o ."
},
"author": "Leigh Morresi / Web Technologies s.r.o.",
"license": "Apache",
"author": "",
"license": "ISC",
"dependencies": {
"sass": "^1.77.8"
"node-sass": "^7.0.0",
"tar": "^6.1.9",
"trim-newlines": "^3.0.1"
}
}

View File

@@ -1,4 +1,4 @@
@use "parts/variables";
@import "parts/_variables.scss";
#diff-ui {

View File

@@ -1,5 +1,3 @@
@use "_llm-prompt";
ul#conditions_match_logic {
list-style: none;
input, label, li {

View File

@@ -64,17 +64,17 @@ body.proxy-check-active {
#recommended-proxy {
display: grid;
gap: 2rem;
padding-bottom: 1em;
@media (min-width: 991px) {
grid-template-columns: repeat(2, 1fr);
}
@media (min-width: 991px) {
grid-template-columns: repeat(2, 1fr);
}
> div {
border: 1px #aaa solid;
border-radius: 4px;
padding: 1em;
}
padding-bottom: 1em;
}
#extra-proxies-setting {

View File

@@ -1,42 +1,27 @@
.watch-table {
&.favicon-not-enabled {
tr {
.favicon {
display: none;
}
}
}
&.favicon-enabled {
tr {
/* make the icons and the text inline-ish */
td.inline.title-col {
.flex-wrapper {
display: flex;
align-items: center;
gap: 4px;
}
}
}
}
td,
th {
vertical-align: middle;
}
tr.has-favicon {
img.favicon {
display: inline-block !important;
}
&.unviewed {
img.favicon {
opacity: 1.0 !important;
}
}
}
.status-icons {
white-space: nowrap;
display: flex;
align-items: center; /* Vertical centering */
gap: 4px; /* Space between image and text */
display: flex;
align-items: center; /* Vertical centering */
gap: 4px; /* Space between image and text */
> * {
vertical-align: middle;
}
@@ -70,23 +55,33 @@
padding-right: 4px;
}
// Reserved for future use
/* &.thumbnail-type-screenshot {
tr.has-favicon {
td.inline.title-col {
img.thumbnail {
background-color: #fff; !* fallback bg for SVGs without bg *!
border-radius: 4px; !* subtle rounded corners *!
border: 1px solid #ddd; !* light border for contrast *!
box-shadow: 0 2px 6px rgba(0, 0, 0, 0.15); !* soft shadow *!
filter: contrast(1.05) saturate(1.1) drop-shadow(0 0 0.5px rgba(0, 0, 0, 0.2));
object-fit: cover; !* crop/fill if needed *!
opacity: 0.8;
max-width: 30px;
max-height: 30px;
height: 30px;
}
tr.has-favicon {
td.inline.title-col {
.flex-wrapper {
display: flex;
align-items: center;
gap: 4px;
}
}
}
// Reserved for future use
/* &.thumbnail-type-screenshot {
tr.has-favicon {
td.inline.title-col {
img.thumbnail {
background-color: #fff; !* fallback bg for SVGs without bg *!
border-radius: 4px; !* subtle rounded corners *!
border: 1px solid #ddd; !* light border for contrast *!
box-shadow: 0 2px 6px rgba(0, 0, 0, 0.15); !* soft shadow *!
filter: contrast(1.05) saturate(1.1) drop-shadow(0 0 0.5px rgba(0, 0, 0, 0.2));
object-fit: cover; !* crop/fill if needed *!
opacity: 0.8;
max-width: 30px;
max-height: 30px;
height: 30px;
}
}
}*/
}
}*/
}

View File

@@ -1,59 +0,0 @@
#form-quick-watch-add #openai-prompt {
color: var(--color-white);
}
#llm-prompt-all {
.label {
display: block !important;
}
textarea {
white-space: pre-wrap;
overflow-wrap: break-word;
word-wrap: break-word; /* legacy support */
font-size: 13px;
}
ul {
list-style: none;
padding-left: 0px;
li {
display: flex;
align-items: center;
gap: 0.5em;
padding-bottom: 0.3em;
> * {
margin: 0px;
padding: 0px;
}
label {
font-weight: normal;
}
}
}
}
@media (min-width: 768px) {
#llm-prompt-all {
display: grid;
grid-template-columns: 1fr auto auto;
column-gap: 1.5rem;
align-items: start;
font-size: 0.9rem;
padding: 0.3rem;
#llm-prompt {
/* ensure the textarea stretches horizontally */
width: 100%;
textarea {
width: 100%;
box-sizing: border-box;
}
}
}
}

View File

@@ -1,4 +1,4 @@
@use "minitabs";
@import "minitabs";
body.preview-text-enabled {

View File

@@ -34,17 +34,11 @@ $grid-gap: 0.5rem;
.last-checked {
margin-left: calc($grid-col-checkbox + $grid-gap);
> span {
vertical-align: middle;
}
}
.last-changed {
margin-left: calc($grid-col-checkbox + $grid-gap);
}
.last-checked::before {
color: var(--color-text);
content: "Last Checked ";
@@ -173,6 +167,6 @@ $grid-gap: 0.5rem;
}
}
.pure-table td {
padding: 3px !important;
padding: 5px !important;
}
}

View File

@@ -2,24 +2,23 @@
* -- BASE STYLES --
*/
@use "parts/variables";
@use "parts/arrows";
@use "parts/browser-steps";
@use "parts/extra_proxies";
@use "parts/extra_browsers";
@use "parts/pagination";
@use "parts/spinners";
@use "parts/darkmode";
@use "parts/menu";
@use "parts/love";
@use "parts/preview_text_filter";
@use "parts/watch_table";
@use "parts/watch_table-mobile";
@use "parts/edit";
@use "parts/conditions_table";
@use "parts/lister_extra";
@use "parts/socket";
@use "parts/visualselector";
@import "parts/_arrows";
@import "parts/_browser-steps";
@import "parts/_extra_proxies";
@import "parts/_extra_browsers";
@import "parts/_pagination";
@import "parts/_spinners";
@import "parts/_variables";
@import "parts/_darkmode";
@import "parts/_menu";
@import "parts/_love";
@import "parts/preview_text_filter";
@import "parts/_watch_table";
@import "parts/_watch_table-mobile";
@import "parts/_edit";
@import "parts/_conditions_table";
@import "parts/_lister_extra";
@import "parts/_socket";
body {
@@ -188,15 +187,9 @@ code {
@extend .inline-tag;
}
@media (min-width: 768px) {
.box {
margin: 0 1em !important;
}
}
.box {
max-width: 100%;
margin: 0 0.3em;
margin: 0 1em;
flex-direction: column;
display: flex;
justify-content: center;
@@ -792,9 +785,7 @@ textarea::placeholder {
border-top-left-radius: 5px;
border-top-right-radius: 5px;
background-color: var(--color-background-tab);
svg {
stroke: var(--color-text-tab);
}
&:not(.active) {
&:hover {
background-color: var(--color-background-tab-hover);
@@ -804,13 +795,11 @@ textarea::placeholder {
&.active,
:target {
background-color: var(--color-background);
a {
color: var(--color-text-tab-active);
font-weight: bold;
}
svg {
stroke: var(--color-text-tab-active);
}
}
a {
@@ -962,6 +951,8 @@ ul {
}
}
@import "parts/_visualselector";
#webdriver_delay {
width: 5em;
}
@@ -1079,23 +1070,17 @@ ul {
#quick-watch-processor-type {
ul#processor {
color: #fff;
padding-left: 0px;
color: #fff;
ul {
padding: 0.3rem;
li {
list-style: none;
font-size: 0.9rem;
display: grid;
grid-template-columns: auto 1fr;
align-items: center;
gap: 0.5rem;
margin-bottom: 0.5rem;
> * {
display: inline-block;
}
}
}
label, input {
padding: 0;
margin: 0;
}
}
.restock-label {

File diff suppressed because one or more lines are too long

View File

@@ -38,12 +38,6 @@
<script src="{{url_for('static_content', group='js', filename='socket.io.min.js')}}"></script>
<script src="{{url_for('static_content', group='js', filename='realtime.js')}}" defer></script>
{% endif %}
<script>
// Initialize Feather icons after the page loads
document.addEventListener('DOMContentLoaded', function() {
feather.replace();
});
</script>
</head>
<body class="">

View File

@@ -1,6 +1,6 @@
{% extends 'base.html' %}
{% block content %}
{% from '_helpers.html' import render_field, render_simple_field, render_checkbox_field, render_button, render_time_schedule_form, playwright_warning, only_playwright_type_watches_warning, render_conditions_fieldlist_of_formfields_as_table %}
{% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form, playwright_warning, only_playwright_type_watches_warning, render_conditions_fieldlist_of_formfields_as_table %}
{% from '_common_fields.html' import render_common_settings_form %}
<script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
<script src="{{url_for('static_content', group='js', filename='vis.js')}}" defer></script>
@@ -52,7 +52,7 @@
<!-- should goto extra forms? -->
{% if watch['processor'] == 'text_json_diff' %}
<li class="tab"><a id="visualselector-tab" href="#visualselector">Visual Filter Selector</a></li>
<li class="tab" id="filters-and-triggers-tab"><a href="#filters-and-triggers">AI, Filters &amp; Triggers</a></li>
<li class="tab" id="filters-and-triggers-tab"><a href="#filters-and-triggers">Filters &amp; Triggers</a></li>
<li class="tab" id="conditions-tab"><a href="#conditions">Conditions</a></li>
{% endif %}
<li class="tab"><a href="#notifications">Notifications</a></li>
@@ -316,6 +316,7 @@ Math: {{ 1 + 1 }}") }}
</li>
</ul>
</div>
{% include "edit/include_subtract.html" %}
<div class="text-filtering border-fieldset">
<fieldset class="pure-group" id="text-filtering-type-options">
@@ -363,7 +364,6 @@ Math: {{ 1 + 1 }}") }}
</div>
</div>
</div>
<p id="error-text"></p>
</div>
</div>
</div>

View File

@@ -1,7 +1,4 @@
<div class="pure-control-group">
{%- include 'edit/llm_prompt.html' -%}
</div>
<div class="pure-control-group">
<div class="pure-control-group">
{% set field = render_field(form.include_filters,
rows=5,
placeholder=has_tag_filters_extra+"#example

View File

@@ -1,12 +0,0 @@
<div class="pure-control-group" id="ai-filter-options">
<div id="openai-prompt">
<div id="llm-prompt-all">
<div id="llm-prompt">
{{ render_simple_field(form.LLM_prompt, rows=5) }}
</div>
<div id="llm-send-type">
{{ render_simple_field(form.LLM_send_type) }}
</div>
</div>
</div>
</div>

View File

@@ -14,12 +14,9 @@ def test_fetch_webdriver_content(client, live_server, measure_memory_usage):
#####################
res = client.post(
url_for("settings.settings_page"),
data={
"application-empty_pages_are_a_change": "",
"requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_webdriver",
'application-ui-favicons_enabled': "y",
},
data={"application-empty_pages_are_a_change": "",
"requests-time_between_check-minutes": 180,
'application-fetch_backend': "html_webdriver"},
follow_redirects=True
)
@@ -64,22 +61,3 @@ def test_fetch_webdriver_content(client, live_server, measure_memory_usage):
)
assert res.status_code == 200
assert len(res.data) > 10
##################### disable favicons check
res = client.post(
url_for("settings.settings_page"),
data={
"requests-time_between_check-minutes": 180,
'application-ui-favicons_enabled': "",
"application-empty_pages_are_a_change": "",
},
follow_redirects=True
)
assert b"Settings updated." in res.data
res = client.get(
url_for("watchlist.index"),
)
# The UI can access it here
assert f'src="/static/favicon'.encode('utf8') not in res.data

Binary file not shown. Before: 200 KiB, After: 171 KiB.

View File

@@ -69,9 +69,6 @@ werkzeug==3.0.6
# Templating, so far just in the URLs but in the future can be for the notifications also
jinja2~=3.1
jinja2-time
langchain~=0.3
langchain-openai~=0.3
openpyxl
# https://peps.python.org/pep-0508/#environment-markers
# https://github.com/dgtlmoon/changedetection.io/pull/1009
@@ -120,9 +117,6 @@ price-parser
# flask_socket_io - incorrect package name, already have flask-socketio above
# So far for detecting correct favicon type, but for other things in the future
python-magic
# Scheduler - Windows seemed to miss a lot of default timezone info (even "UTC" !)
tzdata