mirror of
				https://github.com/dgtlmoon/changedetection.io.git
				synced 2025-10-31 14:47:21 +00:00 
			
		
		
		
	Compare commits
	
		
			8 Commits
		
	
	
		
			notificati
			...
			openai-int
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|   | df9258a8f7 | ||
|   | c070265668 | ||
|   | 48921c878d | ||
|   | 44384386cc | ||
|   | 3513676bc6 | ||
|   | 559b729475 | ||
|   | 8937df7b0b | ||
|   | 9a015041a5 | 
| @@ -1,7 +1,7 @@ | ||||
| {% extends 'base.html' %} | ||||
|  | ||||
| {% block content %} | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form %} | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_simple_field, render_button, render_time_schedule_form %} | ||||
| {% from '_common_fields.html' import render_common_settings_form %} | ||||
| <script> | ||||
|     const notification_base_url="{{url_for('ui.ui_notification.ajax_callback_send_notification_test', mode="global-settings")}}"; | ||||
| @@ -23,6 +23,7 @@ | ||||
|             <li class="tab"><a href="#fetching">Fetching</a></li> | ||||
|             <li class="tab"><a href="#filters">Global Filters</a></li> | ||||
|             <li class="tab"><a href="#ui-options">UI Options</a></li> | ||||
|             <li class="tab"><a href="#ai-options"><i data-feather="aperture" style="width: 14px; height: 14px; margin-right: 4px;"></i> AI</a></li> | ||||
|             <li class="tab"><a href="#api">API</a></li> | ||||
|             <li class="tab"><a href="#timedate">Time & Date</a></li> | ||||
|             <li class="tab"><a href="#proxies">CAPTCHA & Proxies</a></li> | ||||
| @@ -262,6 +263,24 @@ nav | ||||
|                 </div> | ||||
|  | ||||
|             </div> | ||||
|             <div class="tab-pane-inner" id="ai-options"> | ||||
|                 <p><strong>New:</strong> click here (link to changedetection.io tutorial page) to find out how to set it up, with an example</p> | ||||
|                 <br> | ||||
|                 key fields should be some password type field so you can see it's set but doesn't contain the key on view and doesn't lose it on save<br> | ||||
|  | ||||
|                 <div class="pure-control-group inline-radio"> | ||||
|                     {{ render_simple_field(form.application.form.ai.form.LLM_backend) }} | ||||
|                     <span class="pure-form-message-inline">Preferred LLM connection</span> | ||||
|                 </div> | ||||
|                 <div class="pure-control-group"> | ||||
|                     {{ render_checkbox_field(form.application.form.ai.form.API_keys.form.openai) }} | ||||
|                     <span class="pure-form-message-inline">Go here to read more about OpenAI integration</span> | ||||
|                 </div> | ||||
|                 <div class="pure-control-group"> | ||||
|                     {{ render_checkbox_field(form.application.form.ai.form.API_keys.form.gemini) }} | ||||
|                     <span class="pure-form-message-inline">Go here to read more about Google Gemini integration</span> | ||||
|                 </div> | ||||
|             </div> | ||||
|             <div class="tab-pane-inner" id="proxies"> | ||||
|                 <div id="recommended-proxy"> | ||||
|                     <div> | ||||
|   | ||||
| @@ -25,7 +25,7 @@ | ||||
|     <div class="tabs collapsable"> | ||||
|         <ul> | ||||
|             <li class="tab" id=""><a href="#general">General</a></li> | ||||
|             <li class="tab"><a href="#filters-and-triggers">Filters & Triggers</a></li> | ||||
|             <li class="tab"><a href="#filters-and-triggers">AI, Filters & Triggers</a></li> | ||||
|             {% if extra_tab_content %} | ||||
|             <li class="tab"><a href="#extras_tab">{{ extra_tab_content }}</a></li> | ||||
|             {% endif %} | ||||
|   | ||||
| @@ -312,8 +312,27 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe | ||||
|         '''For when viewing the "preview" of the rendered text from inside of Edit''' | ||||
|         from flask import jsonify | ||||
|         from changedetectionio.processors.text_json_diff import prepare_filter_prevew | ||||
|         result = prepare_filter_prevew(watch_uuid=uuid, form_data=request.form, datastore=datastore) | ||||
|         return jsonify(result) | ||||
|  | ||||
|         watch = datastore.data["watching"].get(uuid) | ||||
|  | ||||
|         if not watch: | ||||
|             return jsonify({ | ||||
|                 "error": "Watch not found", | ||||
|                 "code": 400 | ||||
|             }), 400 | ||||
|  | ||||
|         if not watch.history_n: | ||||
|             return jsonify({ | ||||
|                 "error": "Watch has empty history, at least one fetch of the page is required.", | ||||
|                 "code": 400 | ||||
|             }), 400 | ||||
|         # | ||||
|         try: | ||||
|             result = prepare_filter_prevew(watch_uuid=uuid, form_data=request.form, datastore=datastore) | ||||
|             return jsonify(result) | ||||
|         except Exception as e: | ||||
|             return abort(500, str(e)) | ||||
|  | ||||
|  | ||||
|     @edit_blueprint.route("/highlight_submit_ignore_url", methods=['POST']) | ||||
|     @login_optionally_required | ||||
|   | ||||
| @@ -212,7 +212,14 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q, queuedWatchMe | ||||
|  | ||||
|         add_paused = request.form.get('edit_and_watch_submit_button') != None | ||||
|         processor = request.form.get('processor', 'text_json_diff') | ||||
|         new_uuid = datastore.add_watch(url=url, tag=request.form.get('tags').strip(), extras={'paused': add_paused, 'processor': processor}) | ||||
|         extras = {'paused': add_paused, 'processor': processor} | ||||
|  | ||||
|         LLM_prompt = request.form.get('LLM_prompt', '').strip() | ||||
|         if LLM_prompt: | ||||
|             extras['LLM_prompt'] = LLM_prompt | ||||
|             extras['LLM_send_type'] = request.form.get('LLM_send_type', 'text') | ||||
|  | ||||
|         new_uuid = datastore.add_watch(url=url, tag=request.form.get('tags').strip(), extras=extras) | ||||
|  | ||||
|         if new_uuid: | ||||
|             if add_paused: | ||||
|   | ||||
| @@ -5,12 +5,7 @@ | ||||
| <script src="{{url_for('static_content', group='js', filename='watch-overview.js')}}" defer></script> | ||||
| <script>let nowtimeserver={{ now_time_server }};</script> | ||||
| <script>let favicon_baseURL="{{ url_for('static_content', group='favicon', filename="PLACEHOLDER")}}";</script> | ||||
| <script> | ||||
| // Initialize Feather icons after the page loads | ||||
| document.addEventListener('DOMContentLoaded', function() { | ||||
|     feather.replace(); | ||||
| }); | ||||
| </script> | ||||
|  | ||||
| <style> | ||||
| .checking-now .last-checked { | ||||
|     background-image: linear-gradient(to bottom, transparent 0%, rgba(0,0,0,0.05) 40%, rgba(0,0,0,0.1) 100%); | ||||
| @@ -31,8 +26,12 @@ document.addEventListener('DOMContentLoaded', function() { | ||||
|                     {{ render_nolabel_field(form.edit_and_watch_submit_button, title="Edit first then Watch") }} | ||||
|             </div> | ||||
|             <div id="watch-group-tag"> | ||||
|                 <i data-feather="tag" style="width: 14px; height: 14px; stroke: white; margin-right: 4px;"></i> | ||||
|                {{ render_field(form.tags, value=active_tag.title if active_tag_uuid else '', placeholder="Watch group / tag", class="transparent-field") }} | ||||
|             </div> | ||||
|  | ||||
|             {%- include 'edit/llm_prompt.html' -%} | ||||
|  | ||||
|             <div id="quick-watch-processor-type"> | ||||
|                 {{ render_simple_field(form.processor) }} | ||||
|             </div> | ||||
|   | ||||
| @@ -55,6 +55,18 @@ valid_method = { | ||||
| default_method = 'GET' | ||||
| allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False')) | ||||
|  | ||||
| LLM_example_texts = ['Tell me simply "Price, In stock"', | ||||
|                      'Give me a list of all products for sale in this text', | ||||
|                      'Tell me simply "Yes" "No" or "Maybe" if you think the weather outlook is good for a 4-day small camping trip', | ||||
|                      'Look at this restaurant menu and only give me list of meals you think are good for type 2 diabetics, if nothing is found just say "nothing"', | ||||
|                      ] | ||||
|  | ||||
| LLM_send_type_choices = [('text', 'Plain text after filters'), | ||||
|                          ('above_fold_text', 'Text above the fold'), | ||||
|                          ('Screenshot', 'Screenshot / Selection'), | ||||
|                          ('HTML', 'HTML Source') | ||||
|                          ] | ||||
|  | ||||
| class StringListField(StringField): | ||||
|     widget = widgets.TextArea() | ||||
|  | ||||
| @@ -515,11 +527,15 @@ class ValidateCSSJSONXPATHInput(object): | ||||
|  | ||||
| class quickWatchForm(Form): | ||||
|     from . import processors | ||||
|     import random | ||||
|  | ||||
|     url = fields.URLField('URL', validators=[validateURL()]) | ||||
|     tags = StringTagUUID('Group tag', [validators.Optional()]) | ||||
|     watch_submit_button = SubmitField('Watch', render_kw={"class": "pure-button pure-button-primary"}) | ||||
|     processor = RadioField(u'Processor', choices=processors.available_processors(), default="text_json_diff") | ||||
|     LLM_prompt = TextAreaField(u'AI Prompt', [validators.Optional()], render_kw={"placeholder": f'Example, "{random.choice(LLM_example_texts)}"'}) | ||||
|     LLM_send_type = RadioField(u'LLM Send', choices=LLM_send_type_choices, default="text") | ||||
|  | ||||
|     edit_and_watch_submit_button = SubmitField('Edit > Watch', render_kw={"class": "pure-button pure-button-primary"}) | ||||
|  | ||||
|  | ||||
| @@ -527,6 +543,7 @@ class quickWatchForm(Form): | ||||
| # Common to a single watch and the global settings | ||||
| class commonSettingsForm(Form): | ||||
|     from . import processors | ||||
|     import random | ||||
|  | ||||
|     def __init__(self, formdata=None, obj=None, prefix="", data=None, meta=None, **kwargs): | ||||
|         super().__init__(formdata, obj, prefix, data, meta, **kwargs) | ||||
| @@ -544,6 +561,8 @@ class commonSettingsForm(Form): | ||||
|     timezone = StringField("Timezone for watch schedule", render_kw={"list": "timezones"}, validators=[validateTimeZoneName()]) | ||||
|     webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1, message="Should contain one or more seconds")]) | ||||
|  | ||||
|     LLM_prompt = TextAreaField(u'AI Prompt', [validators.Optional()], render_kw={"placeholder": f'Example, "{random.choice(LLM_example_texts)}"'}) | ||||
|     LLM_send_type = RadioField(u'LLM Send', choices=LLM_send_type_choices, default="text") | ||||
|  | ||||
| class importForm(Form): | ||||
|     from . import processors | ||||
| @@ -742,6 +761,29 @@ class globalSettingsApplicationUIForm(Form): | ||||
|     socket_io_enabled = BooleanField('Realtime UI Updates Enabled', default=True, validators=[validators.Optional()]) | ||||
|     favicons_enabled = BooleanField('Favicons Enabled', default=True, validators=[validators.Optional()]) | ||||
|  | ||||
class globalSettingsApplicationAIKeysForm(Form):
    """API keys for the supported LLM backends.

    Rendered as password-type inputs so a configured key is masked in the
    settings page rather than echoed back as plain text (the settings
    template's own note asks for exactly this).
    """

    openai = StringField('OpenAI Key',
                           validators=[validators.Optional()],
                           render_kw={"placeholder": 'xxxxxxxxx', "type": "password"}
                           )
    gemini = StringField('Google Gemini Key',
                           validators=[validators.Optional()],
                           render_kw={"placeholder": 'ooooooooo', "type": "password"}
                           )
|  | ||||
class globalSettingsApplicationAIForm(Form):
    """Global AI/LLM settings: preferred backend plus per-backend API keys."""

    #@todo use only configured types?
    # Fix: default was "text", which is not a valid value for this field's
    # choices ('openai'/'gemini'); default to the first valid choice instead.
    LLM_backend = RadioField(u'LLM Backend',
                               choices=[('openai', 'Open AI'), ('gemini', 'Gemini')],
                               default="openai")

    # So that we can pass this to our LLM/__init__.py as a keys dict
    API_keys = FormField(globalSettingsApplicationAIKeysForm)
|  | ||||
|  | ||||
|  | ||||
| # datastore.data['settings']['application'].. | ||||
| class globalSettingsApplicationForm(commonSettingsForm): | ||||
|  | ||||
| @@ -774,6 +816,8 @@ class globalSettingsApplicationForm(commonSettingsForm): | ||||
|                                                                                                      message="Should contain zero or more attempts")]) | ||||
|     ui = FormField(globalSettingsApplicationUIForm) | ||||
|  | ||||
|     ai = FormField(globalSettingsApplicationAIForm) | ||||
|  | ||||
|  | ||||
| class globalSettingsForm(Form): | ||||
|     # Define these as FormFields/"sub forms", this way it matches the JSON storage | ||||
|   | ||||
| @@ -65,6 +65,10 @@ class model(dict): | ||||
|                         'socket_io_enabled': True, | ||||
|                         'favicons_enabled': True | ||||
|                     }, | ||||
|                     'ai': { | ||||
|                         'openai_key': None, | ||||
|                         'gemini_key': None | ||||
|                     } | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|   | ||||
| @@ -38,6 +38,9 @@ class watch_base(dict): | ||||
|             'last_error': False, | ||||
|             'last_notification_error': None, | ||||
|             'last_viewed': 0,  # history key value of the last viewed via the [diff] link | ||||
|             'LLM_prompt': None, | ||||
|             'LLM_send_type': None, | ||||
|             'LLM_backend': None, | ||||
|             'method': 'GET', | ||||
|             'notification_alert_count': 0, | ||||
|             'notification_body': None, | ||||
|   | ||||
							
								
								
									
										64
									
								
								changedetectionio/processors/LLM/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										64
									
								
								changedetectionio/processors/LLM/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,64 @@ | ||||
| import importlib | ||||
| from langchain_core.messages import SystemMessage, HumanMessage | ||||
|  | ||||
| SYSTEM_MESSAGE = ( | ||||
|     "You are a text analyser who will attempt to give the most concise information " | ||||
|     "to the request, the information should be returned in a way that if I ask you again " | ||||
|     "I should get the same answer if the outcome is the same. The goal is to cut down " | ||||
|     "or reduce the text changes from you when i ask the same question about similar content " | ||||
|     "Always list items in exactly the same order and wording as found in the source text. " | ||||
| ) | ||||
|  | ||||
|  | ||||
class LLM_integrate:
    """Thin wrapper that dispatches a prompt to a configured LLM provider via langchain.

    The provider's chat-model class is imported lazily (see ``run``) so that
    only the backend actually used needs its package installed.
    """

    # provider name -> (module to import, chat-model class name inside it)
    PROVIDER_MAP = {
        "openai": ("langchain_openai", "ChatOpenAI"),
        "azure": ("langchain_community.chat_models", "AzureChatOpenAI"),
        "gemini": ("langchain_google_genai", "ChatGoogleGenerativeAI")
    }

    def __init__(self, api_keys: dict):
        """
        api_keys = {
            "openai": "sk-xxx",
            "azure": "AZURE_KEY",
            "gemini": "GEMINI_KEY"
        }
        """
        self.api_keys = api_keys

    def run(self, provider: str, model: str, message: str) -> str:
        """Send *message* to *provider*/*model* and return the reply text.

        Raises:
            ValueError: if *provider* is not one of PROVIDER_MAP's keys
                (clearer than the bare KeyError the lookup would otherwise raise).
        """
        try:
            module_name, class_name = self.PROVIDER_MAP[provider]
        except KeyError:
            raise ValueError(f"Unknown LLM provider {provider!r}; expected one of {sorted(self.PROVIDER_MAP)}")

        # Import the chat-model class dynamically so unused backends need not be installed.
        module = importlib.import_module(module_name)
        LLMClass = getattr(module, class_name)

        # Build provider-specific constructor arguments.
        llm_kwargs = {}
        if provider == "openai":
            llm_kwargs = dict(api_key=self.api_keys.get("openai", ''),
                              model=model,
                              # https://api.python.langchain.com/en/latest/chat_models/langchain_openai.chat_models.base.ChatOpenAI.html#langchain_openai.chat_models.base.ChatOpenAI.temperature
                              temperature=0  # most deterministic
                              )
        elif provider == "azure":
            llm_kwargs = dict(
                api_key=self.api_keys["azure"],
                # TODO(review): hard-coded placeholder endpoint — must come from
                # configuration before the azure backend can actually work.
                azure_endpoint="https://<your-endpoint>.openai.azure.com",
                deployment_name=model
            )
        elif provider == "gemini":
            llm_kwargs = dict(api_key=self.api_keys.get("gemini"), model=model)

        llm = LLMClass(**llm_kwargs)

        # System prompt asks for deterministic, consistently-ordered answers (SYSTEM_MESSAGE above).
        messages = [
            SystemMessage(content=SYSTEM_MESSAGE),
            HumanMessage(content=message)
        ]

        # Synchronous (blocking) call; return just the assistant reply text.
        result = llm.invoke(messages)
        return result.content
| @@ -1,5 +1,6 @@ | ||||
| from abc import abstractmethod | ||||
| from changedetectionio.content_fetchers.base import Fetcher | ||||
| from changedetectionio.processors.LLM import LLM_integrate | ||||
| from changedetectionio.strtobool import strtobool | ||||
| from copy import deepcopy | ||||
| from loguru import logger | ||||
|   | ||||
| @@ -7,7 +7,7 @@ import re | ||||
| import urllib3 | ||||
|  | ||||
| from changedetectionio.conditions import execute_ruleset_against_all_plugins | ||||
| from changedetectionio.processors import difference_detection_processor | ||||
| from changedetectionio.processors import difference_detection_processor, LLM_integrate | ||||
| from changedetectionio.html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text, TRANSLATE_WHITESPACE_TABLE | ||||
| from changedetectionio import html_tools, content_fetchers | ||||
| from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT | ||||
| @@ -293,6 +293,30 @@ class perform_site_check(difference_detection_processor): | ||||
|             # we end up with 'Some text\n\n', sorting will add all those extra \n at the start, so we remove them here. | ||||
|             stripped_text_from_html = stripped_text_from_html.replace("\n\n", "\n") | ||||
|             stripped_text_from_html = '\n'.join(sorted(stripped_text_from_html.splitlines(), key=lambda x: x.lower())) | ||||
| ### OPENAI? | ||||
|  | ||||
|  | ||||
|         # And here we run LLM integration based on the content we received | ||||
|         LLM_keys =  self.datastore.data['settings']['application']['ai'].get('API_keys', {}) | ||||
|         if watch.get('LLM_prompt') and stripped_text_from_html and LLM_keys: | ||||
|             response = "" | ||||
|             try: | ||||
|                 integrator = LLM_integrate(api_keys=LLM_keys) | ||||
|                 response = integrator.run( | ||||
|                     provider="openai", | ||||
|                     model="gpt-4.1", #gpt-4-turbo | ||||
|                     message=f"{watch.get('LLM_prompt')}\n----------- Content follows-----------\n\n{stripped_text_from_html}" | ||||
|                 ) | ||||
|             except Exception as e: | ||||
|                 logger.critical(f"Error running LLM integration {str(e)} (type etc)") | ||||
|                 raise(e) | ||||
|                 x = 1 | ||||
|                 # todo is there something special when tokens are used up etc? | ||||
|             else: | ||||
|                 stripped_text_from_html = response | ||||
|                # logger.trace("LLM done") | ||||
|             finally: | ||||
|                 logger.debug("LLM request done (type etc)") | ||||
|  | ||||
| ### CALCULATE MD5 | ||||
|         # If there's text to ignore | ||||
|   | ||||
							
								
								
									
										
											BIN
										
									
								
								changedetectionio/static/images/open-ai-logo.png
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								changedetectionio/static/images/open-ai-logo.png
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 15 KiB | 
| @@ -21,6 +21,7 @@ function request_textpreview_update() { | ||||
|         namespace: 'watchEdit' | ||||
|     }).done(function (data) { | ||||
|         console.debug(data['duration']) | ||||
|         $('#error-text').text(data['duration']); | ||||
|         $('#filters-and-triggers #text-preview-before-inner').text(data['before_filter']); | ||||
|         $('#filters-and-triggers #text-preview-inner') | ||||
|             .text(data['after_filter']) | ||||
| @@ -37,9 +38,8 @@ function request_textpreview_update() { | ||||
|     }).fail(function (error) { | ||||
|         if (error.statusText === 'abort') { | ||||
|             console.log('Request was aborted due to a new request being fired.'); | ||||
|         } else { | ||||
|             $('#filters-and-triggers #text-preview-inner').text('There was an error communicating with the server.'); | ||||
|         } | ||||
|         $('#error-text').text(error.responseJSON['error']); | ||||
|     }) | ||||
| } | ||||
|  | ||||
|   | ||||
| @@ -1,3 +1,5 @@ | ||||
| @use "_llm-prompt"; | ||||
|  | ||||
| ul#conditions_match_logic { | ||||
|     list-style: none; | ||||
|   input, label, li { | ||||
|   | ||||
							
								
								
									
										59
									
								
								changedetectionio/static/styles/scss/parts/_llm-prompt.scss
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										59
									
								
								changedetectionio/static/styles/scss/parts/_llm-prompt.scss
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,59 @@ | ||||
| #form-quick-watch-add #openai-prompt { | ||||
|    color: var(--color-white); | ||||
| } | ||||
|  | ||||
|  | ||||
| #llm-prompt-all { | ||||
|   .label { | ||||
|     display: block !important; | ||||
|   } | ||||
|  | ||||
|   textarea { | ||||
|     white-space: pre-wrap; | ||||
|     overflow-wrap: break-word; | ||||
|     word-wrap: break-word; /* legacy support */ | ||||
|     font-size: 13px; | ||||
|   } | ||||
|   ul { | ||||
|     list-style: none; | ||||
|     padding-left: 0px; | ||||
|  | ||||
|     li { | ||||
|       display: flex; | ||||
|       align-items: center; | ||||
|       gap: 0.5em; | ||||
|       padding-bottom: 0.3em; | ||||
|  | ||||
|       > * { | ||||
|         margin: 0px; | ||||
|         padding: 0px; | ||||
|       } | ||||
|  | ||||
|       label { | ||||
|         font-weight: normal; | ||||
|       } | ||||
|     } | ||||
|   } | ||||
| } | ||||
|  | ||||
| @media (min-width: 768px) { | ||||
|   #llm-prompt-all { | ||||
|     display: grid; | ||||
|     grid-template-columns: 1fr auto auto; | ||||
|     column-gap: 1.5rem; | ||||
|     align-items: start; | ||||
|  | ||||
|     font-size: 0.9rem; | ||||
|     padding: 0.3rem; | ||||
|  | ||||
|     #llm-prompt { | ||||
|       /* ensure the textarea stretches horizontally */ | ||||
|       width: 100%; | ||||
|  | ||||
|       textarea { | ||||
|         width: 100%; | ||||
|         box-sizing: border-box; | ||||
|       } | ||||
|     } | ||||
|   } | ||||
| } | ||||
| @@ -792,7 +792,9 @@ textarea::placeholder { | ||||
|       border-top-left-radius: 5px; | ||||
|       border-top-right-radius: 5px; | ||||
|       background-color: var(--color-background-tab); | ||||
|  | ||||
|       svg { | ||||
|         stroke: var(--color-text-tab); | ||||
|       } | ||||
|       &:not(.active) { | ||||
|         &:hover { | ||||
|           background-color: var(--color-background-tab-hover); | ||||
| @@ -802,11 +804,13 @@ textarea::placeholder { | ||||
|       &.active, | ||||
|       :target { | ||||
|         background-color: var(--color-background); | ||||
|  | ||||
|         a { | ||||
|           color: var(--color-text-tab-active); | ||||
|           font-weight: bold; | ||||
|         } | ||||
|         svg { | ||||
|           stroke: var(--color-text-tab-active); | ||||
|         } | ||||
|       } | ||||
|  | ||||
|       a { | ||||
|   | ||||
										
											
												File diff suppressed because one or more lines are too long
											
										
									
								
							| @@ -38,6 +38,12 @@ | ||||
|     <script src="{{url_for('static_content', group='js', filename='socket.io.min.js')}}"></script> | ||||
|     <script src="{{url_for('static_content', group='js', filename='realtime.js')}}" defer></script> | ||||
|     {% endif %} | ||||
|   <script> | ||||
|     // Initialize Feather icons after the page loads | ||||
|     document.addEventListener('DOMContentLoaded', function() { | ||||
|         feather.replace(); | ||||
|     }); | ||||
|     </script> | ||||
|   </head> | ||||
|  | ||||
|   <body class=""> | ||||
|   | ||||
| @@ -1,6 +1,6 @@ | ||||
| {% extends 'base.html' %} | ||||
| {% block content %} | ||||
| {% from '_helpers.html' import render_field, render_checkbox_field, render_button, render_time_schedule_form, playwright_warning, only_playwright_type_watches_warning, render_conditions_fieldlist_of_formfields_as_table %} | ||||
| {% from '_helpers.html' import render_field,  render_simple_field, render_checkbox_field, render_button, render_time_schedule_form, playwright_warning, only_playwright_type_watches_warning, render_conditions_fieldlist_of_formfields_as_table %} | ||||
| {% from '_common_fields.html' import render_common_settings_form %} | ||||
| <script src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script> | ||||
| <script src="{{url_for('static_content', group='js', filename='vis.js')}}" defer></script> | ||||
| @@ -52,7 +52,7 @@ | ||||
|         <!-- should goto extra forms? --> | ||||
|             {% if watch['processor'] == 'text_json_diff' %} | ||||
|             <li class="tab"><a id="visualselector-tab" href="#visualselector">Visual Filter Selector</a></li> | ||||
|             <li class="tab" id="filters-and-triggers-tab"><a href="#filters-and-triggers">Filters & Triggers</a></li> | ||||
|             <li class="tab" id="filters-and-triggers-tab"><a href="#filters-and-triggers">AI, Filters & Triggers</a></li> | ||||
|             <li class="tab" id="conditions-tab"><a href="#conditions">Conditions</a></li> | ||||
|             {% endif %} | ||||
|             <li class="tab"><a href="#notifications">Notifications</a></li> | ||||
| @@ -316,7 +316,6 @@ Math: {{ 1 + 1 }}") }} | ||||
|                                 </li> | ||||
|                             </ul> | ||||
|                     </div> | ||||
|  | ||||
| {% include "edit/include_subtract.html" %} | ||||
|                 <div class="text-filtering border-fieldset"> | ||||
|                 <fieldset class="pure-group" id="text-filtering-type-options"> | ||||
| @@ -364,6 +363,7 @@ Math: {{ 1 + 1 }}") }} | ||||
|                           </div> | ||||
|                       </div> | ||||
|                     </div> | ||||
|                   <p id="error-text"></p> | ||||
|             </div> | ||||
|           </div> | ||||
|         </div> | ||||
|   | ||||
| @@ -1,4 +1,7 @@ | ||||
|                     <div class="pure-control-group"> | ||||
|                         <div class="pure-control-group"> | ||||
|                         {%- include 'edit/llm_prompt.html' -%} | ||||
|                         </div> | ||||
|                         <div class="pure-control-group"> | ||||
|                         {% set field = render_field(form.include_filters, | ||||
|                             rows=5, | ||||
|                             placeholder=has_tag_filters_extra+"#example | ||||
|   | ||||
							
								
								
									
										12
									
								
								changedetectionio/templates/edit/llm_prompt.html
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										12
									
								
								changedetectionio/templates/edit/llm_prompt.html
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,12 @@ | ||||
|                    <div class="pure-control-group" id="ai-filter-options"> | ||||
|                         <div id="openai-prompt"> | ||||
|                             <div id="llm-prompt-all"> | ||||
|                                 <div id="llm-prompt"> | ||||
|                                     {{ render_simple_field(form.LLM_prompt, rows=5) }} | ||||
|                                 </div> | ||||
|                                 <div id="llm-send-type"> | ||||
|                                     {{ render_simple_field(form.LLM_send_type) }} | ||||
|                                 </div> | ||||
|                             </div> | ||||
|                         </div> | ||||
|                     </div> | ||||
| @@ -69,6 +69,9 @@ werkzeug==3.0.6 | ||||
| # Templating, so far just in the URLs but in the future can be for the notifications also | ||||
| jinja2~=3.1 | ||||
| jinja2-time | ||||
| langchain~=0.3 | ||||
| langchain-openai~=0.3 | ||||
|  | ||||
| openpyxl | ||||
| # https://peps.python.org/pep-0508/#environment-markers | ||||
| # https://github.com/dgtlmoon/changedetection.io/pull/1009 | ||||
|   | ||||
		Reference in New Issue
	
	Block a user