Compare commits

..

1 Commits

Author SHA1 Message Date
dgtlmoon
08c9b55e0f Attempt to save last data retrieved- WIP 2022-08-24 09:33:14 +02:00
4 changed files with 28 additions and 6 deletions

View File

@@ -503,7 +503,7 @@ def changedetection_app(config=None, datastore_o=None):
from changedetectionio import fetch_site_status
# Get the most recent one
newest_history_key = datastore.data['watching'][uuid].get('newest_history_key')
newest_history_key = datastore.get_val(uuid, 'newest_history_key')
# 0 means that theres only one, so that there should be no 'unviewed' history available
if newest_history_key == 0:

View File

@@ -15,6 +15,7 @@ urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
class perform_site_check():
screenshot = None
xpath_data = None
fetched_response = None
def __init__(self, *args, datastore, **kwargs):
super().__init__(*args, **kwargs)
@@ -63,11 +64,13 @@ class perform_site_check():
def run(self, uuid):
timestamp = int(time.time()) # used for storage etc too
changed_detected = False
screenshot = False # as bytes
stripped_text_from_html = ""
watch = self.datastore.data['watching'].get(uuid)
watch = self.datastore.data['watching'][uuid]
# Protect against file:// access
if re.search(r'^file', watch['url'], re.IGNORECASE) and not os.getenv('ALLOW_FILE_URI', False):
@@ -78,7 +81,7 @@ class perform_site_check():
# Unset any existing notification error
update_obj = {'last_notification_error': False, 'last_error': False}
extra_headers =self.datastore.data['watching'][uuid].get('headers')
extra_headers = self.datastore.get_val(uuid, 'headers')
# Tweak the base config with the per-watch ones
request_headers = self.datastore.data['settings']['headers'].copy()
@@ -91,9 +94,9 @@ class perform_site_check():
request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '')
timeout = self.datastore.data['settings']['requests']['timeout']
url = watch.get('url')
request_body = self.datastore.data['watching'][uuid].get('body')
request_method = self.datastore.data['watching'][uuid].get('method')
url = self.datastore.get_val(uuid, 'url')
request_body = self.datastore.get_val(uuid, 'body')
request_method = self.datastore.get_val(uuid, 'method')
ignore_status_codes = self.datastore.data['watching'][uuid].get('ignore_status_codes', False)
# source: support
@@ -129,6 +132,7 @@ class perform_site_check():
self.screenshot = fetcher.screenshot
self.xpath_data = fetcher.xpath_data
self.fetched_response = fetcher.content
# Fetching complete, now filters
# @todo move to class / maybe inside of fetcher abstract base?

View File

@@ -244,6 +244,10 @@ class ChangeDetectionStore:
return False
def get_val(self, uuid, val):
    """Return a single stored setting for the watch identified by *uuid*.

    :param uuid: key of the watch inside ``self.data['watching']``
    :param val: name of the setting to read
    :return: the setting's value, or ``None`` when either the watch or
             the key does not exist (previously an unknown uuid raised
             ``KeyError``; returning ``None`` matches the defensive
             style used elsewhere in this store).
    """
    # Probably there should be a dedicated accessor/dict wrapper for watch data... @todo
    return self.data['watching'].get(uuid, {}).get(val)
# Remove a watch's data but keep the entry (URL etc)
def clear_watch_history(self, uuid):
import pathlib
@@ -371,6 +375,17 @@ class ChangeDetectionStore:
f.write(json.dumps(data))
f.close()
# Save whatever was returned from the fetcher
def save_last_response(self, watch_uuid, data):
    """Persist the raw body returned by the fetcher for a watch.

    Writes ``<datastore_path>/<watch_uuid>/last-response.bin``.
    Silently does nothing when the watch no longer exists (it may have
    been deleted while the update was running).

    :param watch_uuid: UUID of the watch the response belongs to
    :param data: response body; ``bytes`` preferred — a ``str`` is
                 encoded as UTF-8 (some fetchers return decoded text,
                 which would otherwise fail on a binary-mode write).
    """
    if not self.data['watching'].get(watch_uuid):
        return
    target_path = os.path.join(self.datastore_path, watch_uuid, "last-response.bin")
    # mimetype? binary? text? @todo
    # gzip if it's non-binary? auto-detect encoding?
    if isinstance(data, str):
        data = data.encode('utf-8')
    # 'with' closes the handle on exit; the explicit f.close() inside the
    # block was redundant and has been removed.
    with open(target_path, 'wb') as f:
        f.write(data)
def sync_to_json(self):
logging.info("Saving JSON..")

View File

@@ -286,6 +286,9 @@ class update_worker(threading.Thread):
self.datastore.save_screenshot(watch_uuid=uuid, screenshot=update_handler.screenshot)
if update_handler.xpath_data:
self.datastore.save_xpath_data(watch_uuid=uuid, data=update_handler.xpath_data)
if update_handler.fetched_response:
# @todo mimetype?
self.datastore.save_last_response(watch_uuid=uuid, data=update_handler.fetched_response)
self.current_uuid = None # Done