Mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2025-11-14 13:36:09 +00:00)

Compare commits: 0.50.10...loguru-twe (1 commit)
Commit SHA1: 6020bdceee
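The commit on this branch replaces the mixed print()/self.app.logger calls shown in the hunks below with loguru-style logger calls and exposes loguru's severity names (TRACE, DEBUG, INFO, SUCCESS, WARNING, ERROR, CRITICAL) on the command line. For orientation, a minimal loguru usage sketch; this is illustrative only and not code from this repository:

# Minimal loguru sketch: one ready-made logger with levelled calls, no handler setup needed.
from loguru import logger

logger.debug("fine-grained detail")
logger.info("normal progress message")
logger.success("a step completed")      # SUCCESS is a loguru-specific level
logger.warning("something looks off")
logger.error("a recoverable failure")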
@@ -60,7 +60,7 @@ def main():
     try:
         opts, args = getopt.getopt(sys.argv[1:], "6Ccsd:h:p:l:", "port")
     except getopt.GetoptError:
-        print('backend.py -s SSL enable -h [host] -p [port] -d [datastore path] -l [debug level]')
+        print('backend.py -s SSL enable -h [host] -p [port] -d [datastore path] -l [debug level - TRACE, DEBUG(default), INFO, SUCCESS, WARNING, ERROR, CRITICAL]')
         sys.exit(2)

     create_datastore_dir = False
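The updated usage string spells out the levels that -l accepts. A hedged sketch of how such a flag could be mapped onto loguru's sink configuration; the setup_logging() helper and its wiring are illustrative assumptions, not the project's actual option handling:

# Illustrative sketch: map a -l LEVEL option onto loguru's sink level.
# Option letters follow the usage string above; the helper name is invented.
import getopt
import sys

from loguru import logger


def setup_logging(argv):
    level = "DEBUG"                      # the usage string names DEBUG as the default
    opts, _args = getopt.getopt(argv, "6Ccsd:h:p:l:", "port")
    for opt, arg in opts:
        if opt == '-l':
            level = arg.upper()          # e.g. "trace" -> "TRACE"
    logger.remove()                      # drop the default sink
    logger.add(sys.stderr, level=level)  # re-add stderr at the requested level


setup_logging(["-l", "INFO"])
logger.info("logging configured at INFO")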
@@ -211,7 +211,7 @@ class model(dict):
         # Read the history file as a dict
         fname = os.path.join(self.watch_data_dir, "history.txt")
         if os.path.isfile(fname):
-            logger.debug("Reading watch history index")
+            logger.debug(f"Reading watch history index for {self.get('uuid')}")
             with open(fname, "r") as f:
                 for i in f.readlines():
                     if ',' in i:
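The hunk above only changes the log line, but for orientation: history.txt is read line by line and comma-separated entries are collected into a dict. A rough sketch of that pattern, assuming a "timestamp,snapshot-path" layout per line; the field meaning and the read_history_index() helper are assumptions made for illustration:

# Sketch: read a "timestamp,path" index file into a dict, skipping malformed lines.
import os

from loguru import logger


def read_history_index(watch_data_dir, uuid="example-uuid"):
    history = {}
    fname = os.path.join(watch_data_dir, "history.txt")
    if os.path.isfile(fname):
        logger.debug(f"Reading watch history index for {uuid}")
        with open(fname, "r") as f:
            for line in f:
                if ',' in line:
                    timestamp, path = line.strip().split(',', 1)
                    history[timestamp] = path
    return history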
@@ -70,7 +70,7 @@ class difference_detection_processor():
         proxy_url = None
         if preferred_proxy_id:
             proxy_url = self.datastore.proxy_list.get(preferred_proxy_id).get('url')
-            logger.debug(f"Using proxy Key: {preferred_proxy_id} as Proxy URL {proxy_url}")
+            logger.debug(f"Selected proxy key '{preferred_proxy_id}' as proxy URL '{proxy_url}' for {url}")

         # Now call the fetcher (playwright/requests/etc) with arguments that only a fetcher would need.
         # When browser_connection_url is None, it method should default to working out whats the best defaults (os env vars etc)
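The new debug message interpolates the proxy key, the resolved proxy URL, and the target URL with an f-string. As a design note, loguru also accepts str.format-style arguments, in which case the message is not built at all when no sink is configured for DEBUG. A small illustrative sketch; the dictionary contents are made up:

# Sketch: the same message with loguru's deferred {} formatting instead of an f-string.
from loguru import logger

preferred_proxy_id = "proxy-one"    # illustrative values, not project data
proxy_list = {"proxy-one": {"url": "socks5://127.0.0.1:1080"}}
url = "https://example.com"

proxy_url = proxy_list.get(preferred_proxy_id, {}).get('url')
logger.debug("Selected proxy key '{}' as proxy URL '{}' for {}", preferred_proxy_id, proxy_url, url)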
@@ -83,7 +83,7 @@ class ChangeDetectionStore:
                 for uuid, watch in self.__data['watching'].items():
                     watch['uuid']=uuid
                     self.__data['watching'][uuid] = Watch.model(datastore_path=self.datastore_path, default=watch)
-                    logger.debug(f"Watching: {uuid} {self.__data['watching'][uuid]['url']}")
+                    logger.info(f"Watching: {uuid} {self.__data['watching'][uuid]['url']}")

         # First time ran, Create the datastore.
         except (FileNotFoundError):
@@ -616,7 +616,7 @@ class ChangeDetectionStore:

         for uuid, tag in self.__data['settings']['application'].get('tags', {}).items():
             if n == tag.get('title', '').lower().strip():
-                logger.error(f">>> Tag {name} already exists")
+                logger.warning(f"Tag '{name}' already exists, skipping creation.")
                 return uuid

         # Eventually almost everything todo with a watch will apply as a Tag
@@ -220,7 +220,8 @@ class update_worker(threading.Thread):
     def run(self):

         from .processors import text_json_diff, restock_diff
+        now = time.time()

         while not self.app.config.exit.is_set():
             update_handler = None

@@ -232,13 +233,12 @@ class update_worker(threading.Thread):
             else:
                 uuid = queued_item_data.item.get('uuid')
                 self.current_uuid = uuid

                 if uuid in list(self.datastore.data['watching'].keys()) and self.datastore.data['watching'][uuid].get('url'):
                     changed_detected = False
                     contents = b''
                     process_changedetection_results = True
                     update_obj = {}
-                    logger.debug(f"> Processing UUID {uuid} "
+                    logger.info(f"Processing watch UUID {uuid} "
                                 f"Priority {queued_item_data.priority} "
                                 f"URL {self.datastore.data['watching'][uuid]['url']}")
                     now = time.time()
@@ -280,7 +280,8 @@ class update_worker(threading.Thread):
                         if not isinstance(contents, (bytes, bytearray)):
                             raise Exception("Error - returned data from the fetch handler SHOULD be bytes")
                     except PermissionError as e:
-                        self.app.logger.error("File permission error updating", uuid, str(e))
+                        logger.critical(f"File permission error updating file, watch: {uuid}")
+                        logger.critical(str(e))
                         process_changedetection_results = False
                     except content_fetcher.ReplyWithContentButNoText as e:
                         # Totally fine, it's by choice - just continue on, nothing more to care about
@@ -428,11 +429,13 @@ class update_worker(threading.Thread):
                         process_changedetection_results = False
                     except UnableToExtractRestockData as e:
                         # Usually when fetcher.instock_data returns empty
-                        self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
+                        logger.error(f"Exception (UnableToExtractRestockData) reached processing watch UUID: {uuid}")
+                        logger.error(str(e))
                         self.datastore.update_watch(uuid=uuid, update_obj={'last_error': f"Unable to extract restock data for this page unfortunately. (Got code {e.status_code} from server)"})
                         process_changedetection_results = False
                     except Exception as e:
-                        self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
+                        logger.error(f"Exception reached processing watch UUID: {uuid}")
+                        logger.error(str(e))
                         self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})
                         # Other serious error
                         process_changedetection_results = False
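These two hunks swap the %-style self.app.logger.error() calls for plain loguru calls that log a one-line summary followed by the exception text. A hedged sketch of the same pattern outside the worker, including loguru's logger.exception() which additionally records the traceback; process_watch() here is a stand-in, not project code:

# Sketch: log a caught exception as a summary line plus the exception text,
# and the logger.exception() alternative that also captures the traceback.
from loguru import logger


def process_watch(uuid):
    raise RuntimeError("simulated fetch failure")   # stand-in for the real work


uuid = "example-uuid"
try:
    process_watch(uuid)
except Exception as e:
    logger.error(f"Exception reached processing watch UUID: {uuid}")
    logger.error(str(e))
    # Alternatively, a single call that attaches the traceback to the record:
    logger.exception(f"Exception reached processing watch UUID: {uuid}")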
@@ -478,9 +481,8 @@ class update_worker(threading.Thread):

                 except Exception as e:
                     # Catch everything possible here, so that if a worker crashes, we don't lose it until restart!
-                    logger.critical("!!!! Exception in update_worker !!!")
+                    logger.critical("!!!! Exception in update_worker while processing process_changedetection_results !!!")
                     logger.critical(str(e))
-                    self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
                     self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})

                 if self.datastore.data['watching'].get(uuid):
@@ -500,6 +502,7 @@ class update_worker(threading.Thread):

                 self.current_uuid = None # Done
                 self.q.task_done()
+                logger.debug(f"Watch {uuid} done in {time.time()-now:.2f}s")

             # Give the CPU time to interrupt
             time.sleep(0.1)
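The new debug line, paired with the now = time.time() timestamps taken earlier in the loop, reports a wall-clock duration for each processed watch. The pattern in isolation, with an invented handle_item() standing in for the real fetch-and-diff work:

# Sketch: measure and log how long one unit of work took, as in the worker loop above.
import time

from loguru import logger


def handle_item(uuid):
    time.sleep(0.25)    # stand-in for fetching and diffing the watch


uuid = "example-uuid"
now = time.time()
handle_item(uuid)
logger.debug(f"Watch {uuid} done in {time.time()-now:.2f}s")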