Compare commits

...

11 Commits

Author SHA1 Message Date
dgtlmoon
02160fcbb5 tweaks 2024-06-07 10:52:33 +02:00
dgtlmoon
89bdc4d8ab Refactor the wait-for-check 2024-06-07 10:38:42 +02:00
dgtlmoon
ccd6f1cb3f hmm 2024-06-07 09:52:53 +02:00
dgtlmoon
51ea6111b1 Merge branch 'master' into test-speedups 2024-06-07 09:18:03 +02:00
dgtlmoon
35f9bdc8b0 NFI 2024-06-07 09:14:40 +02:00
dgtlmoon
2ebb2b0013 just needs basic delay 2024-06-07 09:05:07 +02:00
dgtlmoon
37bb086bd8 bring back delay 2024-06-07 09:04:29 +02:00
dgtlmoon
1dd5c6fd4d tweaks 2024-06-07 08:57:28 +02:00
dgtlmoon
7285913b34 removing some delays 2024-06-07 08:20:24 +02:00
dgtlmoon
8104b63775 also bump timestamp along 1 sec 2024-06-06 16:09:19 +02:00
dgtlmoon
75f5faa02a improving unique key fix 2024-06-06 15:49:39 +02:00
19 changed files with 47 additions and 72 deletions

View File

@@ -45,7 +45,6 @@ running_update_threads = []
ticker_thread = None
extra_stylesheets = []
update_q = queue.PriorityQueue()
notification_q = queue.Queue()
@@ -1538,6 +1537,11 @@ def changedetection_app(config=None, datastore_o=None):
# paste in etc
return redirect(url_for('index'))
@app.route("/queue_size", methods=['GET'])
@login_optionally_required
def get_queue_size():
return update_q.qsize()
@app.route("/highlight_submit_ignore_url", methods=['POST'])
@login_optionally_required
def highlight_submit_ignore_url():

View File

@@ -217,6 +217,8 @@ class model(dict):
fname = os.path.join(self.watch_data_dir, "history.txt")
if os.path.isfile(fname):
logger.debug(f"Reading watch history index for {self.get('uuid')}")
with open(fname, "r") as f:
for i in f.readlines():
if ',' in i:

View File

@@ -53,7 +53,6 @@ def test_restock_detection(client, live_server):
set_original_response()
#assert os.getenv('PLAYWRIGHT_DRIVER_URL'), "Needs PLAYWRIGHT_DRIVER_URL set for this test"
time.sleep(1)
live_server_setup(live_server)
#####################
notification_url = url_for('test_notification_endpoint', _external=True).replace('http://localhost', 'http://changedet').replace('http', 'json')

View File

@@ -40,8 +40,6 @@ def test_setup(client, live_server):
def test_check_removed_line_contains_trigger(client, live_server):
# Give the endpoint time to spin up
time.sleep(1)
set_original()
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)

View File

@@ -2,13 +2,12 @@
import time
from flask import url_for
from . util import live_server_setup
from .util import live_server_setup, wait_for_all_checks
def test_basic_auth(client, live_server):
live_server_setup(live_server)
# Give the endpoint time to spin up
time.sleep(1)
# Add our URL to the import page
test_url = url_for('test_basicauth_method', _external=True).replace("//","//myuser:mypass@")
@@ -19,7 +18,7 @@ def test_basic_auth(client, live_server):
follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(1)
wait_for_all_checks(client)
# Check form validation
res = client.post(
@@ -29,7 +28,7 @@ def test_basic_auth(client, live_server):
)
assert b"Updated watch." in res.data
time.sleep(1)
wait_for_all_checks(client)
res = client.get(
url_for("preview_page", uuid="first"),
follow_redirects=True

View File

@@ -81,7 +81,7 @@ def test_setup(client, live_server):
# actually only really used by the distill.io importer, but could be handy too
def test_check_ldjson_price_autodetect(client, live_server):
#live_server_setup(live_server)
set_response_with_ldjson()
# Add our URL to the import page
@@ -104,8 +104,6 @@ def test_check_ldjson_price_autodetect(client, live_server):
client.get(url_for('price_data_follower.accept', uuid=uuid, follow_redirects=True))
wait_for_all_checks(client)
# Trigger a check
time.sleep(1)
client.get(url_for("form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
# Offer should be gone
@@ -115,6 +113,9 @@ def test_check_ldjson_price_autodetect(client, live_server):
# and last snapshot (via API) should be just the price
api_key = extract_api_key_from_UI(client)
# Time for writes to happen to history text
time.sleep(0.5)
res = client.get(
url_for("watchsinglehistory", uuid=uuid, timestamp='latest'),
headers={'x-api-key': api_key},

View File

@@ -13,9 +13,6 @@ def test_backup(client, live_server):
set_original_response()
# Give the endpoint time to spin up
time.sleep(1)
# Add our URL to the import page
res = client.post(
url_for("import_page"),

View File

@@ -68,8 +68,6 @@ def test_check_block_changedetection_text_NOT_present(client, live_server):
set_original_ignore_response()
# Give the endpoint time to spin up
time.sleep(1)
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)

View File

@@ -10,9 +10,6 @@ def test_trigger_functionality(client, live_server):
live_server_setup(live_server)
# Give the endpoint time to spin up
time.sleep(1)
# Add our URL to the import page
res = client.post(
url_for("import_page"),

View File

@@ -77,9 +77,6 @@ def test_check_markup_include_filters_restriction(client, live_server):
set_original_response()
# Give the endpoint time to spin up
time.sleep(1)
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
res = client.post(
@@ -89,8 +86,7 @@ def test_check_markup_include_filters_restriction(client, live_server):
)
assert b"1 Imported" in res.data
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
wait_for_all_checks(client)
# Goto the edit page, add our ignore text
# Add our URL to the import page
@@ -100,22 +96,22 @@ def test_check_markup_include_filters_restriction(client, live_server):
follow_redirects=True
)
assert b"Updated watch." in res.data
time.sleep(1)
wait_for_all_checks(client)
# Check it saved
res = client.get(
url_for("edit_page", uuid="first"),
)
assert bytes(include_filters.encode('utf-8')) in res.data
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
wait_for_all_checks(client)
# Make a change
set_modified_response()
# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
wait_for_all_checks(client)
# It should have 'unviewed' still
# Because it should be looking at only that 'sametext' id
@@ -138,8 +134,6 @@ def test_check_multiple_filters(client, live_server):
</html>
""")
# Give the endpoint time to spin up
time.sleep(1)
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
@@ -149,7 +143,7 @@ def test_check_multiple_filters(client, live_server):
follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(1)
wait_for_all_checks(client)
# Goto the edit page, add our ignore text
# Add our URL to the import page
@@ -164,9 +158,7 @@ def test_check_multiple_filters(client, live_server):
)
assert b"Updated watch." in res.data
# Give the thread time to pick it up
time.sleep(sleep_time_for_fetch_thread)
wait_for_all_checks(client)
res = client.get(
url_for("preview_page", uuid="first"),
follow_redirects=True

View File

@@ -5,7 +5,7 @@ import time
from flask import url_for
from ..html_tools import *
from .util import live_server_setup
from .util import live_server_setup, wait_for_all_checks
def test_setup(live_server):
@@ -111,16 +111,13 @@ def test_element_removal_full(client, live_server):
set_original_response()
# Give the endpoint time to spin up
time.sleep(1)
# Add our URL to the import page
test_url = url_for("test_endpoint", _external=True)
res = client.post(
url_for("import_page"), data={"urls": test_url}, follow_redirects=True
)
assert b"1 Imported" in res.data
time.sleep(1)
wait_for_all_checks(client)
# Goto the edit page, add the filter data
# Not sure why \r needs to be added - absent of the #changetext this is not necessary
subtractive_selectors_data = "header\r\nfooter\r\nnav\r\n#changetext"

View File

@@ -27,9 +27,6 @@ def set_html_response():
def test_check_encoding_detection(client, live_server):
set_html_response()
# Give the endpoint time to spin up
time.sleep(1)
# Add our URL to the import page
test_url = url_for('test_endpoint', content_type="text/html", _external=True)
client.post(
@@ -56,9 +53,6 @@ def test_check_encoding_detection(client, live_server):
def test_check_encoding_detection_missing_content_type_header(client, live_server):
set_html_response()
# Give the endpoint time to spin up
time.sleep(1)
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
client.post(

View File

@@ -64,8 +64,6 @@ def test_http_error_handler(client, live_server):
# Just to be sure error text is properly handled
def test_DNS_errors(client, live_server):
# Give the endpoint time to spin up
time.sleep(1)
# Add our URL to the import page
res = client.post(
@@ -89,8 +87,6 @@ def test_DNS_errors(client, live_server):
# Re 1513
def test_low_level_errors_clear_correctly(client, live_server):
#live_server_setup(live_server)
# Give the endpoint time to spin up
time.sleep(1)
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write("<html><body><div id=here>Hello world</div></body></html>")

View File

@@ -48,8 +48,6 @@ def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_se
live_server_setup(live_server)
# Give the endpoint time to spin up
time.sleep(1)
set_response_without_filter()
# Add our URL to the import page

View File

@@ -23,8 +23,6 @@ def set_response_with_filter():
def run_filter_test(client, content_filter):
# Give the endpoint time to spin up
time.sleep(1)
# cleanup for the next
client.get(
url_for("form_delete", uuid="all"),
@@ -105,6 +103,10 @@ def run_filter_test(client, content_filter):
wait_for_all_checks(client)
client.get(url_for("form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
# Give apprise time to fire
time.sleep(3)
# Now it should exist and contain our "filter not found" alert
assert os.path.isfile("test-datastore/notification.txt")
@@ -124,6 +126,9 @@ def run_filter_test(client, content_filter):
client.get(url_for("form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
# Give apprise time to fire
time.sleep(3)
# It should have sent a notification, but..
assert os.path.isfile("test-datastore/notification.txt")
# but it should not contain the info about a failed filter (because there was none in this case)
@@ -149,11 +154,13 @@ def test_setup(live_server):
live_server_setup(live_server)
def test_check_include_filters_failure_notification(client, live_server):
#live_server_setup(live_server)
set_original_response()
wait_for_all_checks(client)
run_filter_test(client, '#nope-doesnt-exist')
def test_check_xpath_filter_failure_notification(client, live_server):
# live_server_setup(live_server)
set_original_response()
time.sleep(1)
run_filter_test(client, '//*[@id="nope-doesnt-exist"]')

View File

@@ -11,8 +11,6 @@ from urllib.parse import urlparse, parse_qs
def test_consistent_history(client, live_server):
live_server_setup(live_server)
# Give the endpoint time to spin up
time.sleep(1)
r = range(1, 50)
for one in r:

View File

@@ -45,9 +45,6 @@ def test_render_anchor_tag_content_true(client, live_server):
render_anchor_tag_content setting is set to true"""
sleep_time_for_fetch_thread = 3
# Give the endpoint time to spin up
time.sleep(1)
# set original html text
set_original_ignore_response()

View File

@@ -120,18 +120,18 @@ def extract_UUID_from_client(client):
uuid = m.group(1)
return uuid.strip()
def wait_for_all_checks(client):
# Loop waiting until done..
attempt=0
time.sleep(0.1)
while attempt < 60:
time.sleep(1)
res = client.get(url_for("index"))
if not b'Checking now' in res.data:
break
logging.getLogger().info("Waiting for watch-list to not say 'Checking now'.. {}".format(attempt))
attempt += 1
def wait_for_all_checks(client):
now = time.time()
while time.time() - now <= 30:
time.sleep(0.1)
p = client.application.view_functions['get_queue_size']()
if not p:
break
logging.getLogger().info(f"Waiting for queue to be empty, queue size {p} - {time.time() - now}")
# Empty queue still means that one could be processing, give time for the processing to complete
time.sleep(0.2)
def live_server_setup(live_server):

View File

@@ -160,8 +160,8 @@ class update_worker(threading.Thread):
def send_filter_failure_notification(self, watch_uuid):
threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts')
logger.trace(f"Watch UUID: {watch_uuid} - Sending filter failure notification - threshold attempts {threshold}")
watch = self.datastore.data['watching'].get(watch_uuid)
if not watch:
return
@@ -335,6 +335,7 @@ class update_worker(threading.Thread):
process_changedetection_results = False
except FilterNotFoundInResponse as e:
logger.debug(f"Watch UUID: {uuid} - Got FilterNotFoundInResponse exception, Consecutive failures - {self.datastore.data['watching'][uuid].get('consecutive_filter_failures', 5)} - handling..")
if not self.datastore.data['watching'].get(uuid):
continue
@@ -543,4 +544,4 @@ class update_worker(threading.Thread):
# Give the CPU time to interrupt
time.sleep(0.1)
self.app.config.exit.wait(1)
self.app.config.exit.wait(0.5)