Compare commits

...

2 Commits

Author SHA1 Message Date
dgtlmoon
343e371032 Fix dupes in request test 2022-01-25 10:50:25 +01:00
dgtlmoon
2fa1d48f01 Don't allow duplicate URLs on import re #377 2022-01-25 10:22:39 +01:00
2 changed files with 1 addition and 25 deletions

View File

@@ -629,7 +629,7 @@ def changedetection_app(config=None, datastore_o=None):
for url in urls:
url = url.strip()
# Flask wtform validators wont work with basic auth, use validators package
if len(url) and validators.url(url):
if len(url) and validators.url(url) and not datastore.url_exists(url):
new_uuid = datastore.add_watch(url=url.strip(), tag="")
# Straight into the queue.
update_q.put(new_uuid)

View File

@@ -20,13 +20,6 @@ def test_headers_in_request(client, live_server):
)
assert b"1 Imported" in res.data
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
cookie_header = '_ga=GA1.2.1022228332; cookie-preferences=analytics:accepted;'
@@ -42,7 +35,6 @@ def test_headers_in_request(client, live_server):
)
assert b"Updated watch." in res.data
# Give the thread time to pick up the first version
time.sleep(5)
@@ -77,14 +69,6 @@ def test_body_in_request(client, live_server):
# Add our URL to the import page
test_url = url_for('test_body', _external=True)
# Add the test URL twice, we will check
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
res = client.post(
url_for("import_page"),
data={"urls": test_url},
@@ -146,14 +130,6 @@ def test_method_in_request(client, live_server):
# Add our URL to the import page
test_url = url_for('test_method', _external=True)
# Add the test URL twice, we will check
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
res = client.post(
url_for("import_page"),
data={"urls": test_url},