Mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2025-11-01 07:08:47 +00:00)

Compare commits: markup-dep...1646-clone (7 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | d529f62845 |  |
|  | 43e086ee6f |  |
|  | df54f6a309 |  |
|  | 8369a09930 |  |
|  | 4143b1d676 |  |
|  | 36d5dbbb95 |  |
|  | f81a7b3438 |  |
@@ -219,13 +219,15 @@ class CreateWatch(Resource):

         extras = copy.deepcopy(json_data)

-        # Because we renamed 'tag' to 'tags' but dont want to change the API (can do this in v2 of the API)
+        # Because we renamed 'tag' to 'tags' but don't want to change the API (can do this in v2 of the API)
+        tags = None
         if extras.get('tag'):
-            extras['tags'] = extras.get('tag')
+            tags = extras.get('tag')
             del extras['tag']

         del extras['url']

-        new_uuid = self.datastore.add_watch(url=url, extras=extras)
+        new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags)
         if new_uuid:
             self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid, 'skip_when_checksum_same': True}))
             return {'uuid': new_uuid}, 201
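With this change the v1 API keeps accepting the legacy 'tag' field but forwards it to `datastore.add_watch()` as `tag=` instead of copying it into `extras['tags']`. A minimal client-side sketch, not part of this diff; the base URL and API key are placeholders, and the endpoint path and header are assumptions about a locally running instance:

```python
# Hypothetical client call; adjust API_BASE and API_KEY for your instance.
import requests

API_BASE = "http://localhost:5000/api/v1"  # assumption: default local port
API_KEY = "your-api-key"                   # assumption: taken from the settings UI

res = requests.post(
    f"{API_BASE}/watch",
    headers={"x-api-key": API_KEY, "Content-Type": "application/json"},
    json={"url": "https://example.com", "tag": "nice stuff"},  # legacy 'tag' still accepted
)
print(res.status_code, res.json())  # the handler above returns {'uuid': ...} with HTTP 201
```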
@@ -76,6 +76,16 @@ def construct_blueprint(datastore: ChangeDetectionStore):

         flash(f"Tag unlinked removed from {unlinked} watches")
         return redirect(url_for('tags.tags_overview_page'))

+    @tags_blueprint.route("/delete_all", methods=['GET'])
+    @login_optionally_required
+    def delete_all():
+        for watch_uuid, watch in datastore.data['watching'].items():
+            watch['tags'] = []
+        datastore.data['settings']['application']['tags'] = {}
+
+        flash(f"All tags deleted")
+        return redirect(url_for('tags.tags_overview_page'))
+
     @tags_blueprint.route("/edit/<string:uuid>", methods=['GET'])
     @login_optionally_required
     def form_tag_edit(uuid):
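The new `delete_all` route clears the tag list from every watch and empties the tag registry. A hedged sketch of how a test could hit it, mirroring the assertions added further down in this compare (assumes the project's usual `client`/`live_server` fixtures):

```python
from flask import url_for

def test_delete_all_tags(client, live_server):
    # Hitting the new endpoint should wipe all tags and flash a confirmation.
    res = client.get(url_for("tags.delete_all"), follow_redirects=True)
    assert b'All tags deleted' in res.data
```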
@@ -85,7 +85,8 @@ class import_distill_io_json(Importer):

         now = time.time()
         self.new_uuids=[]

         # @todo Use JSONSchema like in the API to validate here.

         try:
             data = json.loads(data.strip())
         except json.decoder.JSONDecodeError:
@@ -120,11 +121,8 @@ class import_distill_io_json(Importer):

             except IndexError:
                 pass

             # Does this need to be here anymore?
-            if d.get('tags', False):
-                extras['tags'] = ", ".join(d['tags'])

             new_uuid = datastore.add_watch(url=d['uri'].strip(),
                                            tag=",".join(d.get('tags', [])),
                                            extras=extras,
                                            write_to_disk_now=False)
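For the Distill import path, tag names now travel only as the comma-joined `tag=` string and are resolved to tag UUIDs inside `add_watch()` (via `add_tag()`), rather than also being written into `extras['tags']`. An illustrative snippet with a made-up entry shaped like what the importer reads; `uri` and `tags` are the field names visible in the diff, everything else in a real Distill export is omitted:

```python
# Hypothetical import entry, only the fields used by the importer above.
d = {
    "uri": "https://example.com/page",
    "tags": ["nice stuff", "nerd-news"],
}

# This is the string handed to add_watch(tag=...); add_watch() splits it and
# turns each name into a tag UUID.
tag_string = ",".join(d.get('tags', []))
print(tag_string)  # -> "nice stuff,nerd-news"
```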
@@ -205,10 +205,9 @@ class ChangeDetectionStore:

     # Clone a watch by UUID
     def clone(self, uuid):
-        url = self.data['watching'][uuid]['url']
-        tag = self.data['watching'][uuid].get('tags',[])
+        url = self.data['watching'][uuid].get('url')
         extras = self.data['watching'][uuid]
-        new_uuid = self.add_watch(url=url, tag_uuids=tag, extras=extras)
+        new_uuid = self.add_watch(url=url, extras=extras)
         return new_uuid

     def url_exists(self, url):
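Because `extras` here is the full source watch record, its `tags` list of tag UUIDs already comes along with the clone; passing the same list again via `tag_uuids=` only risked duplicating them. A small conceptual sketch, not project code, with placeholder UUIDs:

```python
# Placeholder data standing in for a watch entry in the datastore.
source_watch = {
    "url": "https://example.com",
    "tags": ["tag-uuid-1", "tag-uuid-2"],
}

extras = dict(source_watch)      # what clone() passes as extras=
assert extras.get("tags") == ["tag-uuid-1", "tag-uuid-2"]
# add_watch() keeps (and de-duplicates) extras['tags'] itself, so no separate
# tag_uuids= argument is needed for a straight clone.
```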
@@ -248,12 +247,9 @@ class ChangeDetectionStore:

         if extras is None:
             extras = {}

-        # should always be str
-        if tag is None or not tag:
-            tag = ''

         # Incase these are copied across, assume it's a reference and deepcopy()
         apply_extras = deepcopy(extras)
+        apply_extras['tags'] = [] if not apply_extras.get('tags') else apply_extras.get('tags')

         # Was it a share link? try to fetch the data
         if (url.startswith("https://changedetection.io/share/")):
@@ -303,20 +299,22 @@ class ChangeDetectionStore:

             flash('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX', 'error')
             return None

         # #Re 569
-        # Could be in 'tags', var or extras, smash them together and strip
-        apply_extras['tags'] = []
-        if tag or extras.get('tags'):
-            tags = list(filter(None, list(set().union(tag.split(','), extras.get('tags', '').split(',')))))
-            for t in list(map(str.strip, tags)):
+        if tag and type(tag) == str:
+            # Then it's probably a string of the actual tag by name, split and add it
+            for t in tag.split(','):
                 # for each stripped tag, add tag as UUID
-                apply_extras['tags'].append(self.add_tag(t))
+                for a_t in t.split(','):
+                    tag_uuid = self.add_tag(a_t)
+                    apply_extras['tags'].append(tag_uuid)

         # Or if UUIDs given directly
         if tag_uuids:
             apply_extras['tags'] = list(set(apply_extras['tags'] + tag_uuids))

+        # Make any uuids unique
+        if apply_extras.get('tags'):
+            apply_extras['tags'] = list(set(apply_extras.get('tags')))

         new_watch = Watch.model(datastore_path=self.datastore_path, url=url)

         new_uuid = new_watch.get('uuid')
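The net effect of this block: tag names (a comma-separated string) are resolved to tag UUIDs via `add_tag()`, any UUIDs supplied directly through `tag_uuids=` are unioned in, and the resulting list is de-duplicated. A standalone sketch of that merge, not the project's code; `name_to_uuid` is a placeholder standing in for what `add_tag()` returns:

```python
# Placeholder mapping standing in for self.add_tag() name-to-UUID resolution.
name_to_uuid = {"test-tag": "uuid-1", "another-tag": "uuid-2"}

def resolve_tags(tag_string, tag_uuids=None):
    tags = []
    # Names arrive as a comma-separated string, possibly with stray spaces.
    if tag_string and isinstance(tag_string, str):
        for name in map(str.strip, tag_string.split(',')):
            if name:
                tags.append(name_to_uuid[name])
    # UUIDs passed directly are merged in.
    if tag_uuids:
        tags = tags + list(tag_uuids)
    # Finally, keep each UUID only once.
    return list(set(tags))

print(resolve_tags(" test-tag, another-tag ", tag_uuids=["uuid-1"]))
# -> ['uuid-1', 'uuid-2'] in some order; each UUID appears once
```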
@@ -267,7 +267,7 @@ def test_api_watch_PUT_update(client, live_server):

     #live_server_setup(live_server)
     api_key = extract_api_key_from_UI(client)
     time.sleep(1)

     # Create a watch
     set_original_response()
     test_url = url_for('test_endpoint', _external=True,
@@ -283,7 +283,6 @@ def test_api_watch_PUT_update(client, live_server):

     assert res.status_code == 201

     time.sleep(1)

     # Get a listing, it will be the first one
     res = client.get(
@@ -2,7 +2,7 @@

 import time
 from flask import url_for
-from .util import live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, get_UUID_for_tag_name
+from .util import live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, get_UUID_for_tag_name, extract_UUID_from_client
 import os
@@ -154,6 +154,10 @@ def test_tag_add_in_ui(client, live_server):

     )
     assert b"Tag added" in res.data
     assert b"new-test-tag" in res.data

+    res = client.get(url_for("tags.delete_all"), follow_redirects=True)
+    assert b'All tags deleted' in res.data
+
     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
     assert b'Deleted' in res.data
@@ -219,12 +223,10 @@ def test_group_tag_notification(client, live_server):

     assert "test-tag" in notification_submission
     assert "other-tag" in notification_submission

     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
     assert b'Deleted' in res.data

     #@todo Test that multiple notifications fired
     #@todo Test that each of multiple notifications with different settings

     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
     assert b'Deleted' in res.data

 def test_limit_tag_ui(client, live_server):
     #live_server_setup(live_server)
@@ -260,3 +262,61 @@ def test_limit_tag_ui(client, live_server):

     assert b'test-tag' in res.data
     assert res.data.count(b'processor-text_json_diff') == 20
     assert b"object at" not in res.data
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+    res = client.get(url_for("tags.delete_all"), follow_redirects=True)
+    assert b'All tags deleted' in res.data
+
+def test_clone_tag_on_import(client, live_server):
+    #live_server_setup(live_server)
+    test_url = url_for('test_endpoint', _external=True)
+    res = client.post(
+        url_for("import_page"),
+        data={"urls": test_url + " test-tag, another-tag\r\n"},
+        follow_redirects=True
+    )
+
+    assert b"1 Imported" in res.data
+
+    res = client.get(url_for("index"))
+    assert b'test-tag' in res.data
+    assert b'another-tag' in res.data
+
+    watch_uuid = extract_UUID_from_client(client)
+    res = client.get(url_for("form_clone", uuid=watch_uuid), follow_redirects=True)
+
+    assert b'Cloned' in res.data
+    # 2 times plus the top link to tag
+    assert res.data.count(b'test-tag') == 3
+    assert res.data.count(b'another-tag') == 3
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+
+def test_clone_tag_on_quickwatchform_add(client, live_server):
+    #live_server_setup(live_server)
+
+    test_url = url_for('test_endpoint', _external=True)
+
+    res = client.post(
+        url_for("form_quick_watch_add"),
+        data={"url": test_url, "tags": ' test-tag, another-tag '},
+        follow_redirects=True
+    )
+
+    assert b"Watch added" in res.data
+
+    res = client.get(url_for("index"))
+    assert b'test-tag' in res.data
+    assert b'another-tag' in res.data
+
+    watch_uuid = extract_UUID_from_client(client)
+    res = client.get(url_for("form_clone", uuid=watch_uuid), follow_redirects=True)
+
+    assert b'Cloned' in res.data
+    # 2 times plus the top link to tag
+    assert res.data.count(b'test-tag') == 3
+    assert res.data.count(b'another-tag') == 3
+    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
+    assert b'Deleted' in res.data
+
+    res = client.get(url_for("tags.delete_all"), follow_redirects=True)
+    assert b'All tags deleted' in res.data
@@ -112,6 +112,7 @@ def test_import_distillio(client, live_server):

     # did the tags work?
     res = client.get( url_for("index"))

     # check tags
     assert b"nice stuff" in res.data
     assert b"nerd-news" in res.data