Compare commits


1 Commit

Author      SHA1          Message   Date
dgtlmoon    b72ecfa852    WIP       2023-06-23 15:22:13 +02:00
11 changed files with 41 additions and 104 deletions

changedetectionio/api/api_v1.py (View File)

@@ -219,15 +219,13 @@ class CreateWatch(Resource):
         extras = copy.deepcopy(json_data)

-        # Because we renamed 'tag' to 'tags' but dont want to change the API (can do this in v2 of the API)
+        # Because we renamed 'tag' to 'tags' but don't want to change the API (can do this in v2 of the API)
+        tags = None
         if extras.get('tag'):
-            extras['tags'] = extras.get('tag')
+            tags = extras.get('tag')
             del extras['tag']

         del extras['url']

-        new_uuid = self.datastore.add_watch(url=url, extras=extras)
+        new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags)
         if new_uuid:
             self.update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': new_uuid, 'skip_when_checksum_same': True}))
             return {'uuid': new_uuid}, 201
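
Taken on its own, the new shim keeps the v1 payload compatible while handing the tag value to add_watch() separately instead of renaming it inside extras. A minimal sketch (the helper name and payload here are illustrative, not part of the codebase):

    import copy

    def extract_legacy_tag(json_data):
        # Pull 'tag' out of the payload and return it separately,
        # leaving extras free of the legacy key
        extras = copy.deepcopy(json_data)
        tags = None
        if extras.get('tag'):
            tags = extras.get('tag')
            del extras['tag']
        return extras, tags

    extras, tags = extract_legacy_tag({'url': 'https://example.com', 'tag': 'news'})
    assert tags == 'news' and 'tag' not in extras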

changedetectionio/blueprint/tags/__init__.py (View File)

@@ -76,16 +76,6 @@ def construct_blueprint(datastore: ChangeDetectionStore):
         flash(f"Tag unlinked removed from {unlinked} watches")
         return redirect(url_for('tags.tags_overview_page'))

-    @tags_blueprint.route("/delete_all", methods=['GET'])
-    @login_optionally_required
-    def delete_all():
-        for watch_uuid, watch in datastore.data['watching'].items():
-            watch['tags'] = []
-        datastore.data['settings']['application']['tags'] = {}
-
-        flash(f"All tags deleted")
-        return redirect(url_for('tags.tags_overview_page'))
-
     @tags_blueprint.route("/edit/<string:uuid>", methods=['GET'])
     @login_optionally_required
     def form_tag_edit(uuid):
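
For anyone who relied on the removed route, the cleanup it performed was a two-step datastore operation; a sketch lifted from the deleted handler (the standalone function name is hypothetical):

    def delete_all_tags(datastore):
        # Detach every tag from every watch...
        for watch_uuid, watch in datastore.data['watching'].items():
            watch['tags'] = []
        # ...then drop the tag registry itself
        datastore.data['settings']['application']['tags'] = {}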

changedetectionio/forms.py (View File)

@@ -316,10 +316,11 @@ class ValidateCSSJSONXPATHInput(object):
                 if not self.allow_xpath:
                     raise ValidationError("XPath not permitted in this field!")
                 from lxml import etree, html
+                import elementpath
                 tree = html.fromstring("<html></html>")

                 try:
-                    tree.xpath(line.strip())
+                    elementpath.select(tree, line)
                 except etree.XPathEvalError as e:
                     message = field.gettext('\'%s\' is not a valid XPath expression. (%s)')
                     raise ValidationError(message % (line, str(e)))
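
The validator now parses expressions with elementpath, which speaks XPath 2.0 and later, rather than lxml's built-in XPath 1.0 engine. One caveat: elementpath raises its own exception hierarchy, so a handler catching only etree.XPathEvalError may let malformed expressions through. A small sketch:

    import elementpath
    from lxml import html

    tree = html.fromstring("<html></html>")
    try:
        elementpath.select(tree, "//div[")  # deliberately malformed
    except elementpath.ElementPathError as e:
        # elementpath's own error type, not etree.XPathEvalError
        print(f"not a valid XPath expression: {e}")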

changedetectionio/html_tools.py (View File)

@@ -51,12 +51,13 @@ def element_removal(selectors: List[str], html_content):
 # Return str Utf-8 of matched rules
 def xpath_filter(xpath_filter, html_content, append_pretty_line_formatting=False):
+    import elementpath
     from lxml import etree, html

     tree = html.fromstring(bytes(html_content, encoding='utf-8'))
     html_block = ""

-    r = tree.xpath(xpath_filter.strip(), namespaces={'re': 'http://exslt.org/regular-expressions'})
+    r = elementpath.select(tree, xpath_filter.strip())
     #@note: //title/text() wont work where <title>CDATA..

     for element in r:
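
Dropping the EXSLT re: namespace gives up the old regex extension, but elementpath adds the standard XPath 2.0 function library in exchange. A sketch of an expression the previous tree.xpath() call would reject:

    import elementpath
    from lxml import html

    tree = html.fromstring("<html><head><title>Hello</title></head><body></body></html>")
    # fn:lower-case() is XPath 2.0, so lxml's 1.0 engine raises here,
    # while elementpath evaluates it
    print(elementpath.select(tree, "//title/lower-case(text())"))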

changedetectionio/importer.py (View File)

@@ -85,8 +85,7 @@ class import_distill_io_json(Importer):
         now = time.time()
         self.new_uuids=[]

-        # @todo Use JSONSchema like in the API to validate here.
         try:
             data = json.loads(data.strip())
         except json.decoder.JSONDecodeError:

@@ -121,8 +120,11 @@ class import_distill_io_json(Importer):
             except IndexError:
                 pass
+            # Does this need to be here anymore?
+            if d.get('tags', False):
+                extras['tags'] = ", ".join(d['tags'])

             new_uuid = datastore.add_watch(url=d['uri'].strip(),
-                                           tag=",".join(d.get('tags', [])),
                                            extras=extras,
                                            write_to_disk_now=False)
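
For reference, the importer walks a Distill.io export whose relevant shape is a list of entries under 'data', each carrying a 'uri' and optional 'tags'. A reduced sketch of the new tag handling (the export is trimmed to only the keys this code reads):

    import json

    export = json.dumps({"data": [
        {"uri": "https://example.com/page", "tags": ["nice stuff", "nerd-news"]}
    ]})

    for d in json.loads(export)['data']:
        extras = {}
        if d.get('tags', False):
            # Same join as above: tag names become one comma-separated string
            extras['tags'] = ", ".join(d['tags'])
        print(d['uri'].strip(), extras)  # https://example.com/page {'tags': 'nice stuff, nerd-news'}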

changedetectionio/store.py (View File)

@@ -205,9 +205,10 @@ class ChangeDetectionStore:
     # Clone a watch by UUID
     def clone(self, uuid):
-        url = self.data['watching'][uuid].get('url')
+        url = self.data['watching'][uuid]['url']
+        tag = self.data['watching'][uuid].get('tags',[])
         extras = self.data['watching'][uuid]
-        new_uuid = self.add_watch(url=url, extras=extras)
+        new_uuid = self.add_watch(url=url, tag_uuids=tag, extras=extras)
         return new_uuid

     def url_exists(self, url):

@@ -247,9 +248,12 @@ class ChangeDetectionStore:
         if extras is None:
             extras = {}

+        # should always be str
+        if tag is None or not tag:
+            tag = ''

         # Incase these are copied across, assume it's a reference and deepcopy()
         apply_extras = deepcopy(extras)
+        apply_extras['tags'] = [] if not apply_extras.get('tags') else apply_extras.get('tags')

         # Was it a share link? try to fetch the data
         if (url.startswith("https://changedetection.io/share/")):

@@ -299,22 +303,20 @@ class ChangeDetectionStore:
             flash('Watch protocol is not permitted by SAFE_PROTOCOL_REGEX', 'error')
             return None

-        # #Re 569
-        # Could be in 'tags', var or extras, smash them together and strip
-        apply_extras['tags'] = []
-        if tag or extras.get('tags'):
-            tags = list(filter(None, list(set().union(tag.split(','), extras.get('tags', '').split(',')))))
-            for t in list(map(str.strip, tags)):
-                apply_extras['tags'].append(self.add_tag(t))
+        if tag and type(tag) == str:
+            # Then it's probably a string of the actual tag by name, split and add it
+            for t in tag.split(','):
+                # for each stripped tag, add tag as UUID
+                for a_t in t.split(','):
+                    tag_uuid = self.add_tag(a_t)
+                    apply_extras['tags'].append(tag_uuid)

+        # Or if UUIDs given directly
+        if tag_uuids:
+            apply_extras['tags'] = list(set(apply_extras['tags'] + tag_uuids))

+        # Make any uuids unique
+        if apply_extras.get('tags'):
+            apply_extras['tags'] = list(set(apply_extras.get('tags')))

         new_watch = Watch.model(datastore_path=self.datastore_path, url=url)
         new_uuid = new_watch.get('uuid')
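
The net effect of the add_watch() rework: tag names are resolved to UUIDs one at a time, UUIDs can also be passed directly through the new tag_uuids parameter, and duplicates collapse via set(). A standalone sketch with a stubbed tag registry (add_tag here stands in for the datastore method):

    import uuid

    _tags = {}  # name -> uuid; stands in for the datastore's tag registry

    def add_tag(name):
        # Hypothetical stub: create the tag on first sight, reuse its UUID after
        name = name.strip()
        if name not in _tags:
            _tags[name] = str(uuid.uuid4())
        return _tags[name]

    def resolve_tags(tag='', tag_uuids=None):
        tags = [add_tag(t) for t in tag.split(',') if t.strip()]
        if tag_uuids:
            tags += tag_uuids
        return list(set(tags))  # make any uuids unique

    first = resolve_tags(tag='news, tech')
    again = resolve_tags(tag='news', tag_uuids=first)
    assert set(again) == set(first)  # 'news' resolves to the same UUID both times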

changedetectionio/tests/test_api.py (View File)

@@ -267,7 +267,7 @@ def test_api_watch_PUT_update(client, live_server):
     #live_server_setup(live_server)
     api_key = extract_api_key_from_UI(client)
+    time.sleep(1)

     # Create a watch
     set_original_response()
     test_url = url_for('test_endpoint', _external=True,

@@ -283,6 +283,7 @@ def test_api_watch_PUT_update(client, live_server):
     assert res.status_code == 201
+    time.sleep(1)

     # Get a listing, it will be the first one
     res = client.get(
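
The test exercises the same shim end to end; against a running instance, the equivalent request looks roughly like this (host and key are placeholders, endpoint per the documented v1 API):

    import requests

    res = requests.post(
        "http://localhost:5000/api/v1/watch",
        headers={"x-api-key": "YOUR_API_KEY"},
        json={"url": "https://example.com", "tag": "nice stuff"},
    )
    assert res.status_code == 201  # CreateWatch answers {'uuid': ...}, 201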

changedetectionio/tests/test_group.py (View File)

@@ -2,7 +2,7 @@
 import time
 from flask import url_for
-from .util import live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, get_UUID_for_tag_name, extract_UUID_from_client
+from .util import live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, get_UUID_for_tag_name
 import os

@@ -154,10 +154,6 @@ def test_tag_add_in_ui(client, live_server):
     )
     assert b"Tag added" in res.data
     assert b"new-test-tag" in res.data

-    res = client.get(url_for("tags.delete_all"), follow_redirects=True)
-    assert b'All tags deleted' in res.data

     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
     assert b'Deleted' in res.data

@@ -223,11 +219,13 @@ def test_group_tag_notification(client, live_server):
     assert "test-tag" in notification_submission
     assert "other-tag" in notification_submission

-    #@todo Test that multiple notifications fired
-    #@todo Test that each of multiple notifications with different settings

     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
     assert b'Deleted' in res.data

+    #@todo Test that multiple notifications fired
+    #@todo Test that each of multiple notifications with different settings

 def test_limit_tag_ui(client, live_server):
     #live_server_setup(live_server)

@@ -262,61 +260,3 @@ def test_limit_tag_ui(client, live_server):
     assert b'test-tag' in res.data
     assert res.data.count(b'processor-text_json_diff') == 20
     assert b"object at" not in res.data
-
-    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
-    assert b'Deleted' in res.data
-
-    res = client.get(url_for("tags.delete_all"), follow_redirects=True)
-    assert b'All tags deleted' in res.data
-
-def test_clone_tag_on_import(client, live_server):
-    #live_server_setup(live_server)
-    test_url = url_for('test_endpoint', _external=True)
-    res = client.post(
-        url_for("import_page"),
-        data={"urls": test_url + " test-tag, another-tag\r\n"},
-        follow_redirects=True
-    )
-    assert b"1 Imported" in res.data
-
-    res = client.get(url_for("index"))
-    assert b'test-tag' in res.data
-    assert b'another-tag' in res.data
-
-    watch_uuid = extract_UUID_from_client(client)
-    res = client.get(url_for("form_clone", uuid=watch_uuid), follow_redirects=True)
-    assert b'Cloned' in res.data
-
-    # 2 times plus the top link to tag
-    assert res.data.count(b'test-tag') == 3
-    assert res.data.count(b'another-tag') == 3
-
-    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
-    assert b'Deleted' in res.data
-
-def test_clone_tag_on_quickwatchform_add(client, live_server):
-    #live_server_setup(live_server)
-    test_url = url_for('test_endpoint', _external=True)
-    res = client.post(
-        url_for("form_quick_watch_add"),
-        data={"url": test_url, "tags": ' test-tag, another-tag '},
-        follow_redirects=True
-    )
-    assert b"Watch added" in res.data
-
-    res = client.get(url_for("index"))
-    assert b'test-tag' in res.data
-    assert b'another-tag' in res.data
-
-    watch_uuid = extract_UUID_from_client(client)
-    res = client.get(url_for("form_clone", uuid=watch_uuid), follow_redirects=True)
-    assert b'Cloned' in res.data
-
-    # 2 times plus the top link to tag
-    assert res.data.count(b'test-tag') == 3
-    assert res.data.count(b'another-tag') == 3
-
-    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
-    assert b'Deleted' in res.data
-
-    res = client.get(url_for("tags.delete_all"), follow_redirects=True)
-    assert b'All tags deleted' in res.data

changedetectionio/tests/test_import.py (View File)

@@ -112,7 +112,6 @@ def test_import_distillio(client, live_server):
     # did the tags work?
     res = client.get( url_for("index"))

-    # check tags
     assert b"nice stuff" in res.data
     assert b"nerd-news" in res.data

changedetectionio/tests/test_xpath_selector.py (View File)

@@ -2,7 +2,7 @@
 import time
 from flask import url_for
-from . util import live_server_setup
+from .util import live_server_setup, wait_for_all_checks
 from ..html_tools import *

@@ -164,6 +164,7 @@ def test_check_xpath_text_function_utf8(client, live_server):
     assert b'Deleted' in res.data

 def test_check_markup_xpath_filter_restriction(client, live_server):
+    live_server_setup(live_server)
     sleep_time_for_fetch_thread = 3
     xpath_filter = "//*[contains(@class, 'sametext')]"

@@ -183,7 +184,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server):
     assert b"1 Imported" in res.data

     # Give the thread time to pick it up
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     # Goto the edit page, add our ignore text
     # Add our URL to the import page

@@ -195,7 +196,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server):
     assert b"Updated watch." in res.data

     # Give the thread time to pick it up
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     # view it/reset state back to viewed
     client.get(url_for("diff_history_page", uuid="first"), follow_redirects=True)

@@ -206,7 +207,7 @@ def test_check_markup_xpath_filter_restriction(client, live_server):
     # Trigger a check
     client.get(url_for("form_watch_checknow"), follow_redirects=True)

     # Give the thread time to pick it up
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     res = client.get(url_for("index"))
     assert b'unviewed' not in res.data
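
Replacing the fixed time.sleep() calls with wait_for_all_checks() lets these tests proceed as soon as the update queue drains instead of racing a three-second timer. A hypothetical version of such a helper (the real one in tests/util.py may differ):

    import time

    def wait_for_all_checks(client, timeout=20):
        # Poll the index page until no watch reports it is still being checked
        for _ in range(timeout):
            res = client.get("/")
            if b'Checking now' not in res.data:
                return
            time.sleep(1)
        raise AssertionError("watches were still being checked after %ss" % timeout)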

requirements.txt (View File)

@@ -42,6 +42,8 @@ paho-mqtt
 # (introduced once apprise became a dep)
 cryptography~=3.4

+elementpath

 # Used for CSS filtering
 beautifulsoup4
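
elementpath is the dependency behind the XPath changes above, left unpinned here. A quick interpreter check that it resolves and evaluates XPath 2.0:

    import xml.etree.ElementTree as ET
    import elementpath

    root = ET.XML("<greeting>ok</greeting>")
    # upper-case() is XPath 2.0, so a successful call proves the new engine is in use
    print(elementpath.select(root, "upper-case(string(.))"))  # -> 'OK'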