Mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2026-01-10 01:00:22 +00:00)

Compare commits (2 commits): resilient-... to RSS-per-wa...

| Author | SHA1 | Date |
|---|---|---|
|  | ff9b4fc32c |  |
|  | 9a6a131985 |  |
@@ -95,6 +95,14 @@ def construct_main_feed_routes(rss_blueprint, datastore):

        dt = dt.replace(tzinfo=pytz.UTC)
        fe.pubDate(dt)

        # Add categories based on watch tags
        for tag_uuid in watch.get('tags', []):
            tag = datastore.data['settings']['application'].get('tags', {}).get(tag_uuid)
            if tag:
                tag_title = tag.get('title', '')
                if tag_title:
                    fe.category(term=tag_title)

    response = make_response(fg.rss_str())
    response.headers.set('Content-Type', 'application/rss+xml;charset=utf-8')
    logger.trace(f"RSS generated in {time.time() - now:.3f}s")
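For reference, each `fe.category(term=...)` call surfaces in the output as a `<category>` element on the RSS item. A minimal standalone sketch with made-up feed and tag data (the watch/tag values here are placeholders, not from the codebase; only the feedgen calls mirror the diff above):

```python
# Minimal sketch with placeholder data; only the feedgen calls mirror the diff above.
from feedgen.feed import FeedGenerator

fg = FeedGenerator()
fg.title('changedetection.io - example feed')
fg.link(href='https://changedetection.io')
fg.description('Changes')

fe = fg.add_entry()
fe.title('example.com - Change @ 2025-01-01 12:00:00 UTC')
fe.guid('example-uuid/1735732800', permalink=False)
for tag_title in ['Security', 'Tech News']:   # placeholder tag titles
    fe.category(term=tag_title)               # becomes <category>Security</category> etc.

print(fg.rss_str(pretty=True).decode())       # the <item> now carries both <category> elements
```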
@@ -2,6 +2,7 @@ from flask import make_response, request, url_for

from feedgen.feed import FeedGenerator
import datetime
import pytz
import locale

from ._util import generate_watch_guid, generate_watch_diff_content
@@ -53,7 +54,16 @@ def construct_single_watch_routes(rss_blueprint, datastore):

    # Create RSS feed
    fg = FeedGenerator()
    fg.title(f'changedetection.io - {watch.label}')

    # Set title: use "label (url)" if label differs from url, otherwise just url
    watch_url = watch.get('url', '')
    watch_label = watch.label
    if watch_label and watch_label != watch_url:
        feed_title = f'changedetection.io - {watch_label} ({watch_url})'
    else:
        feed_title = f'changedetection.io - {watch_url}'

    fg.title(feed_title)
    fg.description('Changes')
    fg.link(href='https://changedetection.io')
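As an aside, the new title logic reduces to a small pure function; a sketch with made-up values (the helper name and sample data are illustrative only, not part of the codebase):

```python
# Illustrative helper, not part of the codebase; mirrors the label-vs-URL branch above.
def feed_title_for(watch_label, watch_url):
    if watch_label and watch_label != watch_url:
        return f'changedetection.io - {watch_label} ({watch_url})'
    return f'changedetection.io - {watch_url}'

print(feed_title_for('My price watch', 'https://example.com/item'))
# changedetection.io - My price watch (https://example.com/item)
print(feed_title_for('https://example.com/item', 'https://example.com/item'))
# changedetection.io - https://example.com/item
```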
@@ -70,15 +80,33 @@ def construct_single_watch_routes(rss_blueprint, datastore):

    try:
        # Generate the diff content for this pair of snapshots
        timestamp_to = dates[date_index_to]
        timestamp_from = dates[date_index_from]

        content, watch_label = generate_watch_diff_content(
            watch, dates, rss_content_format, datastore,
            date_index_from=date_index_from,
            date_index_to=date_index_to
        )

        # Generate edit watch link and add to content
        edit_watch_url = url_for('ui.ui_edit.edit_page',
                                 uuid=watch['uuid'],
                                 _external=True)

        # Add edit watch links at top and bottom of content
        if 'html' in rss_content_format:
            edit_link_html = f'<p><a href="{edit_watch_url}">[edit watch]</a></p>'
            # Insert after <body> and before </body>
            content = content.replace('<body>', f'<body>\n{edit_link_html}', 1)
            content = content.replace('</body>', f'{edit_link_html}\n</body>', 1)
        else:
            # For plain text format, add plain text links in separate <pre> blocks
            edit_link_top = f'<pre>[edit watch] {edit_watch_url}</pre>\n'
            edit_link_bottom = f'\n<pre>[edit watch] {edit_watch_url}</pre>'
            content = edit_link_top + content + edit_link_bottom

        # Create a unique GUID for this specific diff
        timestamp_to = dates[date_index_to]
        timestamp_from = dates[date_index_from]
        guid = f"{watch['uuid']}/{timestamp_to}"

        fe = fg.add_entry()
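The edit-link handling above is plain string substitution on the generated diff content. A self-contained sketch of both branches, with a placeholder URL and diff body (none of these values come from the application):

```python
# Sketch with placeholder values; same substitution technique as the hunk above.
def add_edit_links(content, rss_content_format, edit_watch_url):
    if 'html' in rss_content_format:
        edit_link_html = f'<p><a href="{edit_watch_url}">[edit watch]</a></p>'
        # Inject just after <body> and just before </body> (first occurrence only)
        content = content.replace('<body>', f'<body>\n{edit_link_html}', 1)
        content = content.replace('</body>', f'{edit_link_html}\n</body>', 1)
    else:
        # Plain-text feeds get the link wrapped in <pre> blocks at top and bottom
        link = f'<pre>[edit watch] {edit_watch_url}</pre>'
        content = f'{link}\n{content}\n{link}'
    return content

print(add_edit_links('<html><body><p>diff goes here</p></body></html>',
                     'html', 'https://localhost/edit/example-uuid'))
```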
@@ -91,16 +119,37 @@ def construct_single_watch_routes(rss_blueprint, datastore):

                                   _external=True)}
        fe.link(link=diff_link)

        # Add timestamp info to title to distinguish different diffs
        fe.title(title=f"{watch_label} - Change {i+1}")
        # Format the date using locale-aware formatting with timezone
        dt = datetime.datetime.fromtimestamp(int(timestamp_to))
        dt = dt.replace(tzinfo=pytz.UTC)

        # Get local timezone-aware datetime
        local_tz = datetime.datetime.now().astimezone().tzinfo
        local_dt = dt.astimezone(local_tz)

        # Format date with timezone - using strftime for locale awareness
        try:
            formatted_date = local_dt.strftime('%Y-%m-%d %H:%M:%S %Z')
        except:
            # Fallback if locale issues
            formatted_date = local_dt.isoformat()

        # Use formatted date in title instead of "Change 1, 2, 3"
        fe.title(title=f"{watch_label} - Change @ {formatted_date}")
        fe.content(content=content, type='CDATA')
        fe.guid(guid, permalink=False)

        # Use the timestamp of the "to" snapshot for pubDate
        dt = datetime.datetime.fromtimestamp(int(timestamp_to))
        dt = dt.replace(tzinfo=pytz.UTC)
        fe.pubDate(dt)

        # Add categories based on watch tags
        for tag_uuid in watch.get('tags', []):
            tag = datastore.data['settings']['application'].get('tags', {}).get(tag_uuid)
            if tag:
                tag_title = tag.get('title', '')
                if tag_title:
                    fe.category(term=tag_title)

    except (IndexError, FileNotFoundError) as e:
        # Skip this diff if we can't generate it
        continue
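The entry-title date formatting above is: epoch seconds, mark as UTC, convert to the server's local timezone, then strftime, with an ISO 8601 fallback. A standalone sketch of just that conversion (the function name is illustrative; only stdlib datetime and pytz are used, as in the diff):

```python
# Standalone sketch of the timestamp-to-title formatting used in the hunk above.
import datetime
import pytz

def format_snapshot_timestamp(timestamp_to):
    dt = datetime.datetime.fromtimestamp(int(timestamp_to))
    dt = dt.replace(tzinfo=pytz.UTC)              # mark as UTC, as the diff does

    local_tz = datetime.datetime.now().astimezone().tzinfo
    local_dt = dt.astimezone(local_tz)            # shift to the server's local timezone

    try:
        return local_dt.strftime('%Y-%m-%d %H:%M:%S %Z')
    except Exception:
        return local_dt.isoformat()               # fallback if formatting fails

print(format_snapshot_timestamp(1735732800))      # e.g. "2025-01-01 12:00:00 GMT"
```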
@@ -81,6 +81,14 @@ def construct_tag_routes(rss_blueprint, datastore):

        dt = dt.replace(tzinfo=pytz.UTC)
        fe.pubDate(dt)

        # Add categories based on watch tags
        for tag_uuid in watch.get('tags', []):
            tag = datastore.data['settings']['application'].get('tags', {}).get(tag_uuid)
            if tag:
                tag_title = tag.get('title', '')
                if tag_title:
                    fe.category(term=tag_title)

    response = make_response(fg.rss_str())
    response.headers.set('Content-Type', 'application/rss+xml;charset=utf-8')
    return response
changedetectionio/tests/test_rss_single_watch.py (new file, 245 lines)

@@ -0,0 +1,245 @@
#!/usr/bin/env python3

import time
import os
import xml.etree.ElementTree as ET
from flask import url_for
from .util import live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, extract_UUID_from_client, delete_all_watches


def test_rss_single_watch_order(client, live_server, measure_memory_usage, datastore_path):
    """
    Test that single watch RSS feed shows changes in correct order (newest first).
    """

    # Create initial content
    def set_response(datastore_path, version):
        test_return_data = f"""<html>
<body>
<p>Version {version} content</p>
</body>
</html>
"""
        with open(os.path.join(datastore_path, "endpoint-content.txt"), "w") as f:
            f.write(test_return_data)

    # Start with version 1
    set_response(datastore_path, 1)

    # Add a watch
    test_url = url_for('test_endpoint', _external=True) + "?order_test=1"
    res = client.post(
        url_for("ui.ui_views.form_quick_watch_add"),
        data={"url": test_url, "tags": 'test-tag'},
        follow_redirects=True
    )
    assert b"Watch added" in res.data

    # Get the watch UUID
    watch_uuid = extract_UUID_from_client(client)

    # Wait for initial check
    wait_for_all_checks(client)

    # Create multiple versions by triggering changes
    for version in range(2, 6):  # Create versions 2, 3, 4, 5
        set_response(datastore_path, version)
        res = client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
        wait_for_all_checks(client)
        time.sleep(0.5)  # Small delay to ensure different timestamps

    # Get RSS token
    rss_token = extract_rss_token_from_UI(client)

    # Request RSS feed for the single watch
    res = client.get(
        url_for("rss.rss_single_watch", uuid=watch_uuid, token=rss_token, _external=True),
        follow_redirects=True
    )

    # Should return valid RSS
    assert res.status_code == 200
    assert b"<?xml" in res.data or b"<rss" in res.data

    # Parse the RSS/XML
    root = ET.fromstring(res.data)

    # Find all items (RSS 2.0) or entries (Atom)
    items = root.findall('.//item')
    if not items:
        items = root.findall('.//{http://www.w3.org/2005/Atom}entry')

    # Should have multiple items
    assert len(items) >= 3, f"Expected at least 3 items, got {len(items)}"

    # Get the descriptions/content from first 3 items
    descriptions = []
    for item in items[:3]:
        # Try RSS format first
        desc = item.findtext('description')
        if not desc:
            # Try Atom format
            content_elem = item.find('{http://www.w3.org/2005/Atom}content')
            if content_elem is not None:
                desc = content_elem.text
        descriptions.append(desc if desc else "")

    print(f"First item content: {descriptions[0][:100] if descriptions[0] else 'None'}")
    print(f"Second item content: {descriptions[1][:100] if descriptions[1] else 'None'}")
    print(f"Third item content: {descriptions[2][:100] if descriptions[2] else 'None'}")

    # The FIRST item should contain the NEWEST change (Version 5)
    # The SECOND item should contain Version 4
    # The THIRD item should contain Version 3
    assert b"Version 5" in descriptions[0].encode() or "Version 5" in descriptions[0], \
        f"First item should show newest change (Version 5), but got: {descriptions[0][:200]}"

    # Verify the order is correct
    assert b"Version 4" in descriptions[1].encode() or "Version 4" in descriptions[1], \
        f"Second item should show Version 4, but got: {descriptions[1][:200]}"

    assert b"Version 3" in descriptions[2].encode() or "Version 3" in descriptions[2], \
        f"Third item should show Version 3, but got: {descriptions[2][:200]}"

    # Clean up
    delete_all_watches(client)


def test_rss_categories_from_tags(client, live_server, measure_memory_usage, datastore_path):
    """
    Test that RSS feeds include category tags from watch tags.
    """

    # Create initial content
    test_return_data = """<html>
<body>
<p>Test content for RSS categories</p>
</body>
</html>
"""
    with open(os.path.join(datastore_path, "endpoint-content.txt"), "w") as f:
        f.write(test_return_data)

    # Create some tags first
    res = client.post(
        url_for("tags.form_tag_add"),
        data={"name": "Security"},
        follow_redirects=True
    )

    res = client.post(
        url_for("tags.form_tag_add"),
        data={"name": "Python"},
        follow_redirects=True
    )

    res = client.post(
        url_for("tags.form_tag_add"),
        data={"name": "Tech News"},
        follow_redirects=True
    )

    # Add a watch with tags
    test_url = url_for('test_endpoint', _external=True) + "?category_test=1"
    res = client.post(
        url_for("ui.ui_views.form_quick_watch_add"),
        data={"url": test_url, "tags": "Security, Python, Tech News"},
        follow_redirects=True
    )
    assert b"Watch added" in res.data

    # Get the watch UUID
    watch_uuid = extract_UUID_from_client(client)

    # Wait for initial check
    wait_for_all_checks(client)

    # Trigger one change
    test_return_data_v2 = """<html>
<body>
<p>Updated content for RSS categories</p>
</body>
</html>
"""
    with open(os.path.join(datastore_path, "endpoint-content.txt"), "w") as f:
        f.write(test_return_data_v2)

    res = client.get(url_for("ui.form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)

    # Get RSS token
    rss_token = extract_rss_token_from_UI(client)

    # Test 1: Check single watch RSS feed
    res = client.get(
        url_for("rss.rss_single_watch", uuid=watch_uuid, token=rss_token, _external=True),
        follow_redirects=True
    )
    assert res.status_code == 200
    assert b"<?xml" in res.data or b"<rss" in res.data

    # Parse the RSS/XML
    root = ET.fromstring(res.data)

    # Find all items
    items = root.findall('.//item')
    assert len(items) >= 1, "Expected at least 1 item in RSS feed"

    # Get categories from first item
    categories = [cat.text for cat in items[0].findall('category')]

    print(f"Found categories in single watch RSS: {categories}")

    # Should have all three categories
    assert "Security" in categories, f"Expected 'Security' category, got: {categories}"
    assert "Python" in categories, f"Expected 'Python' category, got: {categories}"
    assert "Tech News" in categories, f"Expected 'Tech News' category, got: {categories}"
    assert len(categories) == 3, f"Expected 3 categories, got {len(categories)}: {categories}"

    # Test 2: Check main RSS feed
    res = client.get(
        url_for("rss.feed", token=rss_token, _external=True),
        follow_redirects=True
    )
    assert res.status_code == 200

    root = ET.fromstring(res.data)
    items = root.findall('.//item')
    assert len(items) >= 1, "Expected at least 1 item in main RSS feed"

    # Get categories from first item in main feed
    categories = [cat.text for cat in items[0].findall('category')]

    print(f"Found categories in main RSS feed: {categories}")

    # Should have all three categories
    assert "Security" in categories, f"Expected 'Security' category in main feed, got: {categories}"
    assert "Python" in categories, f"Expected 'Python' category in main feed, got: {categories}"
    assert "Tech News" in categories, f"Expected 'Tech News' category in main feed, got: {categories}"

    # Test 3: Check tag-specific RSS feed (should also have categories)
    # Get the tag UUID for "Security" and verify the tag feed also has categories
    from .util import get_UUID_for_tag_name
    security_tag_uuid = get_UUID_for_tag_name(client, name="Security")

    if security_tag_uuid:
        res = client.get(
            url_for("rss.rss_tag_feed", tag_uuid=security_tag_uuid, token=rss_token, _external=True),
            follow_redirects=True
        )
        assert res.status_code == 200

        root = ET.fromstring(res.data)
        items = root.findall('.//item')

        if len(items) >= 1:
            categories = [cat.text for cat in items[0].findall('category')]
            print(f"Found categories in tag RSS feed: {categories}")

            # Should still have all three categories
            assert "Security" in categories, f"Expected 'Security' category in tag feed, got: {categories}"
            assert "Python" in categories, f"Expected 'Python' category in tag feed, got: {categories}"
            assert "Tech News" in categories, f"Expected 'Tech News' category in tag feed, got: {categories}"

    # Clean up
    delete_all_watches(client)