Mirror of https://github.com/dgtlmoon/changedetection.io.git, synced 2025-12-05 23:55:32 +00:00

Compare commits: memusage-e...ui-tweaks (8 commits)
| Author | SHA1 | Date |
|---|---|---|
| | c0dc18b849 | |
| | a1c3107cd6 | |
| | 8fef3ff4ab | |
| | baa25c9f9e | |
| | 488699b7d4 | |
| | cf3a1ee3e3 | |
| | daae43e9f9 | |
| | cdeedaa65c | |
@@ -11,7 +11,7 @@ Live your data-life *pro-actively* instead of *re-actively*.

Free, Open-source web page monitoring, notification and change detection. Don't have time? [**Try our $6.99/month subscription - unlimited checks and watches!**](https://lemonade.changedetection.io/start)

[](https://discord.gg/vUNt4EtWMF) [ ](https://www.youtube.com/channel/UCbS09q1TRf0o4N2t-WA3emQ) [](https://www.linkedin.com/company/changedetection-io/)
[](https://discord.gg/XJZy7QK3ja) [ ](https://www.youtube.com/channel/UCbS09q1TRf0o4N2t-WA3emQ) [](https://www.linkedin.com/company/changedetection-io/)

[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />](https://lemonade.changedetection.io/start)
@@ -44,7 +44,7 @@ from flask_wtf import CSRFProtect
from changedetectionio import html_tools
from changedetectionio.api import api_v1

__version__ = '0.39.17'
__version__ = '0.39.17.1'

datastore = None

@@ -54,7 +54,7 @@ ticker_thread = None

extra_stylesheets = []

update_q = queue.Queue()
update_q = queue.PriorityQueue()

notification_q = queue.Queue()
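The switch from `queue.Queue()` to `queue.PriorityQueue()` means every item put on `update_q` is now a `(priority, uuid)` tuple; lower numbers come out first, so UI-triggered rechecks (priority 1) jump ahead of routine ticker rechecks (priority 5). A minimal, standalone sketch of that ordering (the UUID strings here are placeholders, not real watch IDs):

```python
import queue

update_q = queue.PriorityQueue()

# Routine ticker recheck gets a lower priority (higher number)...
update_q.put((5, "uuid-ticker-recheck"))
# ...while a user-requested recheck gets a higher priority (lower number).
update_q.put((1, "uuid-user-recheck"))

priority, uuid = update_q.get()
print(priority, uuid)  # -> 1 uuid-user-recheck (served before the priority-5 item)
```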
@@ -370,20 +370,20 @@ def changedetection_app(config=None, datastore_o=None):
from changedetectionio import forms

limit_tag = request.args.get('tag')
pause_uuid = request.args.get('pause')

# Redirect for the old rss path which used the /?rss=true
if request.args.get('rss'):
return redirect(url_for('rss', tag=limit_tag))

if pause_uuid:
try:
datastore.data['watching'][pause_uuid]['paused'] ^= True
datastore.needs_write = True
op = request.args.get('op')
if op:
uuid = request.args.get('uuid')
if op == 'pause':
datastore.data['watching'][uuid]['paused'] ^= True
elif op == 'mute':
datastore.data['watching'][uuid]['notification_muted'] ^= True

return redirect(url_for('index', tag = limit_tag))
except KeyError:
pass
datastore.needs_write = True
return redirect(url_for('index', tag = limit_tag))

# Sort by last_changed and add the uuid which is usually the key..
sorted_watches = []
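The old single-purpose `?pause=<uuid>` parameter is replaced by a generic `?op=pause|mute&uuid=<uuid>` pair, with each operation XOR-toggling a boolean flag on the watch. A standalone sketch of that toggle dispatch (a plain dict stands in for the real datastore, and the UUID is made up):

```python
# Hypothetical stand-in for datastore.data['watching']
watching = {"abc-123": {"paused": False, "notification_muted": False}}

def apply_op(op, uuid):
    # Mirrors the route logic: '^= True' flips the flag on every request
    if op == 'pause':
        watching[uuid]['paused'] ^= True
    elif op == 'mute':
        watching[uuid]['notification_muted'] ^= True

apply_op('mute', 'abc-123')
print(watching['abc-123'])  # {'paused': False, 'notification_muted': True}
```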
@@ -406,7 +406,6 @@ def changedetection_app(config=None, datastore_o=None):
existing_tags = datastore.get_all_tags()

form = forms.quickWatchForm(request.form)

output = render_template("watch-overview.html",
form=form,
watches=sorted_watches,

@@ -417,7 +416,7 @@ def changedetection_app(config=None, datastore_o=None):
# Don't link to hosting when we're on the hosting environment
hosted_sticky=os.getenv("SALTED_PASS", False) == False,
guid=datastore.data['app_guid'],
queued_uuids=update_q.queue)
queued_uuids=[uuid for p,uuid in update_q.queue])

if session.get('share-link'):
@@ -631,18 +630,14 @@ def changedetection_app(config=None, datastore_o=None):
# But in the case something is added we should save straight away
datastore.needs_write_urgent = True

# Queue the watch for immediate recheck
update_q.put(uuid)
# Queue the watch for immediate recheck, with a higher priority
update_q.put((1, uuid))

# Diff page [edit] link should go back to diff page
if request.args.get("next") and request.args.get("next") == 'diff' and not form.save_and_preview_button.data:
if request.args.get("next") and request.args.get("next") == 'diff':
return redirect(url_for('diff_history_page', uuid=uuid))
else:
if form.save_and_preview_button.data:
flash('You may need to reload this page to see the new content.')
return redirect(url_for('preview_page', uuid=uuid))
else:
return redirect(url_for('index'))

return redirect(url_for('index'))

else:
if request.method == 'POST' and not form.validate():
@@ -746,7 +741,7 @@ def changedetection_app(config=None, datastore_o=None):
importer = import_url_list()
importer.run(data=request.values.get('urls'), flash=flash, datastore=datastore)
for uuid in importer.new_uuids:
update_q.put(uuid)
update_q.put((1, uuid))

if len(importer.remaining_data) == 0:
return redirect(url_for('index'))

@@ -759,7 +754,7 @@ def changedetection_app(config=None, datastore_o=None):
d_importer = import_distill_io_json()
d_importer.run(data=request.values.get('distill-io'), flash=flash, datastore=datastore)
for uuid in d_importer.new_uuids:
update_q.put(uuid)
update_q.put((1, uuid))
@@ -877,42 +872,40 @@ def changedetection_app(config=None, datastore_o=None):
flash("No history found for the specified link, bad link?", "error")
return redirect(url_for('index'))

if watch.history_n >0:
timestamps = sorted(watch.history.keys(), key=lambda x: int(x))
filename = watch.history[timestamps[-1]]
try:
with open(filename, 'r') as f:
tmp = f.readlines()

# Get what needs to be highlighted
ignore_rules = watch.get('ignore_text', []) + datastore.data['settings']['application']['global_ignore_text']
timestamp = list(watch.history.keys())[-1]
filename = watch.history[timestamp]
try:
with open(filename, 'r') as f:
tmp = f.readlines()

# .readlines will keep the \n, but we will parse it here again, in the future tidy this up
ignored_line_numbers = html_tools.strip_ignore_text(content="".join(tmp),
wordlist=ignore_rules,
mode='line numbers'
)
# Get what needs to be highlighted
ignore_rules = watch.get('ignore_text', []) + datastore.data['settings']['application']['global_ignore_text']

trigger_line_numbers = html_tools.strip_ignore_text(content="".join(tmp),
wordlist=watch['trigger_text'],
mode='line numbers'
)
# Prepare the classes and lines used in the template
i=0
for l in tmp:
classes=[]
i+=1
if i in ignored_line_numbers:
classes.append('ignored')
if i in trigger_line_numbers:
classes.append('triggered')
content.append({'line': l, 'classes': ' '.join(classes)})
# .readlines will keep the \n, but we will parse it here again, in the future tidy this up
ignored_line_numbers = html_tools.strip_ignore_text(content="".join(tmp),
wordlist=ignore_rules,
mode='line numbers'
)

trigger_line_numbers = html_tools.strip_ignore_text(content="".join(tmp),
wordlist=watch['trigger_text'],
mode='line numbers'
)
# Prepare the classes and lines used in the template
i=0
for l in tmp:
classes=[]
i+=1
if i in ignored_line_numbers:
classes.append('ignored')
if i in trigger_line_numbers:
classes.append('triggered')
content.append({'line': l, 'classes': ' '.join(classes)})

except Exception as e:
content.append({'line': "File doesnt exist or unable to read file {}".format(filename), 'classes': ''})

except Exception as e:
content.append({'line': "File doesnt exist or unable to read file {}".format(filename), 'classes': ''})
else:
content.append({'line': "No history found", 'classes': ''})

screenshot_url = datastore.get_screenshot(uuid)
system_uses_webdriver = datastore.data['settings']['application']['fetch_backend'] == 'html_webdriver'
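This hunk mostly reorders the preview handler: `html_tools.strip_ignore_text()` is still run twice in `mode='line numbers'`, once for the ignore rules and once for the trigger text, and each snapshot line is then tagged with `ignored`/`triggered` CSS classes. The per-line tagging can be reduced to a small helper; this is only an illustrative sketch, not code from the repository:

```python
def classify_lines(lines, ignored_line_numbers, trigger_line_numbers):
    """Build the template rows: each snapshot line plus its highlight classes."""
    content = []
    for i, line in enumerate(lines, start=1):
        classes = []
        if i in ignored_line_numbers:
            classes.append('ignored')
        if i in trigger_line_numbers:
            classes.append('triggered')
        content.append({'line': line, 'classes': ' '.join(classes)})
    return content

# Example: line 2 matched an ignore rule, line 3 matched the trigger text
print(classify_lines(["a\n", "b\n", "c\n"], {2}, {3}))
```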
@@ -1090,7 +1083,7 @@ def changedetection_app(config=None, datastore_o=None):

if not add_paused and new_uuid:
# Straight into the queue.
update_q.put(new_uuid)
update_q.put((1, new_uuid))
flash("Watch added.")

if add_paused:

@@ -1127,7 +1120,7 @@ def changedetection_app(config=None, datastore_o=None):
uuid = list(datastore.data['watching'].keys()).pop()

new_uuid = datastore.clone(uuid)
update_q.put(new_uuid)
update_q.put((5, new_uuid))
flash('Cloned.')

return redirect(url_for('index'))

@@ -1148,7 +1141,7 @@ def changedetection_app(config=None, datastore_o=None):

if uuid:
if uuid not in running_uuids:
update_q.put(uuid)
update_q.put((1, uuid))
i = 1

elif tag != None:

@@ -1156,7 +1149,7 @@ def changedetection_app(config=None, datastore_o=None):
for watch_uuid, watch in datastore.data['watching'].items():
if (tag != None and tag in watch['tag']):
if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
update_q.put(watch_uuid)
update_q.put((1, watch_uuid))
i += 1

else:

@@ -1164,7 +1157,7 @@ def changedetection_app(config=None, datastore_o=None):
for watch_uuid, watch in datastore.data['watching'].items():

if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
update_q.put(watch_uuid)
update_q.put((1, watch_uuid))
i += 1
flash("{} watches are queued for rechecking.".format(i))
return redirect(url_for('index', tag=tag))

@@ -1368,14 +1361,14 @@ def ticker_thread_check_time_launch_checks():

seconds_since_last_recheck = now - watch['last_checked']
if seconds_since_last_recheck >= (threshold + watch.jitter_seconds) and seconds_since_last_recheck >= recheck_time_minimum_seconds:
if not uuid in running_uuids and uuid not in update_q.queue:
print("Queued watch UUID {} last checked at {} queued at {:0.2f} jitter {:0.2f}s, {:0.2f}s since last checked".format(uuid,
if not uuid in running_uuids and uuid not in [q_uuid for p,q_uuid in update_q.queue]:
print("> Queued watch UUID {} last checked at {} queued at {:0.2f} jitter {:0.2f}s, {:0.2f}s since last checked".format(uuid,
watch['last_checked'],
now,
watch.jitter_seconds,
now - watch['last_checked']))
# Into the queue with you
update_q.put(uuid)
update_q.put((5, uuid))

# Reset for next time
watch.jitter_seconds = 0
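Because queued items are now tuples, the ticker's duplicate check can no longer test `uuid in update_q.queue` directly; it strips the priority off each pending tuple first. A minimal sketch of that membership test (peeking at `.queue` is not strictly thread-safe, which is tolerable here since it is only used to avoid double-queueing):

```python
import queue

update_q = queue.PriorityQueue()
update_q.put((5, "uuid-a"))

def already_queued(update_q, uuid):
    # Drop the priority element from each pending (priority, uuid) tuple
    return uuid in [q_uuid for _priority, q_uuid in update_q.queue]

print(already_queued(update_q, "uuid-a"))  # True
print(already_queued(update_q, "uuid-b"))  # False
```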
@@ -24,7 +24,7 @@ class Watch(Resource):
abort(404, message='No watch exists with the UUID of {}'.format(uuid))

if request.args.get('recheck'):
self.update_q.put(uuid)
self.update_q.put((1, uuid))
return "OK", 200

# Return without history, get that via another API call

@@ -100,7 +100,7 @@ class CreateWatch(Resource):
extras = {'title': json_data['title'].strip()} if json_data.get('title') else {}

new_uuid = self.datastore.add_watch(url=json_data['url'].strip(), tag=tag, extras=extras)
self.update_q.put(new_uuid)
self.update_q.put((1, new_uuid))
return {'uuid': new_uuid}, 201

# Return concise list of available watches and some very basic info

@@ -118,7 +118,7 @@ class CreateWatch(Resource):

if request.args.get('recheck_all'):
for uuid in self.datastore.data['watching'].keys():
self.update_q.put(uuid)
self.update_q.put((1, uuid))
return {'status': "OK"}, 200

return list, 200
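On the API side the same `(1, uuid)` tuple is queued whenever a watch is created or a `recheck` is requested. Assuming the usual Flask-RESTful layout of `api_v1` (the endpoint path and header below are assumptions taken from a typical installation, they are not shown in this diff), a recheck request might look like:

```python
import requests

# Hypothetical values; the exact path and API key come from your own installation
base_url = "http://localhost:5000"
uuid = "00000000-0000-0000-0000-000000000000"

# The 'recheck' query parameter is what the Watch resource checks for in this diff
requests.get(f"{base_url}/api/v1/watch/{uuid}",
             params={"recheck": "1"},
             headers={"x-api-key": "YOUR_API_KEY"})
```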
@@ -547,43 +547,6 @@ class html_requests(Fetcher):
self.headers = r.headers


# "html_requests" is listed as the default fetcher in store.py!
class html_fetcher_with_weird_memory_leak(Fetcher):
fetcher_description = "HTTP Fetcher with unexplainable memory leak"

def __init__(self, proxy_override=None):
self.proxy_override = proxy_override

def run(self,
url,
timeout,
request_headers,
request_body,
request_method,
ignore_status_codes=False,
current_css_filter=None):

self.status_code = 200

# Does nothing to help
# with open('memory-leak.html', 'r', encoding="utf-8") as f:
# with open('memory-leak.html', 'r') as f:

# Works but is binary (no good for me)
with open('memory-leak.html', 'r') as f:
wtf = f.read()

# just to prove gc.collect doesnt help, i dont even use 'wtf'
del wtf
wtf="not much"
import gc
gc.collect()

self.content = "<html>foobar</html>"
self.headers = {}
self.xpath_data = '{}'

# Decide which is the 'real' HTML webdriver, this is more a system wide config
# rather than site-specific.
use_playwright_as_chrome_fetcher = os.getenv('PLAYWRIGHT_DRIVER_URL', False)
@@ -350,7 +350,7 @@ class watchForm(commonSettingsForm):
webdriver_js_execute_code = TextAreaField('Execute JavaScript before change detection', render_kw={"rows": "5"}, validators=[validators.Optional()])

save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
save_and_preview_button = SubmitField('Save & Preview', render_kw={"class": "pure-button pure-button-primary"})

proxy = RadioField('Proxy')
filter_failure_notification_send = BooleanField(
'Send a notification when the filter can no longer be found on the page', default=False)

@@ -31,7 +31,7 @@ class model(dict):
'base_url' : None,
'extract_title_as_title': False,
'empty_pages_are_a_change': False,
'fetch_backend': 'html_fetcher_with_weird_memory_leak',
'fetch_backend': getenv("DEFAULT_FETCH_BACKEND", "html_requests"),
'filter_failure_notification_threshold_attempts': _FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT,
'global_ignore_text': [], # List of text to ignore when calculating the comparison checksum
'global_subtractive_selectors': [],

@@ -36,6 +36,7 @@ class model(dict):
'notification_title': default_notification_title,
'notification_body': default_notification_body,
'notification_format': default_notification_format,
'notification_muted': False,
'css_filter': '',
'extract_text': [], # Extract text by regex after filters
'subtractive_selectors': [],
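The application model drops the hard-coded debug fetcher and instead resolves the default fetch backend from the environment, falling back to `html_requests`. A small standalone sketch of that lookup (a plain dict stands in for the real settings model):

```python
from os import getenv

# Same pattern as the model: the environment wins, otherwise the plain HTTP fetcher
app_defaults = {
    'fetch_backend': getenv("DEFAULT_FETCH_BACKEND", "html_requests"),
}

print(app_defaults['fetch_backend'])  # "html_requests" unless DEFAULT_FETCH_BACKEND is set
```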
42 changedetectionio/static/images/bell-off.svg Normal file

@@ -0,0 +1,42 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
width="15"
height="16.363636"
viewBox="0 0 15 16.363636"
version="1.1"
id="svg4"
sodipodi:docname="bell-off.svg"
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview5"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:pageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
showgrid="false"
fit-margin-top="0"
fit-margin-left="0"
fit-margin-right="0"
fit-margin-bottom="0"
inkscape:zoom="28.416667"
inkscape:cx="-0.59824046"
inkscape:cy="12"
inkscape:window-width="1554"
inkscape:window-height="896"
inkscape:window-x="2095"
inkscape:window-y="107"
inkscape:window-maximized="0"
inkscape:current-layer="svg4" />
<defs
id="defs8" />
<path
d="m 14.318182,11.762045 v 1.1925 H 5.4102273 L 11.849318,7.1140909 C 12.234545,9.1561364 12.54,11.181818 14.318182,11.762045 Z m -6.7984093,4.601591 c 1.0759091,0 2.0256823,-0.955909 2.0256823,-2.045454 H 5.4545455 c 0,1.089545 0.9879545,2.045454 2.0652272,2.045454 z M 15,2.8622727 0.9177273,15.636136 0,14.627045 l 1.8443182,-1.6725 h -1.1625 v -1.1925 C 4.0070455,10.677273 2.1784091,4.5388636 5.3611364,2.6897727 5.8009091,2.4347727 6.0709091,1.9609091 6.0702273,1.4488636 v -0.00205 C 6.0702273,0.64772727 6.7104545,0 7.5,0 8.2895455,0 8.9297727,0.64772727 8.9297727,1.4468182 v 0.00205 C 8.9290909,1.9602319 9.199773,2.4354591 9.638864,2.6897773 10.364318,3.111141 10.827273,3.7568228 11.1525,4.5129591 L 14.085682,1.8531818 Z M 6.8181818,1.3636364 C 6.8181818,1.74 7.1236364,2.0454545 7.5,2.0454545 7.8763636,2.0454545 8.1818182,1.74 8.1818182,1.3636364 8.1818182,0.98795455 7.8763636,0.68181818 7.5,0.68181818 c -0.3763636,0 -0.6818182,0.30613637 -0.6818182,0.68181822 z"
id="path2"
style="fill:#f8321b;stroke-width:0.681818;fill-opacity:1" />
</svg>
@@ -271,11 +271,15 @@ footer {
#new-version-text a {
color: #e07171; }

.paused-state.state-False img {
opacity: 0.2; }

.paused-state.state-False:hover img {
opacity: 0.8; }
.watch-controls {
/* default */ }
.watch-controls .state-on img {
opacity: 0.8; }
.watch-controls img {
opacity: 0.2; }
.watch-controls img:hover {
transition: opacity 0.3s;
opacity: 0.8; }

.monospaced-textarea textarea {
width: 100%;

@@ -362,14 +362,25 @@ footer {
color: #e07171;
}

.paused-state {
&.state-False img {
.watch-controls {
.state-on {
img {
opacity: 0.8;
}
}

/* default */
img {
opacity: 0.2;
}

&.state-False:hover img {
opacity: 0.8;
img {
&:hover {
transition: opacity 0.3s;
opacity: 0.8;
}
}

}

.monospaced-textarea {
@@ -82,8 +82,9 @@ class ChangeDetectionStore:
if include_default_watches:
print("Creating JSON store at", self.datastore_path)

for i in range(50):
self.add_watch(url='https://changedetection.io/CHANGELOG.txt?x='+str(i), tag='test')
self.add_watch(url='http://www.quotationspage.com/random.php', tag='test')
self.add_watch(url='https://news.ycombinator.com/', tag='Tech news')
self.add_watch(url='https://changedetection.io/CHANGELOG.txt', tag='changedetection.io')

self.__data['version_tag'] = version_tag
@@ -307,9 +307,7 @@ Unavailable") }}

<div id="actions">
<div class="pure-control-group">

{{ render_button(form.save_button) }} {{ render_button(form.save_and_preview_button) }}

{{ render_button(form.save_button) }}
<a href="{{url_for('form_delete', uuid=uuid)}}"
class="pure-button button-small button-error ">Delete</a>
<a href="{{url_for('clear_watch_history', uuid=uuid)}}"
@@ -56,8 +56,10 @@
{% if watch.newest_history_key| int > watch.last_viewed and watch.history_n>=2 %}unviewed{% endif %}
{% if watch.uuid in queued_uuids %}queued{% endif %}">
<td class="inline">{{ loop.index }}</td>
<td class="inline paused-state state-{{watch.paused}}"><a href="{{url_for('index', pause=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause" title="Pause"/></a></td>

<td class="inline watch-controls">
<a class="state-{{'on' if watch.paused }}" href="{{url_for('index', op='pause', uuid=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause checks" title="Pause checks"/></a>
<a class="state-{{'on' if watch.notification_muted}}" href="{{url_for('index', op='mute', uuid=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notifications" title="Mute notifications"/></a>
</td>
<td class="title-col inline">{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}}
<a class="external" target="_blank" rel="noopener" href="{{ watch.url.replace('source:','') }}"></a>
<a href="{{url_for('form_share_put_watch', uuid=watch.uuid)}}"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread.svg')}}" /></a>
@@ -90,6 +90,14 @@ def test_check_basic_change_detection_functionality(client, live_server):
res = client.get(url_for("diff_history_page", uuid="first"))
assert b'Compare newest' in res.data

# Check the [preview] pulls the right one
res = client.get(
url_for("preview_page", uuid="first"),
follow_redirects=True
)
assert b'which has this one new line' in res.data
assert b'Which is across multiple lines' not in res.data

time.sleep(2)

# Do this a few times.. ensures we dont accidently set the status
@@ -28,13 +28,9 @@ def test_error_handler(client, live_server):
)
assert b"1 Imported" in res.data

# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)

# Give the thread time to pick it up
time.sleep(3)

res = client.get(url_for("index"))
assert b'unviewed' not in res.data
assert b'Status Code 403' in res.data

@@ -53,9 +49,6 @@ def test_error_text_handler(client, live_server):
)
assert b"1 Imported" in res.data

# Trigger a check
client.get(url_for("form_watch_checknow"), follow_redirects=True)

# Give the thread time to pick it up
time.sleep(3)
134 changedetectionio/tests/test_filter_exist_changes.py Normal file

@@ -0,0 +1,134 @@
#!/usr/bin/python3

# https://www.reddit.com/r/selfhosted/comments/wa89kp/comment/ii3a4g7/?context=3
import os
import time
from flask import url_for
from .util import set_original_response, live_server_setup
from changedetectionio.model import App


def set_response_without_filter():
test_return_data = """<html>
<body>
Some initial text</br>
<p>Which is across multiple lines</p>
</br>
So let's see what happens. </br>
<div id="nope-doesnt-exist">Some text thats the same</div>
</body>
</html>
"""

with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
return None


def set_response_with_filter():
test_return_data = """<html>
<body>
Some initial text</br>
<p>Which is across multiple lines</p>
</br>
So let's see what happens. </br>
<div class="ticket-available">Ticket now on sale!</div>
</body>
</html>
"""

with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
return None

def test_filter_doesnt_exist_then_exists_should_get_notification(client, live_server):
# Filter knowingly doesn't exist, like someone setting up a known filter to see if some cinema tickets are on sale again
# And the page has that filter available
# Then I should get a notification

live_server_setup(live_server)

# Give the endpoint time to spin up
time.sleep(1)
set_response_without_filter()

# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("form_quick_watch_add"),
data={"url": test_url, "tag": 'cinema'},
follow_redirects=True
)
assert b"Watch added" in res.data

# Give the thread time to pick up the first version
time.sleep(3)

# Goto the edit page, add our ignore text
# Add our URL to the import page
url = url_for('test_notification_endpoint', _external=True)
notification_url = url.replace('http', 'json')

print(">>>> Notification URL: " + notification_url)

# Just a regular notification setting, this will be used by the special 'filter not found' notification
notification_form_data = {"notification_urls": notification_url,
"notification_title": "New ChangeDetection.io Notification - {watch_url}",
"notification_body": "BASE URL: {base_url}\n"
"Watch URL: {watch_url}\n"
"Watch UUID: {watch_uuid}\n"
"Watch title: {watch_title}\n"
"Watch tag: {watch_tag}\n"
"Preview: {preview_url}\n"
"Diff URL: {diff_url}\n"
"Snapshot: {current_snapshot}\n"
"Diff: {diff}\n"
"Diff Full: {diff_full}\n"
":-)",
"notification_format": "Text"}

notification_form_data.update({
"url": test_url,
"tag": "my tag",
"title": "my title",
"headers": "",
"css_filter": '.ticket-available',
"fetch_backend": "html_requests"})

res = client.post(
url_for("edit_page", uuid="first"),
data=notification_form_data,
follow_redirects=True
)
assert b"Updated watch." in res.data
time.sleep(3)

# Shouldn't exist, shouldn't have fired
assert not os.path.isfile("test-datastore/notification.txt")
# Now the filter should exist
set_response_with_filter()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(3)

assert os.path.isfile("test-datastore/notification.txt")

with open("test-datastore/notification.txt", 'r') as f:
notification = f.read()

assert 'Ticket now on sale' in notification
os.unlink("test-datastore/notification.txt")


# Test that if it gets removed, then re-added, we get a notification
# Remove the target and re-add it, we should get a new notification
set_response_without_filter()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(3)
assert not os.path.isfile("test-datastore/notification.txt")

set_response_with_filter()
client.get(url_for("form_watch_checknow"), follow_redirects=True)
time.sleep(3)
assert os.path.isfile("test-datastore/notification.txt")

# Also test that the filter was updated after the first one was requested
@@ -26,6 +26,13 @@ def run_filter_test(client, content_filter):

# Give the endpoint time to spin up
time.sleep(1)
# cleanup for the next
client.get(
url_for("form_delete", uuid="all"),
follow_redirects=True
)
if os.path.isfile("test-datastore/notification.txt"):
os.unlink("test-datastore/notification.txt")

# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)

@@ -34,6 +41,7 @@ def run_filter_test(client, content_filter):
data={"url": test_url, "tag": ''},
follow_redirects=True
)

assert b"Watch added" in res.data

# Give the thread time to pick up the first version

@@ -67,6 +75,7 @@ def run_filter_test(client, content_filter):
"tag": "my tag",
"title": "my title",
"headers": "",
"filter_failure_notification_send": 'y',
"css_filter": content_filter,
"fetch_backend": "html_requests"})

@@ -86,7 +95,7 @@ def run_filter_test(client, content_filter):
time.sleep(3)

# We should see something in the frontend
assert b'Did the page change its layout' in res.data
assert b'Warning, filter' in res.data

# Now it should exist and contain our "filter not found" alert
assert os.path.isfile("test-datastore/notification.txt")

@@ -132,3 +141,4 @@ def test_check_xpath_filter_failure_notification(client, live_server):
time.sleep(1)
run_filter_test(client, '//*[@id="nope-doesnt-exist"]')

# Test that notification is never sent
@@ -21,10 +21,70 @@ class update_worker(threading.Thread):
self.datastore = datastore
super().__init__(*args, **kwargs)

def send_filter_failure_notification(self, uuid):
def send_content_changed_notification(self, t, watch_uuid):

from changedetectionio import diff

n_object = {}
watch = self.datastore.data['watching'].get(watch_uuid, False)
if not watch:
return

watch_history = watch.history
dates = list(watch_history.keys())
# Theoretically it's possible that this could be just 1 long,
# - In the case that the timestamp key was not unique
if len(dates) == 1:
raise ValueError(
"History index had 2 or more, but only 1 date loaded, timestamps were not unique? maybe two of the same timestamps got written, needs more delay?"
)

# Did it have any notification alerts to hit?
if len(watch['notification_urls']):
print(">>> Notifications queued for UUID from watch {}".format(watch_uuid))
n_object['notification_urls'] = watch['notification_urls']
n_object['notification_title'] = watch['notification_title']
n_object['notification_body'] = watch['notification_body']
n_object['notification_format'] = watch['notification_format']

# No? maybe theres a global setting, queue them all
elif len(self.datastore.data['settings']['application']['notification_urls']):
print(">>> Watch notification URLs were empty, using GLOBAL notifications for UUID: {}".format(watch_uuid))
n_object['notification_urls'] = self.datastore.data['settings']['application']['notification_urls']
n_object['notification_title'] = self.datastore.data['settings']['application']['notification_title']
n_object['notification_body'] = self.datastore.data['settings']['application']['notification_body']
n_object['notification_format'] = self.datastore.data['settings']['application']['notification_format']
else:
print(">>> NO notifications queued, watch and global notification URLs were empty.")

# Only prepare to notify if the rules above matched
if 'notification_urls' in n_object:
# HTML needs linebreak, but MarkDown and Text can use a linefeed
if n_object['notification_format'] == 'HTML':
line_feed_sep = "</br>"
else:
line_feed_sep = "\n"

snapshot_contents = ''
with open(watch_history[dates[-1]], 'rb') as f:
snapshot_contents = f.read()

n_object.update({
'watch_url': watch['url'],
'uuid': watch_uuid,
'current_snapshot': snapshot_contents.decode('utf-8'),
'diff': diff.render_diff(watch_history[dates[-2]], watch_history[dates[-1]], line_feed_sep=line_feed_sep),
'diff_full': diff.render_diff(watch_history[dates[-2]], watch_history[dates[-1]], True, line_feed_sep=line_feed_sep)
})

self.notification_q.put(n_object)

def send_filter_failure_notification(self, watch_uuid):

threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts')
watch = self.datastore.data['watching'].get(uuid, False)
watch = self.datastore.data['watching'].get(watch_uuid, False)
if not watch:
return

n_object = {'notification_title': 'Changedetection.io - Alert - CSS/xPath filter was not present in the page',
'notification_body': "Your configured CSS/xPath filter of '{}' for {{watch_url}} did not appear on the page after {} attempts, did the page change layout?\n\nLink: {{base_url}}/edit/{{watch_uuid}}\n\nThanks - Your omniscient changedetection.io installation :)\n".format(

@@ -42,10 +102,10 @@ class update_worker(threading.Thread):
if 'notification_urls' in n_object:
n_object.update({
'watch_url': watch['url'],
'uuid': uuid
'uuid': watch_uuid
})
self.notification_q.put(n_object)
print("Sent filter not found notification for {}".format(uuid))
print("Sent filter not found notification for {}".format(watch_uuid))

def run(self):
from changedetectionio import fetch_site_status
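The new `send_content_changed_notification()` pulls the whole notification branch out of `run()`: it prefers the watch's own notification settings, falls back to the global application settings, and only builds the payload if one of the two produced any URLs. A condensed standalone sketch of that fallback (plain dicts instead of the real datastore, and an invented example URL):

```python
def resolve_notification_settings(watch, app_settings):
    """Watch-level settings win; otherwise fall back to the globals; else notify nobody."""
    keys = ('notification_urls', 'notification_title',
            'notification_body', 'notification_format')
    if watch['notification_urls']:
        return {k: watch[k] for k in keys}
    if app_settings['notification_urls']:
        return {k: app_settings[k] for k in keys}
    return {}  # nothing configured, so no notification is queued

watch = {'notification_urls': [], 'notification_title': '',
         'notification_body': '', 'notification_format': 'Text'}
app_settings = {'notification_urls': ['json://example'], 'notification_title': 'Changed',
                'notification_body': '{diff}', 'notification_format': 'Text'}
print(resolve_notification_settings(watch, app_settings)['notification_urls'])  # ['json://example']
```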
@@ -55,7 +115,7 @@ class update_worker(threading.Thread):
while not self.app.config.exit.is_set():

try:
uuid = self.q.get(block=False)
priority, uuid = self.q.get(block=False)
except queue.Empty:
pass

@@ -63,12 +123,13 @@ class update_worker(threading.Thread):
self.current_uuid = uuid

if uuid in list(self.datastore.data['watching'].keys()):

changed_detected = False
contents = ""
contents = b''
screenshot = False
update_obj= {}
xpath_data = False
process_changedetection_results = True
print("> Processing UUID {} Priority {} URL {}".format(uuid, priority, self.datastore.data['watching'][uuid]['url']))
now = time.time()

try:

@@ -80,26 +141,37 @@ class update_worker(threading.Thread):
raise Exception("Error - returned data from the fetch handler SHOULD be bytes")
except PermissionError as e:
self.app.logger.error("File permission error updating", uuid, str(e))
process_changedetection_results = False
except content_fetcher.ReplyWithContentButNoText as e:
# Totally fine, it's by choice - just continue on, nothing more to care about
# Page had elements/content but no renderable text
# Backend (not filters) gave zero output
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': "Got HTML content but no text found."})
process_changedetection_results = False

except FilterNotFoundInResponse as e:
err_text = "Filter '{}' not found - Did the page change its layout?".format(str(e))
c = 0
if self.datastore.data['watching'].get(uuid, False):
c = self.datastore.data['watching'][uuid].get('consecutive_filter_failures', 5)
c += 1

# Send notification if we reached the threshold?
threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts', 0)
print("Filter for {} not found, consecutive_filter_failures: {}".format(uuid, c))
if threshold >0 and c >= threshold:
self.send_filter_failure_notification(uuid)
c = 0

err_text = "Warning, filter '{}' not found".format(str(e))
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
'consecutive_filter_failures': c})
# So that we get a trigger when the content is added again
'previous_md5': ''})

# Only when enabled, send the notification
if self.datastore.data['watching'][uuid].get('filter_failure_notification_send', False):
c = self.datastore.data['watching'][uuid].get('consecutive_filter_failures', 5)
c += 1
# Send notification if we reached the threshold?
threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts',
0)
print("Filter for {} not found, consecutive_filter_failures: {}".format(uuid, c))
if threshold > 0 and c >= threshold:
if not self.datastore.data['watching'][uuid].get('notification_muted'):
self.send_filter_failure_notification(uuid)
c = 0

self.datastore.update_watch(uuid=uuid, update_obj={'consecutive_filter_failures': c})

process_changedetection_results = True

except content_fetcher.EmptyReply as e:
# Some kind of custom to-str handler in the exception handler that does this?
err_text = "EmptyReply - try increasing 'Wait seconds before extracting text', Status Code {}".format(e.status_code)

@@ -109,6 +181,7 @@ class update_worker(threading.Thread):
err_text = "Screenshot unavailable, page did not render fully in the expected time - try increasing 'Wait seconds before extracting text'"
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
'last_check_status': e.status_code})
process_changedetection_results = False
except content_fetcher.PageUnloadable as e:
err_text = "Page request from server didnt respond correctly"
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,

@@ -116,8 +189,14 @@ class update_worker(threading.Thread):
except Exception as e:
self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})

# Other serious error
process_changedetection_results = False
else:
# Mark that we never had any failures
update_obj['consecutive_filter_failures'] = 0

# Different exceptions mean that we may or may not want to bump the snapshot, trigger notifications etc
if process_changedetection_results:
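In the reworked `FilterNotFoundInResponse` handler, the "filter not found" alert only fires when the per-watch `filter_failure_notification_send` flag is enabled, consecutive misses are counted, muted watches are skipped, and the counter resets once the threshold alert has gone out. A stripped-down sketch of that counter logic (defaults simplified, not the repository's exact code):

```python
def register_filter_miss(watch, threshold, notify):
    """Count a consecutive miss; fire the alert and reset once the threshold is hit."""
    count = watch.get('consecutive_filter_failures', 0) + 1
    if watch.get('filter_failure_notification_send') and threshold > 0 and count >= threshold:
        if not watch.get('notification_muted'):
            notify(watch)
        count = 0
    watch['consecutive_filter_failures'] = count

watch = {'filter_failure_notification_send': True, 'notification_muted': False}
for _ in range(6):
    register_filter_miss(watch, threshold=6,
                         notify=lambda w: print("filter-not-found alert sent"))
print(watch['consecutive_filter_failures'])  # 0, reset after the alert fired
```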
try:
watch = self.datastore.data['watching'][uuid]
fname = "" # Saved history text filename

@@ -125,68 +204,21 @@ class update_worker(threading.Thread):
# For the FIRST time we check a site, or a change detected, save the snapshot.
if changed_detected or not watch['last_checked']:
# A change was detected
fname = watch.save_history_text(contents=contents, timestamp=str(round(time.time())))
watch.save_history_text(contents=contents, timestamp=str(round(time.time())))

# Generally update anything interesting returned
update_obj['consecutive_filter_failures'] = 0
self.datastore.update_watch(uuid=uuid, update_obj=update_obj)

# A change was detected
if changed_detected:
n_object = {}
print (">> Change detected in UUID {} - {}".format(uuid, watch['url']))

# Notifications should only trigger on the second time (first time, we gather the initial snapshot)
if watch.history_n >= 2:
# Atleast 2, means there really was a change
self.datastore.update_watch(uuid=uuid, update_obj={'last_changed': round(now)})
if not self.datastore.data['watching'][uuid].get('notification_muted'):
self.send_content_changed_notification(self, watch_uuid=uuid)

watch_history = watch.history
dates = list(watch_history.keys())
# Theoretically it's possible that this could be just 1 long,
# - In the case that the timestamp key was not unique
if len(dates) == 1:
raise ValueError(
"History index had 2 or more, but only 1 date loaded, timestamps were not unique? maybe two of the same timestamps got written, needs more delay?"
)
prev_fname = watch_history[dates[-2]]

# Did it have any notification alerts to hit?
if len(watch['notification_urls']):
print(">>> Notifications queued for UUID from watch {}".format(uuid))
n_object['notification_urls'] = watch['notification_urls']
n_object['notification_title'] = watch['notification_title']
n_object['notification_body'] = watch['notification_body']
n_object['notification_format'] = watch['notification_format']

# No? maybe theres a global setting, queue them all
elif len(self.datastore.data['settings']['application']['notification_urls']):
print(">>> Watch notification URLs were empty, using GLOBAL notifications for UUID: {}".format(uuid))
n_object['notification_urls'] = self.datastore.data['settings']['application']['notification_urls']
n_object['notification_title'] = self.datastore.data['settings']['application']['notification_title']
n_object['notification_body'] = self.datastore.data['settings']['application']['notification_body']
n_object['notification_format'] = self.datastore.data['settings']['application']['notification_format']
else:
print(">>> NO notifications queued, watch and global notification URLs were empty.")

# Only prepare to notify if the rules above matched
if 'notification_urls' in n_object:
# HTML needs linebreak, but MarkDown and Text can use a linefeed
if n_object['notification_format'] == 'HTML':
line_feed_sep = "</br>"
else:
line_feed_sep = "\n"

from changedetectionio import diff
n_object.update({
'watch_url': watch['url'],
'uuid': uuid,
'current_snapshot': contents.decode('utf-8'),
'diff': diff.render_diff(prev_fname, fname, line_feed_sep=line_feed_sep),
'diff_full': diff.render_diff(prev_fname, fname, True, line_feed_sep=line_feed_sep)
})

self.notification_q.put(n_object)

except Exception as e:
# Catch everything possible here, so that if a worker crashes, we don't lose it until restart!

@@ -194,7 +226,7 @@ class update_worker(threading.Thread):
self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})

finally:

# Always record that we atleast tried
self.datastore.update_watch(uuid=uuid, update_obj={'fetch_time': round(time.time() - now, 3),
'last_checked': round(time.time())})
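Tying the two ends together, the worker now unpacks the `(priority, uuid)` tuple that the web UI, API, and ticker all enqueue, so higher-priority UUIDs are processed first while the rest of the loop only ever sees the bare UUID. A minimal end-to-end sketch of that producer/consumer handshake, outside the real threading and fetch machinery (the UUID strings are placeholders):

```python
import queue

update_q = queue.PriorityQueue()
update_q.put((5, "uuid-ticker"))   # routine recheck from the ticker thread
update_q.put((1, "uuid-manual"))   # user clicked "Recheck"

while True:
    try:
        priority, uuid = update_q.get(block=False)
    except queue.Empty:
        break
    print("> Processing UUID {} Priority {}".format(uuid, priority))
# -> uuid-manual (priority 1) is handled before uuid-ticker (priority 5)
```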
231 memory-leak.html (file diff suppressed because one or more lines are too long)