Compare commits


27 Commits

Author  SHA1  Message  Date
dgtlmoon  1a4f8ee2e7  simplify  2022-11-02 22:52:04 +01:00
dgtlmoon  078cfe9333  remove cache  2022-11-02 22:48:54 +01:00
dgtlmoon  6efb9d46a1  fix ID  2022-11-02 22:47:41 +01:00
dgtlmoon  e109c1e190  Revert "Docker & python - Use pip conditional requirements to not install playwright for ARM (unsupported on ARM) (#1067)" (This reverts commit c93ca1841c.)  2022-11-02 22:45:39 +01:00
dgtlmoon  e17bfda5cc  add comment  2022-11-02 22:43:36 +01:00
dgtlmoon  cc7e09529b  Perform Alpine musl type build test  2022-11-02 22:42:49 +01:00
dgtlmoon  defc7a340e  0.39.21  2022-11-02 15:12:33 +01:00
dgtlmoon  c197c062e1  Disable version check when pytest is running (#1084)  2022-11-01 18:26:29 +01:00
dgtlmoon  77b59809ca  Removing unused code (#1070)  2022-10-28 18:36:07 +02:00
dgtlmoon  f90b170e68  Docker & python - Jq conditional pip requirements.txt include (Don't install in Windows because theres no Windows library/wheel)  2022-10-27 23:26:14 +02:00
dgtlmoon  c93ca1841c  Docker & python - Use pip conditional requirements to not install playwright for ARM (unsupported on ARM) (#1067)  2022-10-27 23:17:05 +02:00
Sandro  57f604dff1  UI - Make fetch error more readable (#1038)  2022-10-27 16:40:24 +02:00
dgtlmoon  8499468749  Update README.md  2022-10-27 15:17:14 +02:00
dgtlmoon  7f6a13ea6c  Re #1052 - Watch 'open' link should use any dynamic/template info (#1063)  2022-10-27 13:29:24 +02:00
dgtlmoon  9874f0cbc7  Remove accidental files  2022-10-27 12:43:02 +02:00
dgtlmoon  72834a42fd  Backups and Snapshots - Data directory now fully portable, (all paths are relative), refactored backup zip export creation  2022-10-27 12:35:26 +02:00
dgtlmoon  724cb17224  Re #1052 - Dynamic URLs, use variables in the URL (such as the current date, the date in a month, and other logic see https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL ) (#1057)  2022-10-24 23:20:39 +02:00
dgtlmoon  4eb4b401a1  API - system info - allow 5 minutes grace before watch is considered 'overdue'  2022-10-23 23:12:28 +02:00
dgtlmoon  5d40e16c73  API - Adding basic system info/system state API (#1051)  2022-10-23 19:15:11 +02:00
dgtlmoon  492bbce6b6  Build - Fix syntax in container build test (#1050)  2022-10-23 16:02:13 +02:00
dgtlmoon  0394a56be5  Building - Test container build on PR  2022-10-23 15:54:19 +02:00
Entepotenz  7839551d6b  Testing - Use same version of playwright while running tests as in production builds (#1047)  2022-10-23 11:26:32 +02:00
Entepotenz  9c5588c791  update path for validation in the CONTRIBUTING.md (#1046)  2022-10-23 11:25:29 +02:00
dgtlmoon  5a43a350de  History index safety check - Be sure that only valid history index lines are read (#1042)  2022-10-19 22:41:13 +02:00
Michael McMillan  3c31f023ce  Option to Hide the Referer header from monitored websites. (#996)  2022-10-18 09:16:22 +02:00
dgtlmoon  4cbcc59461  0.39.20.4  2022-10-17 18:36:47 +02:00
dgtlmoon  4be0260381  Better cross platform file handling in diff and preview (#1034)  2022-10-17 18:36:22 +02:00
23 changed files with 311 additions and 101 deletions

.github/test/Dockerfile-alpine (new file, 31 lines added)

@@ -0,0 +1,31 @@
# Taken from https://github.com/linuxserver/docker-changedetection.io/blob/main/Dockerfile
# Test that we can still build on Alpine (musl modified libc https://musl.libc.org/)
# Some packages wont install via pypi because they dont have a wheel available under this architecture.
FROM ghcr.io/linuxserver/baseimage-alpine:3.16
ENV PYTHONUNBUFFERED=1
COPY requirements.txt /requirements.txt
RUN \
  apk add --update --no-cache --virtual=build-dependencies \
    cargo \
    g++ \
    gcc \
    libc-dev \
    libffi-dev \
    libxslt-dev \
    make \
    openssl-dev \
    py3-wheel \
    python3-dev \
    zlib-dev && \
  apk add --update --no-cache \
    libxslt \
    python3 \
    py3-pip && \
  echo "**** pip3 install test of changedetection.io ****" && \
  pip3 install -U pip wheel setuptools && \
  pip3 install -U --no-cache-dir --find-links https://wheel-index.linuxserver.io/alpine-3.16/ -r /requirements.txt && \
  apk del --purge \
    build-dependencies


@@ -1,12 +1,21 @@
 name: ChangeDetection.io Container Build Test
 # Triggers the workflow on push or pull request events
+# This line doesnt work, even tho it is the documented one
+#on: [push, pull_request]
 on:
   push:
     paths:
       - requirements.txt
       - Dockerfile
+  pull_request:
+    paths:
+      - requirements.txt
+      - Dockerfile
 # Changes to requirements.txt packages and Dockerfile may or may not always be compatible with arm etc, so worth testing
 # @todo: some kind of path filter for requirements.txt and Dockerfile
 jobs:
@@ -34,6 +43,16 @@ jobs:
           version: latest
           driver-opts: image=moby/buildkit:master
+      # https://github.com/dgtlmoon/changedetection.io/pull/1067
+      # Check we can still build under alpine/musl
+      - name: Test that the docker containers can build (musl via alpine check)
+        id: docker_build_musl
+        uses: docker/build-push-action@v2
+        with:
+          context: ./
+          file: ./.github/test/Dockerfile-alpine
+          platforms: linux/amd64,linux/arm64
       - name: Test that the docker containers can build
         id: docker_build
         uses: docker/build-push-action@v2
@@ -44,3 +63,4 @@ jobs:
           platforms: linux/arm/v7,linux/arm/v6,linux/amd64,linux/arm64,
           cache-from: type=local,src=/tmp/.buildx-cache
           cache-to: type=local,dest=/tmp/.buildx-cache


@@ -6,7 +6,7 @@ Otherwise, it's always best to PR into the `dev` branch.
 Please be sure that all new functionality has a matching test!
-Use `pytest` to validate/test, you can run the existing tests as `pytest tests/test_notifications.py` for example
+Use `pytest` to validate/test, you can run the existing tests as `pytest tests/test_notification.py` for example
 ```
 pip3 install -r requirements-dev


@@ -23,14 +23,10 @@ RUN pip install --target=/dependencies -r /requirements.txt
 # Playwright is an alternative to Selenium
 # Excluded this package from requirements.txt to prevent arm/v6 and arm/v7 builds from failing
+# https://github.com/dgtlmoon/changedetection.io/pull/1067 also musl/alpine (not supported)
 RUN pip install --target=/dependencies playwright~=1.26 \
     || echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled."
-RUN pip install --target=/dependencies jq~=1.3 \
-    || echo "WARN: Failed to install JQ. The application can still run, but the Jq: filter option will be disabled."

 # Final image stage
 FROM python:3.8-slim

@@ -64,6 +60,7 @@ EXPOSE 5000
 # The actual flask app
 COPY changedetectionio /app/changedetectionio
+# The eventlet server wrapper
 COPY changedetection.py /app/changedetection.py
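
The Dockerfile treats Playwright as a best-effort install (note the "|| echo WARN" fallback), and jq now moves out of the image build into requirements.txt behind an environment marker. A minimal sketch, not code from this changeset, of how an application can probe for such an optional module at runtime and degrade gracefully; the helper name is illustrative:

import importlib.util

def jq_available() -> bool:
    # True only if the optional 'jq' package could be installed on this platform
    return importlib.util.find_spec("jq") is not None

if __name__ == "__main__":
    if jq_available():
        print("jq installed: jq: filters can be offered")
    else:
        print("jq missing (e.g. Windows/ARM build): jq: filters disabled, everything else still works")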


@@ -167,9 +167,6 @@ One big advantage of `jq` is that you can use logic in your JSON filter, such as
 See the wiki https://github.com/dgtlmoon/changedetection.io/wiki/JSON-Selector-Filter-help for more information and examples
-Note: `jq` library must be added separately (`pip3 install jq`)

 ### Parse JSON embedded in HTML!
 When you enable a `json:` or `jq:` filter, you can even automatically extract and parse embedded JSON inside a HTML page! Amazingly handy for sites that build content based on JSON, such as many e-commerce websites.

@@ -184,9 +181,9 @@ When you enable a `json:` or `jq:` filter, you can even automatically extract an
 `json:$.price` or `jq:.price` would give `23.50`, or you can extract the whole structure
-## Proxy configuration
+## Proxy Configuration
-See the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration
+See the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration , we also support using [BrightData proxy services where possible]( https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration#brightdata-proxy-support)
 ## Raspberry Pi support?


@@ -33,7 +33,7 @@ from flask_wtf import CSRFProtect
 from changedetectionio import html_tools
 from changedetectionio.api import api_v1

-__version__ = '0.39.20.3'
+__version__ = '0.39.21'

 datastore = None

@@ -194,7 +194,8 @@ def changedetection_app(config=None, datastore_o=None):
     watch_api.add_resource(api_v1.Watch, '/api/v1/watch/<string:uuid>',
                            resource_class_kwargs={'datastore': datastore, 'update_q': update_q})
+    watch_api.add_resource(api_v1.SystemInfo, '/api/v1/systeminfo',
+                           resource_class_kwargs={'datastore': datastore, 'update_q': update_q})

@@ -819,8 +820,8 @@ def changedetection_app(config=None, datastore_o=None):
         # Read as binary and force decode as UTF-8
         # Windows may fail decode in python if we just use 'r' mode (chardet decode exception)
         try:
-            with open(newest_file, 'rb') as f:
-                newest_version_file_contents = f.read().decode('utf-8')
+            with open(newest_file, 'r', encoding='utf-8', errors='ignore') as f:
+                newest_version_file_contents = f.read()
         except Exception as e:
             newest_version_file_contents = "Unable to read {}.\n".format(newest_file)

@@ -832,8 +833,8 @@ def changedetection_app(config=None, datastore_o=None):
             previous_file = history[dates[-2]]
             try:
-                with open(previous_file, 'rb') as f:
-                    previous_version_file_contents = f.read().decode('utf-8')
+                with open(previous_file, 'r', encoding='utf-8', errors='ignore') as f:
+                    previous_version_file_contents = f.read()
             except Exception as e:
                 previous_version_file_contents = "Unable to read {}.\n".format(previous_file)

@@ -909,7 +910,7 @@ def changedetection_app(config=None, datastore_o=None):
         timestamp = list(watch.history.keys())[-1]
         filename = watch.history[timestamp]
         try:
-            with open(filename, 'r') as f:
+            with open(filename, 'r', encoding='utf-8', errors='ignore') as f:
                 tmp = f.readlines()
                 # Get what needs to be highlighted

@@ -984,9 +985,6 @@ def changedetection_app(config=None, datastore_o=None):
         # create a ZipFile object
         backupname = "changedetection-backup-{}.zip".format(int(time.time()))
-        # We only care about UUIDS from the current index file
-        uuids = list(datastore.data['watching'].keys())
         backup_filepath = os.path.join(datastore_o.datastore_path, backupname)

         with zipfile.ZipFile(backup_filepath, "w",

@@ -1002,12 +1000,12 @@ def changedetection_app(config=None, datastore_o=None):
             # Add the flask app secret
             zipObj.write(os.path.join(datastore_o.datastore_path, "secret.txt"), arcname="secret.txt")

-            # Add any snapshot data we find, use the full path to access the file, but make the file 'relative' in the Zip.
-            for txt_file_path in Path(datastore_o.datastore_path).rglob('*.txt'):
-                parent_p = txt_file_path.parent
-                if parent_p.name in uuids:
-                    zipObj.write(txt_file_path,
-                                 arcname=str(txt_file_path).replace(datastore_o.datastore_path, ''),
+            # Add any data in the watch data directory.
+            for uuid, w in datastore.data['watching'].items():
+                for f in Path(w.watch_data_dir).glob('*'):
+                    zipObj.write(f,
+                                 # Use the full path to access the file, but make the file 'relative' in the Zip.
+                                 arcname=os.path.join(f.parts[-2], f.parts[-1]),
                                  compress_type=zipfile.ZIP_DEFLATED,
                                  compresslevel=8)

@@ -1309,8 +1307,8 @@ def changedetection_app(config=None, datastore_o=None):
     threading.Thread(target=notification_runner).start()

-    # Check for new release version, but not when running in test/build
-    if not os.getenv("GITHUB_REF", False):
+    # Check for new release version, but not when running in test/build or pytest
+    if not os.getenv("GITHUB_REF", False) and not config.get('disable_checkver') == True:
         threading.Thread(target=check_for_new_version).start()

     return app
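
With the refactored export above, each watch's files are written under a relative "uuid/filename" arcname, next to url-watches.json and secret.txt at the zip root. A small sketch of inspecting such a backup from the outside; the zip filename below is illustrative, not taken from this changeset:

import zipfile
from collections import defaultdict

def summarise_backup(path="changedetection-backup-1667410000.zip"):
    per_watch = defaultdict(list)
    with zipfile.ZipFile(path) as z:
        for name in z.namelist():
            if "/" in name:
                # arcname was written as "<watch-uuid>/<snapshot or history file>"
                watch_uuid, fname = name.split("/", 1)
                per_watch[watch_uuid].append(fname)
            else:
                print("top-level:", name)
    for watch_uuid, files in per_watch.items():
        print(watch_uuid, "->", len(files), "files")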


@@ -122,3 +122,37 @@ class CreateWatch(Resource):
         return {'status': "OK"}, 200

         return list, 200

+class SystemInfo(Resource):
+    def __init__(self, **kwargs):
+        # datastore is a black box dependency
+        self.datastore = kwargs['datastore']
+        self.update_q = kwargs['update_q']
+
+    @auth.check_token
+    def get(self):
+        import time
+        overdue_watches = []
+
+        # Check all watches and report which have not been checked but should have been
+        for uuid, watch in self.datastore.data.get('watching', {}).items():
+            # see if now - last_checked is greater than the time that should have been
+            # this is not super accurate (maybe they just edited it) but better than nothing
+            t = watch.threshold_seconds()
+            if not t:
+                # Use the system wide default
+                t = self.datastore.threshold_seconds
+
+            time_since_check = time.time() - watch.get('last_checked')
+
+            # Allow 5 minutes of grace time before we decide it's overdue
+            if time_since_check - (5 * 60) > t:
+                overdue_watches.append(uuid)
+
+        return {
+            'queue_size': self.update_q.qsize(),
+            'overdue_watches': overdue_watches,
+            'uptime': round(time.time() - self.datastore.start_time, 2),
+            'watch_count': len(self.datastore.data.get('watching', {}))
+        }, 200
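
The new resource is registered at /api/v1/systeminfo (see the watch_api.add_resource hunk earlier) and uses the same x-api-key token as the rest of the API. A hedged client-side sketch, assuming a local instance on port 5000 and an API key taken from the application's settings:

import requests

def get_system_info(base_url="http://localhost:5000", api_key="your-api-key"):
    # Returns a dict like {'queue_size': ..., 'overdue_watches': [...], 'uptime': ..., 'watch_count': ...}
    res = requests.get(f"{base_url}/api/v1/systeminfo",
                       headers={"x-api-key": api_key},
                       timeout=10)
    res.raise_for_status()
    return res.json()

if __name__ == "__main__":
    info = get_system_info()
    print("queue size:", info["queue_size"])
    print("overdue watches:", info["overdue_watches"])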


@@ -102,6 +102,14 @@ def main():
                          has_password=datastore.data['settings']['application']['password'] != False
                          )

+    # Monitored websites will not receive a Referer header
+    # when a user clicks on an outgoing link.
+    @app.after_request
+    def hide_referrer(response):
+        if os.getenv("HIDE_REFERER", False):
+            response.headers["Referrer-Policy"] = "no-referrer"
+        return response
+
     # Proxy sub-directory support
     # Set environment var USE_X_SETTINGS=1 on this script
     # And then in your proxy_pass settings
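
The hook only sets Referrer-Policy: no-referrer when the HIDE_REFERER environment variable is present. A standalone sketch (not the project's test code) showing the same pattern and how the header can be verified with Flask's test client:

import os
from flask import Flask

os.environ["HIDE_REFERER"] = "true"
app = Flask(__name__)

@app.after_request
def hide_referrer(response):
    # Ask the browser not to send a Referer header when following outgoing links
    if os.getenv("HIDE_REFERER", False):
        response.headers["Referrer-Policy"] = "no-referrer"
    return response

@app.route("/")
def index():
    return "ok"

with app.test_client() as client:
    assert client.get("/").headers["Referrer-Policy"] == "no-referrer"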


@@ -65,7 +65,9 @@ class perform_site_check():
             request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '')

         timeout = self.datastore.data['settings']['requests'].get('timeout')
-        url = watch.get('url')
+        url = watch.link
         request_body = self.datastore.data['watching'][uuid].get('body')
         request_method = self.datastore.data['watching'][uuid].get('method')
         ignore_status_codes = self.datastore.data['watching'][uuid].get('ignore_status_codes', False)

@@ -186,9 +188,6 @@ class perform_site_check():
         # Re #340 - return the content before the 'ignore text' was applied
         text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8')

-        # Re #340 - return the content before the 'ignore text' was applied
-        text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8')

         # Treat pages with no renderable text content as a change? No by default
         empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False)
         if not is_json and not empty_pages_are_a_change and len(stripped_text_from_html.strip()) == 0:


@@ -1,6 +1,8 @@
-import os
-import uuid as uuid_builder
 from distutils.util import strtobool
+
+import logging
+import os
+import time
+import uuid

 minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
 mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}

@@ -22,7 +24,7 @@ class model(dict):
         #'newest_history_key': 0,
         'title': None,
         'previous_md5': False,
-        'uuid': str(uuid_builder.uuid4()),
+        'uuid': str(uuid.uuid4()),
         'headers': {}, # Extra headers to send
         'body': None,
         'method': 'GET',

@@ -60,7 +62,7 @@ class model(dict):
         self.update(self.__base_config)
         self.__datastore_path = kw['datastore_path']
-        self['uuid'] = str(uuid_builder.uuid4())
+        self['uuid'] = str(uuid.uuid4())
         del kw['datastore_path']

@@ -82,10 +84,19 @@ class model(dict):
         return False

     def ensure_data_dir_exists(self):
-        target_path = os.path.join(self.__datastore_path, self['uuid'])
-        if not os.path.isdir(target_path):
-            print ("> Creating data dir {}".format(target_path))
-            os.mkdir(target_path)
+        if not os.path.isdir(self.watch_data_dir):
+            print ("> Creating data dir {}".format(self.watch_data_dir))
+            os.mkdir(self.watch_data_dir)
+
+    @property
+    def link(self):
+        url = self.get('url', '')
+        if '{%' in url or '{{' in url:
+            from jinja2 import Environment
+            # Jinja2 available in URLs along with https://pypi.org/project/jinja2-time/
+            jinja2_env = Environment(extensions=['jinja2_time.TimeExtension'])
+            return str(jinja2_env.from_string(url).render())
+        return url

     @property
     def label(self):

@@ -109,16 +120,40 @@ class model(dict):
     @property
     def history(self):
+        """History index is just a text file as a list
+        {watch-uuid}/history.txt
+        contains a list like
+        {epoch-time},{filename}\n
+        We read in this list as the history information
+        """
         tmp_history = {}
-        import logging
-        import time

         # Read the history file as a dict
-        fname = os.path.join(self.__datastore_path, self.get('uuid'), "history.txt")
+        fname = os.path.join(self.watch_data_dir, "history.txt")
         if os.path.isfile(fname):
             logging.debug("Reading history index " + str(time.time()))
             with open(fname, "r") as f:
-                tmp_history = dict(i.strip().split(',', 2) for i in f.readlines())
+                for i in f.readlines():
+                    if ',' in i:
+                        k, v = i.strip().split(',', 2)
+
+                        # The index history could contain a relative path, so we need to make the fullpath
+                        # so that python can read it
+                        if not '/' in v and not '\'' in v:
+                            v = os.path.join(self.watch_data_dir, v)
+                        else:
+                            # It's possible that they moved the datadir on older versions
+                            # So the snapshot exists but is in a different path
+                            snapshot_fname = v.split('/')[-1]
+                            proposed_new_path = os.path.join(self.watch_data_dir, snapshot_fname)
+                            if not os.path.exists(v) and os.path.exists(proposed_new_path):
+                                v = proposed_new_path
+
+                        tmp_history[k] = v

         if len(tmp_history):
             self.__newest_history_key = list(tmp_history.keys())[-1]

@@ -129,7 +164,7 @@ class model(dict):
     @property
     def has_history(self):
-        fname = os.path.join(self.__datastore_path, self.get('uuid'), "history.txt")
+        fname = os.path.join(self.watch_data_dir, "history.txt")
         return os.path.isfile(fname)

     # Returns the newest key, but if theres only 1 record, then it's counted as not being new, so return 0.

@@ -148,24 +183,25 @@ class model(dict):
     # Save some text file to the appropriate path and bump the history
     # result_obj from fetch_site_status.run()
     def save_history_text(self, contents, timestamp):
-        import uuid
-        import logging
-
-        output_path = os.path.join(self.__datastore_path, self['uuid'])
         self.ensure_data_dir_exists()
-        snapshot_fname = os.path.join(output_path, str(uuid.uuid4()))
-        logging.debug("Saving history text {}".format(snapshot_fname))

+        # Small hack so that we sleep just enough to allow 1 second between history snapshots
+        # this is because history.txt indexes/keys snapshots by epoch seconds and we dont want dupe keys
+        if self.__newest_history_key and int(timestamp) == int(self.__newest_history_key):
+            time.sleep(timestamp - self.__newest_history_key)
+
+        snapshot_fname = "{}.txt".format(str(uuid.uuid4()))

-        # in /diff/ we are going to assume for now that it's UTF-8 when reading
-        with open(snapshot_fname, 'wb') as f:
+        # in /diff/ and /preview/ we are going to assume for now that it's UTF-8 when reading
+        # most sites are utf-8 and some are even broken utf-8
+        with open(os.path.join(self.watch_data_dir, snapshot_fname), 'wb') as f:
             f.write(contents)
             f.close()

         # Append to index
         # @todo check last char was \n
-        index_fname = os.path.join(output_path, "history.txt")
+        index_fname = os.path.join(self.watch_data_dir, "history.txt")
         with open(index_fname, 'a') as f:
             f.write("{},{}\n".format(timestamp, snapshot_fname))
             f.close()

@@ -206,14 +242,14 @@
         return not local_lines.issubset(existing_history)

     def get_screenshot(self):
-        fname = os.path.join(self.__datastore_path, self['uuid'], "last-screenshot.png")
+        fname = os.path.join(self.watch_data_dir, "last-screenshot.png")
         if os.path.isfile(fname):
             return fname
         return False

     def __get_file_ctime(self, filename):
-        fname = os.path.join(self.__datastore_path, self['uuid'], filename)
+        fname = os.path.join(self.watch_data_dir, filename)
         if os.path.isfile(fname):
             return int(os.path.getmtime(fname))
         return False

@@ -238,9 +274,14 @@
     def snapshot_error_screenshot_ctime(self):
         return self.__get_file_ctime('last-error-screenshot.png')

+    @property
+    def watch_data_dir(self):
+        # The base dir of the watch data
+        return os.path.join(self.__datastore_path, self['uuid'])
+
     def get_error_text(self):
         """Return the text saved from a previous request that resulted in a non-200 error"""
-        fname = os.path.join(self.__datastore_path, self['uuid'], "last-error.txt")
+        fname = os.path.join(self.watch_data_dir, "last-error.txt")
         if os.path.isfile(fname):
             with open(fname, 'r') as f:
                 return f.read()

@@ -248,7 +289,7 @@
     def get_error_snapshot(self):
         """Return path to the screenshot that resulted in a non-200 error"""
-        fname = os.path.join(self.__datastore_path, self['uuid'], "last-error-screenshot.png")
+        fname = os.path.join(self.watch_data_dir, "last-error-screenshot.png")
         if os.path.isfile(fname):
             return fname
         return False
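
The new link property only pushes the URL through Jinja2 (with the jinja2-time extension) when it contains {{ or {%, so plain URLs are untouched. A standalone sketch of the same rendering, assuming jinja2 and jinja2-time are installed (both are now listed in requirements.txt):

from jinja2 import Environment

# A watch URL that embeds today's date via the jinja2-time 'now' tag
url = "https://example.com/report?date={% now 'Europe/Berlin', '%Y-%m-%d' %}"

if '{%' in url or '{{' in url:
    jinja2_env = Environment(extensions=['jinja2_time.TimeExtension'])
    url = str(jinja2_env.from_string(url).render())

print(url)  # e.g. https://example.com/report?date=2022-11-02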


@@ -9,6 +9,8 @@
# exit when any command fails
set -e

+SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
+
find tests/test_*py -type f|while read test_name
do
  echo "TEST RUNNING $test_name"

@@ -22,14 +24,6 @@ echo "RUNNING WITH BASE_URL SET"
export BASE_URL="https://really-unique-domain.io"
pytest tests/test_notification.py

-## JQ + JSON: filter test
-# jq is not available on windows and we should just test it when the package is installed
-# this will re-test with jq support
-pip3 install jq~=1.3
-pytest tests/test_jsonpath_jq_selector.py

# Now for the selenium and playwright/browserless fetchers
# Note - this is not UI functional tests - just checking that each one can fetch the content

@@ -45,7 +39,9 @@ docker kill $$-test_selenium
echo "TESTING WEBDRIVER FETCH > PLAYWRIGHT/BROWSERLESS..."
# Not all platforms support playwright (not ARM/rPI), so it's not packaged in requirements.txt
-pip3 install playwright~=1.24
+PLAYWRIGHT_VERSION=$(grep -i -E "RUN pip install.+" "$SCRIPT_DIR/../Dockerfile" | grep --only-matching -i -E "playwright[=><~+]+[0-9\.]+")
+echo "using $PLAYWRIGHT_VERSION"
+pip3 install "$PLAYWRIGHT_VERSION"
docker run -d --name $$-test_browserless -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm -p 3000:3000 --shm-size="2g" browserless/chrome:1.53-chrome-stable
# takes a while to spin up
sleep 5
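
The script now greps the pinned Playwright spec out of the Dockerfile so the test environment installs exactly what the production image uses. For illustration only, the same extraction expressed in Python (assumes a Dockerfile in the current directory):

import re
from pathlib import Path

dockerfile = Path("Dockerfile").read_text()
# Match the "playwright~=1.26" style pin from the RUN pip install line
match = re.search(r"playwright[=><~+]+[0-9.]+", dockerfile, re.IGNORECASE)
if match:
    print("pip3 install", match.group(0))   # e.g. pip3 install playwright~=1.26
else:
    print("no pinned playwright version found")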


@@ -156,7 +156,7 @@ body:after, body:before {
 .fetch-error {
   padding-top: 1em;
-  font-size: 60%;
+  font-size: 80%;
   max-width: 400px;
   display: block;
 }


@@ -27,17 +27,18 @@ class ChangeDetectionStore:
     # For when we edit, we should write to disk
     needs_write_urgent = False

     __version_check = True

     def __init__(self, datastore_path="/datastore", include_default_watches=True, version_tag="0.0.0"):
         # Should only be active for docker
         # logging.basicConfig(filename='/dev/stdout', level=logging.INFO)
-        self.needs_write = False
+        self.__data = App.model()
         self.datastore_path = datastore_path
         self.json_store_path = "{}/url-watches.json".format(self.datastore_path)
+        self.needs_write = False
         self.proxy_list = None
+        self.start_time = time.time()
         self.stop_thread = False
-        self.__data = App.model()

         # Base definition for all watchers
         # deepcopy part of #569 - not sure why its needed exactly
         self.generic_definition = deepcopy(Watch.model(datastore_path = datastore_path, default={}))


@@ -40,7 +40,8 @@
             <fieldset>
                 <div class="pure-control-group">
                     {{ render_field(form.url, placeholder="https://...", required=true, class="m-d") }}
-                    <span class="pure-form-message-inline">Some sites use JavaScript to create the content, for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a></span>
+                    <span class="pure-form-message-inline">Some sites use JavaScript to create the content, for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a></span><br/>
+                    <span class="pure-form-message-inline">You can use variables in the URL, perfect for inserting the current date and other logic, <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Handling-variables-in-the-watched-URL">help and examples here</a></span><br/>
                 </div>
                 <div class="pure-control-group">
                     {{ render_field(form.title, class="m-d") }}


@@ -87,7 +87,7 @@
                 <a class="state-{{'on' if watch.notification_muted}}" href="{{url_for('index', op='mute', uuid=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='bell-off.svg')}}" alt="Mute notifications" title="Mute notifications"/></a>
             </td>
             <td class="title-col inline">{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}}
-            <a class="external" target="_blank" rel="noopener" href="{{ watch.url.replace('source:','') }}"></a>
+            <a class="external" target="_blank" rel="noopener" href="{{ watch.link.replace('source:','') }}"></a>
             <a href="{{url_for('form_share_put_watch', uuid=watch.uuid)}}"><img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='spread.svg')}}" /></a>
             {%if watch.fetch_backend == "html_webdriver" %}<img style="height: 1em; display:inline-block;" src="{{url_for('static_content', group='images', filename='Google-Chrome-icon.png')}}" />{% endif %}


@@ -41,7 +41,7 @@ def app(request):
     cleanup(datastore_path)

-    app_config = {'datastore_path': datastore_path}
+    app_config = {'datastore_path': datastore_path, 'disable_checkver' : True}
     cleanup(app_config['datastore_path'])
     datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], include_default_watches=False)
     app = changedetection_app(app_config, datastore)


@@ -147,6 +147,16 @@ def test_api_simple(client, live_server):
     # @todo how to handle None/default global values?
     assert watch['history_n'] == 2, "Found replacement history section, which is in its own API"

+    # basic systeminfo check
+    res = client.get(
+        url_for("systeminfo"),
+        headers={'x-api-key': api_key},
+    )
+    info = json.loads(res.data)
+    assert info.get('watch_count') == 1
+    assert info.get('uptime') > 0.5
+
     # Finally delete the watch
     res = client.delete(
         url_for("watch", uuid=watch_uuid),


@@ -3,7 +3,7 @@
 import time
 from flask import url_for
 from urllib.request import urlopen
-from .util import set_original_response, set_modified_response, live_server_setup
+from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks

 sleep_time_for_fetch_thread = 3

@@ -36,7 +36,7 @@ def test_check_basic_change_detection_functionality(client, live_server):
     client.get(url_for("form_watch_checknow"), follow_redirects=True)

     # Give the thread time to pick it up
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     # It should report nothing found (no new 'unviewed' class)
     res = client.get(url_for("index"))

@@ -69,7 +69,7 @@ def test_check_basic_change_detection_functionality(client, live_server):
     res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
     assert b'1 watches are queued for rechecking.' in res.data

-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     # Now something should be ready, indicated by having a 'unviewed' class
     res = client.get(url_for("index"))

@@ -98,14 +98,14 @@ def test_check_basic_change_detection_functionality(client, live_server):
     assert b'which has this one new line' in res.data
     assert b'Which is across multiple lines' not in res.data

-    time.sleep(2)
+    wait_for_all_checks(client)

     # Do this a few times.. ensures we dont accidently set the status
     for n in range(2):
         client.get(url_for("form_watch_checknow"), follow_redirects=True)

         # Give the thread time to pick it up
-        time.sleep(sleep_time_for_fetch_thread)
+        wait_for_all_checks(client)

         # It should report nothing found (no new 'unviewed' class)
         res = client.get(url_for("index"))

@@ -125,7 +125,7 @@ def test_check_basic_change_detection_functionality(client, live_server):
     )
     client.get(url_for("form_watch_checknow"), follow_redirects=True)
-    time.sleep(sleep_time_for_fetch_thread)
+    wait_for_all_checks(client)

     res = client.get(url_for("index"))
     assert b'unviewed' in res.data


@@ -1,18 +1,31 @@
 #!/usr/bin/python3

-import time
+from .util import set_original_response, set_modified_response, live_server_setup
 from flask import url_for
 from urllib.request import urlopen
-from . util import set_original_response, set_modified_response, live_server_setup
+from zipfile import ZipFile
+import re
+import time

 def test_backup(client, live_server):
     live_server_setup(live_server)
+    set_original_response()

     # Give the endpoint time to spin up
     time.sleep(1)

+    # Add our URL to the import page
+    res = client.post(
+        url_for("import_page"),
+        data={"urls": url_for('test_endpoint', _external=True)},
+        follow_redirects=True
+    )
+    assert b"1 Imported" in res.data
+    time.sleep(3)
+
     res = client.get(
         url_for("get_backup"),
         follow_redirects=True

@@ -20,6 +33,19 @@ def test_backup(client, live_server):
     # Should get the right zip content type
     assert res.content_type == "application/zip"
     # Should be PK/ZIP stream
     assert res.data.count(b'PK') >= 2

+    # ZipFile from buffer seems non-obvious, just save it instead
+    with open("download.zip", 'wb') as f:
+        f.write(res.data)
+
+    zip = ZipFile('download.zip')
+    l = zip.namelist()
+    uuid4hex = re.compile('^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}.*txt', re.I)
+    newlist = list(filter(uuid4hex.match, l))  # Read Note below
+
+    # Should be two txt files in the archive (history and the snapshot)
+    assert len(newlist) == 2


@@ -0,0 +1,33 @@
#!/usr/bin/python3

import time
from flask import url_for
from .util import live_server_setup

# If there was only a change in the whitespacing, then we shouldnt have a change detected
def test_jinja2_in_url_query(client, live_server):
    live_server_setup(live_server)

    # Give the endpoint time to spin up
    time.sleep(1)

    # Add our URL to the import page
    test_url = url_for('test_return_query', _external=True)

    # because url_for() will URL-encode the var, but we dont here
    full_url = "{}?{}".format(test_url,
                              "date={% now 'Europe/Berlin', '%Y' %}.{% now 'Europe/Berlin', '%m' %}.{% now 'Europe/Berlin', '%d' %}", )
    res = client.post(
        url_for("form_quick_watch_add"),
        data={"url": full_url, "tag": "test"},
        follow_redirects=True
    )
    assert b"Watch added" in res.data
    time.sleep(3)

    # It should report nothing found (no new 'unviewed' class)
    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )
    assert b'date=2' in res.data


@@ -86,6 +86,7 @@ def extract_UUID_from_client(client):
 def wait_for_all_checks(client):
     # Loop waiting until done..
     attempt=0
+    time.sleep(0.1)
     while attempt < 60:
         time.sleep(1)
         res = client.get(url_for("index"))

@@ -159,5 +160,10 @@ def live_server_setup(live_server):
         ret = " ".join([auth.username, auth.password, auth.type])
         return ret

+    # Just return some GET var
+    @live_server.app.route('/test-return-query', methods=['GET'])
+    def test_return_query():
+        return request.query_string
+
     live_server.start()


@@ -45,6 +45,9 @@ services:
       # Respect proxy_pass type settings, `proxy_set_header Host "localhost";` and `proxy_set_header X-Forwarded-Prefix /app;`
       # More here https://github.com/dgtlmoon/changedetection.io/wiki/Running-changedetection.io-behind-a-reverse-proxy-sub-directory
       # - USE_X_SETTINGS=1
+      #
+      # Hides the `Referer` header so that monitored websites can't see the changedetection.io hostname.
+      # - HIDE_REFERER=true

     # Comment out ports: when using behind a reverse proxy , enable networks: etc.
     ports:


@@ -1,36 +1,36 @@
-flask~= 2.0
+flask~=2.0
 flask_wtf
 eventlet>=0.31.0
 validators
-timeago ~=1.0
+timeago~=1.0
-inscriptis ~= 2.2
+inscriptis~=2.2
-feedgen ~= 0.9
+feedgen~=0.9
-flask-login ~= 0.5
+flask-login~=0.5
 flask_restful
 pytz

 # Set these versions together to avoid a RequestsDependencyWarning
 # >= 2.26 also adds Brotli support if brotli is installed
-brotli ~= 1.0
+brotli~=1.0
-requests[socks] ~= 2.28
+requests[socks] ~=2.28
-urllib3 > 1.26
+urllib3>1.26
-chardet > 2.3.0
+chardet>2.3.0
-wtforms ~= 3.0
+wtforms~=3.0
-jsonpath-ng ~= 1.5.3
+jsonpath-ng~=1.5.3
 # jq not available on Windows so must be installed manually

 # Notification library
-apprise ~= 1.1.0
+apprise~=1.1.0
 # apprise mqtt https://github.com/dgtlmoon/changedetection.io/issues/315
 paho-mqtt

 # Pinned version of cryptography otherwise
 # ERROR: Could not build wheels for cryptography which use PEP 517 and cannot be installed directly
-cryptography ~= 3.4
+cryptography~=3.4

 # Used for CSS filtering
 bs4

@@ -39,11 +39,20 @@ bs4
 lxml

 # 3.141 was missing socksVersion, 3.150 was not in pypi, so we try 4.1.0
-selenium ~= 4.1.0
+selenium~=4.1.0

 # https://stackoverflow.com/questions/71652965/importerror-cannot-import-name-safe-str-cmp-from-werkzeug-security/71653849#71653849
 # ImportError: cannot import name 'safe_str_cmp' from 'werkzeug.security'
 # need to revisit flask login versions
-werkzeug ~= 2.0.0
+werkzeug~=2.0.0

+# Templating, so far just in the URLs but in the future can be for the notifications also
+jinja2~=3.1
+jinja2-time
+
+# https://peps.python.org/pep-0508/#environment-markers
+# https://github.com/dgtlmoon/changedetection.io/pull/1009
+jq~=1.3 ;python_version >= "3.8" and sys_platform == "linux"
+
 # playwright is installed at Dockerfile build time because it's not available on all platforms
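
The new jq line relies on a PEP 508 environment marker, so pip simply skips the package on Windows and other unsupported platforms instead of failing the install. A small sketch, not part of this changeset, that evaluates the same marker with the packaging library (pip vendors it; install packaging for standalone use):

from packaging.markers import Marker

# The marker string from the requirements.txt line above
marker = Marker('python_version >= "3.8" and sys_platform == "linux"')

# True on Linux with Python 3.8+, False on e.g. Windows, so pip would skip jq there
print(marker.evaluate())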