Compare commits


56 Commits

Author SHA1 Message Date
dgtlmoon a6eff9977a Maybe this solves cryptography <-> rustc 2024-06-18 11:13:00 +02:00
dgtlmoon 7164f33967 Merge branch 'master' into 2039-restock-use-itemprop 2024-06-18 10:27:43 +02:00
dgtlmoon 43ddfa5005 Add delay 2024-06-17 18:42:06 +02:00
dgtlmoon 4ba847e481 hmm 2024-06-17 16:40:13 +02:00
dgtlmoon f9f83e6663 hmm 2024-06-17 16:16:07 +02:00
dgtlmoon d135240bd0 store everything 2024-06-17 15:24:54 +02:00
dgtlmoon 9b8b76084a add delay for GH 2024-06-17 14:41:54 +02:00
dgtlmoon a59a674f7f Merge branch 'master' into 2039-restock-use-itemprop 2024-06-17 13:38:47 +02:00
dgtlmoon 8915171b84 not used 2024-06-17 11:50:10 +02:00
dgtlmoon 7aa1e5cc00 Merge branch 'master' into 2039-restock-use-itemprop 2024-06-17 11:49:55 +02:00
dgtlmoon dcd000e2d3 add check delays 2024-06-14 14:36:37 +02:00
dgtlmoon 6824fa22c5 workaround for no previous price present 2024-06-13 18:00:11 +02:00
dgtlmoon afc88d654b Merge branch 'master' into 2039-restock-use-itemprop 2024-06-13 16:53:46 +02:00
dgtlmoon b39c770f84 adding % threshold handling 2024-06-12 18:29:44 +02:00
dgtlmoon 575bdcfbe8 WIP 2024-06-12 18:11:20 +02:00
dgtlmoon 32579d7800 fix debug message 2024-06-12 16:44:20 +02:00
dgtlmoon 37a021a701 fix styling 2024-06-12 16:43:10 +02:00
dgtlmoon b9b0a9260d fix 2024-06-12 16:39:09 +02:00
dgtlmoon 99f7f78798 fix test 2024-06-12 16:31:46 +02:00
dgtlmoon e756002b9a min/mnax price check 2024-06-12 15:25:21 +02:00
dgtlmoon cf43852645 Fixing test setup 2024-06-12 14:44:45 +02:00
dgtlmoon ded4495801 Adding follow_price_changes 2024-06-12 14:43:54 +02:00
dgtlmoon 18f80e1592 abstract out the extra tab 2024-06-12 10:32:21 +02:00
dgtlmoon e1c903be0c part 1/2 - Abstract out form handling for the form class 2024-06-11 17:26:06 +02:00
dgtlmoon f983ec2ff0 Merge branch 'master' into 2039-restock-use-itemprop 2024-06-07 13:56:33 +02:00
dgtlmoon 6024d020be Merge branch 'master' into 2039-restock-use-itemprop 2024-06-05 12:32:55 +02:00
dgtlmoon f5dba66c48 small fix 2024-06-04 15:28:48 +02:00
dgtlmoon fa6209687a Merge branch 'master' into 2039-restock-use-itemprop 2024-06-04 14:39:57 +02:00
dgtlmoon d0acc59a13 Merge branch 'master' into 2039-restock-use-itemprop 2024-06-04 12:12:01 +02:00
dgtlmoon c1a0481ec0 skip rdfa? 2024-05-23 09:59:22 +02:00
dgtlmoon d1528bbe89 Misc tweaks 2024-05-22 16:14:41 +02:00
dgtlmoon 929044581b tweak imports 2024-05-22 15:55:30 +02:00
dgtlmoon df6d120c4f Merge branch 'master' into 2039-restock-use-itemprop 2024-05-22 15:46:47 +02:00
dgtlmoon 17c87f494d Merge branch 'ui-search-error-messages' into 2039-restock-use-itemprop 2024-05-10 18:15:36 +02:00
dgtlmoon 9ffa4eda3d UI - Search should scan/search error messages 2024-05-10 17:59:29 +02:00
dgtlmoon d38bb6167b big refactor 2024-05-10 17:52:28 +02:00
dgtlmoon 32e074da2b Merge branch 'master' into 2039-restock-use-itemprop 2024-05-07 15:24:33 +02:00
dgtlmoon ceeb4d54b7 tweak styles 2024-05-07 15:16:23 +02:00
dgtlmoon 8680a29777 UI tweaks 2024-05-07 15:01:41 +02:00
dgtlmoon 41a6c608b1 test note needed 2024-05-07 11:42:09 +02:00
dgtlmoon b1170c1e33 WIP 2024-05-07 11:34:28 +02:00
dgtlmoon 0b85990d28 more work 2024-05-06 16:20:08 +02:00
dgtlmoon 345b54d401 add more test examples 2024-05-03 14:53:35 +02:00
dgtlmoon 2aa8ca0e58 tweaks 2024-05-03 14:47:13 +02:00
dgtlmoon fe8b7b8162 Adding test 2024-05-03 11:07:40 +02:00
dgtlmoon 86f1d8bc0a Make it testable 2024-05-03 09:18:41 +02:00
dgtlmoon 49246bcaf9 Merge branch '2039-restock-use-itemprop' of github.com:dgtlmoon/changedetection.io into 2039-restock-use-itemprop 2024-05-03 09:16:33 +02:00
dgtlmoon 37d081cc43 Merge branch 'master' into 2039-restock-use-itemprop 2024-05-03 09:15:54 +02:00
dgtlmoon b71b457c24 Merge branch 'master' into 2039-restock-use-itemprop 2024-01-14 18:26:24 +01:00
dgtlmoon 61185b5514 Tidy imports 2023-12-09 13:21:21 +01:00
dgtlmoon 69513c47cd Offer more helpful text 2023-12-09 13:17:57 +01:00
dgtlmoon 8090d0dac6 cleanup 2023-12-09 13:08:52 +01:00
dgtlmoon 0f9d3a0dcf logic fixes and force faster xpath1 2023-12-09 13:05:40 +01:00
dgtlmoon a16b129731 Simplify logic 2023-12-09 12:56:02 +01:00
dgtlmoon d58c24db44 Set false text also 2023-12-08 17:37:05 +01:00
dgtlmoon 436c412faf Re #2039 - Use itemprop where available 2023-12-08 17:16:05 +01:00
51 changed files with 1315 additions and 1148 deletions

View File

@@ -88,7 +88,7 @@ jobs:
- name: Build and push :dev
id: docker_build
if: ${{ github.ref }} == "refs/heads/master"
uses: docker/build-push-action@v6
uses: docker/build-push-action@v5
with:
context: ./
file: ./Dockerfile
@@ -106,7 +106,7 @@ jobs:
- name: Build and push :tag
id: docker_build_tag_release
if: github.event_name == 'release' && startsWith(github.event.release.tag_name, '0.')
uses: docker/build-push-action@v6
uses: docker/build-push-action@v5
with:
context: ./
file: ./Dockerfile

View File

@@ -51,7 +51,7 @@ jobs:
# Check we can still build under alpine/musl
- name: Test that the docker containers can build (musl via alpine check)
id: docker_build_musl
uses: docker/build-push-action@v6
uses: docker/build-push-action@v5
with:
context: ./
file: ./.github/test/Dockerfile-alpine
@@ -59,7 +59,7 @@ jobs:
- name: Test that the docker containers can build
id: docker_build
uses: docker/build-push-action@v6
uses: docker/build-push-action@v5
# https://github.com/docker/build-push-action#customizing
with:
context: ./

View File

@@ -93,7 +93,7 @@ jobs:
- name: Playwright and SocketPuppetBrowser - Headers and requests
run: |
# Settings headers playwright tests - Call back in from Sockpuppetbrowser, check headers
docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000?dumpio=true" --network changedet-network test-changedetectionio bash -c 'cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py'
docker run --name "changedet" --hostname changedet --rm -e "FLASK_SERVER_NAME=changedet" -e "PLAYWRIGHT_DRIVER_URL=ws://sockpuppetbrowser:3000?dumpio=true" --network changedet-network test-changedetectionio bash -c 'find .; cd changedetectionio; pytest --live-server-host=0.0.0.0 --live-server-port=5004 tests/test_request.py; pwd;find .'
- name: Playwright and SocketPuppetBrowser - Restock detection
run: |
@@ -231,9 +231,9 @@ jobs:
docker logs test-cdio-basic-tests > output-logs/test-cdio-basic-tests-stdout-${{ env.PYTHON_VERSION }}.txt
docker logs test-cdio-basic-tests 2> output-logs/test-cdio-basic-tests-stderr-${{ env.PYTHON_VERSION }}.txt
- name: Store container log
- name: Store everything including test-datastore
if: always()
uses: actions/upload-artifact@v4
with:
name: test-cdio-basic-tests-output-py${{ env.PYTHON_VERSION }}
path: output-logs
path: .

View File

@@ -3,9 +3,9 @@
# @NOTE! I would love to move to 3.11 but it breaks the async handler in changedetectionio/content_fetchers/puppeteer.py
# If you know how to fix it, please do! and test it for both 3.10 and 3.11
ARG PYTHON_VERSION=3.11
ARG PYTHON_VERSION=3.10
FROM python:${PYTHON_VERSION}-slim-bookworm AS builder
FROM python:${PYTHON_VERSION}-slim-bookworm as builder
# See `cryptography` pin comment in requirements.txt
ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1

View File

@@ -2,7 +2,7 @@
# Read more https://github.com/dgtlmoon/changedetection.io/wiki
__version__ = '0.45.25'
__version__ = '0.45.24'
from changedetectionio.strtobool import strtobool
from json.decoder import JSONDecodeError

View File

@@ -12,9 +12,10 @@ import copy
# See docs/README.md for rebuilding the docs/apidoc information
from . import api_schema
from ..model import watch_base
# Build a JSON Schema atleast partially based on our Watch model
from changedetectionio.model.Watch import base_config as watch_base_config
watch_base_config = watch_base()
schema = api_schema.build_watch_json_schema(watch_base_config)
schema_create_watch = copy.deepcopy(schema)
@@ -170,33 +171,23 @@ class WatchSingleHistory(Resource):
curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history/1677092977 -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json"
@apiName Get single snapshot content
@apiGroup Watch History
@apiParam {String} [html] Optional Set to =1 to return the last HTML (only stores last 2 snapshots, use `latest` as timestamp)
@apiSuccess (200) {String} OK
@apiSuccess (404) {String} ERR Not found
"""
watch = self.datastore.data['watching'].get(uuid)
if not watch:
abort(404, message=f"No watch exists with the UUID of {uuid}")
abort(404, message='No watch exists with the UUID of {}'.format(uuid))
if not len(watch.history):
abort(404, message=f"Watch found but no history exists for the UUID {uuid}")
abort(404, message='Watch found but no history exists for the UUID {}'.format(uuid))
if timestamp == 'latest':
timestamp = list(watch.history.keys())[-1]
if request.args.get('html'):
content = watch.get_fetched_html(timestamp)
if content:
response = make_response(content, 200)
response.mimetype = "text/html"
else:
response = make_response("No content found", 404)
response.mimetype = "text/plain"
else:
content = watch.get_history_snapshot(timestamp)
response = make_response(content, 200)
response.mimetype = "text/plain"
content = watch.get_history_snapshot(timestamp)
response = make_response(content, 200)
response.mimetype = "text/plain"
return response
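
The variant of this endpoint shown above accepts an optional html query parameter on the history route. A minimal sketch of calling it, assuming the requests library and reusing the placeholder host and x-api-key from the curl example in the apidoc comment (not real credentials):

    import requests

    resp = requests.get(
        "http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history/latest",
        params={"html": "1"},  # ask for the stored HTML snapshot rather than the plain-text one
        headers={"x-api-key": "813031b16330fe25e3780cf0325daa45"},
    )
    print(resp.headers.get("Content-Type"), len(resp.text))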

View File

@@ -187,10 +187,8 @@ def construct_blueprint(datastore: ChangeDetectionStore):
u = browsersteps_sessions[browsersteps_session_id]['browserstepper'].page.url
if is_last_step and u:
(screenshot, xpath_data) = browsersteps_sessions[browsersteps_session_id]['browserstepper'].request_visualselector_data()
watch = datastore.data['watching'].get(uuid)
if watch:
watch.save_screenshot(screenshot=screenshot)
watch.save_xpath_data(data=xpath_data)
datastore.save_screenshot(watch_uuid=uuid, screenshot=screenshot)
datastore.save_xpath_data(watch_uuid=uuid, data=xpath_data)
# if not this_session.page:
# cleanup_playwright_session()

View File

@@ -256,8 +256,7 @@ class browsersteps_live_ui(steppable_browser_interface):
def get_current_state(self):
"""Return the screenshot and interactive elements mapping, generally always called after action_()"""
import importlib.resources
xpath_element_js = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('xpath_element_scraper.js').read_text()
xpath_element_js = importlib.resources.read_text("changedetectionio.content_fetchers.res", "xpath_element_scraper.js")
now = time.time()
self.page.wait_for_timeout(1 * 1000)
@@ -290,7 +289,7 @@ class browsersteps_live_ui(steppable_browser_interface):
"""
import importlib.resources
self.page.evaluate("var include_filters=''")
xpath_element_js = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('xpath_element_scraper.js').read_text()
xpath_element_js = importlib.resources.read_text("changedetectionio.content_fetchers.res", "xpath_element_scraper.js")
from changedetectionio.content_fetchers import visualselector_xpath_selectors
xpath_element_js = xpath_element_js.replace('%ELEMENTS%', visualselector_xpath_selectors)
xpath_data = self.page.evaluate("async () => {" + xpath_element_js + "}")

View File

@@ -17,6 +17,7 @@ def construct_blueprint(datastore: ChangeDetectionStore, update_q: PriorityQueue
@price_data_follower_blueprint.route("/<string:uuid>/accept", methods=['GET'])
def accept(uuid):
datastore.data['watching'][uuid]['track_ldjson_price_data'] = PRICE_DATA_TRACK_ACCEPT
datastore.data['watching'][uuid]['processor'] = 'restock_diff'
update_q.put(queuedWatchMetaData.PrioritizedItem(priority=1, item={'uuid': uuid, 'skip_when_checksum_same': False}))
return redirect(url_for("index"))

View File

@@ -103,7 +103,7 @@ def construct_blueprint(datastore: ChangeDetectionStore):
default = datastore.data['settings']['application']['tags'].get(uuid)
form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
form = forms.processor_text_json_diff_form(formdata=request.form if request.method == 'POST' else None,
data=default,
)
form.datastore=datastore # needed?
@@ -126,7 +126,7 @@ def construct_blueprint(datastore: ChangeDetectionStore):
default = datastore.data['settings']['application']['tags'].get(uuid)
form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
form = forms.processor_text_json_diff_form(formdata=request.form if request.method == 'POST' else None,
data=default,
)
# @todo subclass form so validation works

View File

@@ -63,7 +63,7 @@ xpath://body/div/span[contains(@class, 'example-class')]",
<ul>
<li>JSONPath: Prefix with <code>json:</code>, use <code>json:$</code> to force re-formatting if required, <a href="https://jsonpath.com/" target="new">test your JSONPath here</a>.</li>
{% if jq_support %}
<li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>. Prefix <code>jqraw:</code> outputs the results as text instead of a JSON list.</li>
<li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>.</li>
{% else %}
<li>jq support not installed</li>
{% endif %}
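
As a rough illustration of the jq:/jqraw: distinction described in the list item above, a minimal sketch against the optional jq module (the sample data is made up):

    import jq  # optional dependency; generally not available on Windows

    data = {"items": [{"name": "widget"}, {"name": "gadget"}]}

    # What a `jq:` style filter evaluates to internally: a list of matches
    matches = jq.compile(".items[].name").input(data).all()   # ['widget', 'gadget']

    # The `jqraw:` behaviour: the matches joined as plain text lines
    raw_text = "\n".join(str(item) for item in matches)       # 'widget\ngadget'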

View File

@@ -65,8 +65,8 @@ class Fetcher():
def __init__(self):
import importlib.resources
self.xpath_element_js = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('xpath_element_scraper.js').read_text()
self.instock_data_js = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath('stock-not-in-stock.js').read_text()
self.xpath_element_js = importlib.resources.read_text("changedetectionio.content_fetchers.res", 'xpath_element_scraper.js')
self.instock_data_js = importlib.resources.read_text("changedetectionio.content_fetchers.res", 'stock-not-in-stock.js')
@abstractmethod
def get_error(self):
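
Both resource-loading spellings in this hunk read the same bundled JavaScript file; a small side-by-side sketch, assuming the changedetectionio package is importable:

    import importlib.resources

    # Legacy helper form
    js_a = importlib.resources.read_text("changedetectionio.content_fetchers.res", "xpath_element_scraper.js")

    # Traversable-based files() form
    js_b = importlib.resources.files("changedetectionio.content_fetchers.res").joinpath("xpath_element_scraper.js").read_text()

    assert js_a == js_b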

View File

@@ -87,12 +87,11 @@ class ScreenshotUnavailable(Exception):
class ReplyWithContentButNoText(Exception):
def __init__(self, status_code, url, screenshot=None, has_filters=False, html_content='', xpath_data=None):
def __init__(self, status_code, url, screenshot=None, has_filters=False, html_content=''):
# Set this so we can use it in other parts of the app
self.status_code = status_code
self.url = url
self.screenshot = screenshot
self.has_filters = has_filters
self.html_content = html_content
self.xpath_data = xpath_data
return

View File

@@ -30,21 +30,14 @@ function isItemInStock() {
'dieser artikel ist bald wieder verfügbar',
'dostępne wkrótce',
'en rupture de stock',
'isn\'t in stock right now',
'isnt in stock right now',
'isnt in stock right now',
'ist derzeit nicht auf lager',
'item is no longer available',
'let me know when it\'s available',
'mail me when available',
'message if back in stock',
'nachricht bei',
'nicht auf lager',
'nicht lagernd',
'nicht lieferbar',
'nicht verfügbar',
'nicht vorrätig',
'nicht zur verfügung',
'nie znaleziono produktów',
'niet beschikbaar',
'niet leverbaar',
'niet op voorraad',
@@ -55,7 +48,6 @@ function isItemInStock() {
'not currently available',
'not in stock',
'notify me when available',
'notify me',
'notify when available',
'não estamos a aceitar encomendas',
'out of stock',
@@ -70,16 +62,12 @@ function isItemInStock() {
'this item is currently unavailable',
'tickets unavailable',
'tijdelijk uitverkocht',
'unavailable nearby',
'unavailable tickets',
'vergriffen',
'vorbestellen',
'vorbestellung ist bald möglich',
'we couldn\'t find any products that match',
'we do not currently have an estimate of when this product will be back in stock.',
'we don\'t know when or if this item will be back in stock.',
'we were not able to find a match',
'when this arrives in stock',
'zur zeit nicht an lager',
'品切れ',
'已售',

View File

@@ -182,7 +182,6 @@ visibleElementsArray.forEach(function (element) {
// Inject the current one set in the include_filters, which may be a CSS rule
// used for displaying the current one in VisualSelector, where its not one we generated.
if (include_filters.length) {
let results;
// Foreach filter, go and find it on the page and add it to the results so we can visualise it again
for (const f of include_filters) {
bbox = false;
@@ -198,15 +197,10 @@ if (include_filters.length) {
if (f.startsWith('/') || f.startsWith('xpath')) {
var qry_f = f.replace(/xpath(:|\d:)/, '')
console.log("[xpath] Scanning for included filter " + qry_f)
let xpathResult = document.evaluate(qry_f, document, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE, null);
results = [];
for (let i = 0; i < xpathResult.snapshotLength; i++) {
results.push(xpathResult.snapshotItem(i));
}
q = document.evaluate(qry_f, document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue;
} else {
console.log("[css] Scanning for included filter " + f)
console.log("[css] Scanning for included filter " + f);
results = document.querySelectorAll(f);
q = document.querySelector(f);
}
} catch (e) {
// Maybe catch DOMException and alert?
@@ -214,45 +208,44 @@ if (include_filters.length) {
console.log(e);
}
if (results.length) {
if (q) {
// Try to resolve //something/text() back to its /something so we can atleast get the bounding box
try {
if (typeof q.nodeName == 'string' && q.nodeName === '#text') {
q = q.parentElement
}
} catch (e) {
console.log(e)
console.log("xpath_element_scraper: #text resolver")
}
// Iterate over the results
results.forEach(node => {
// Try to resolve //something/text() back to its /something so we can atleast get the bounding box
// #1231 - IN the case XPath attribute filter is applied, we will have to traverse up and find the element.
if (typeof q.getBoundingClientRect == 'function') {
bbox = q.getBoundingClientRect();
console.log("xpath_element_scraper: Got filter element, scroll from top was " + scroll_y)
} else {
try {
if (typeof node.nodeName == 'string' && node.nodeName === '#text') {
node = node.parentElement
}
// Try and see we can find its ownerElement
bbox = q.ownerElement.getBoundingClientRect();
console.log("xpath_element_scraper: Got filter by ownerElement element, scroll from top was " + scroll_y)
} catch (e) {
console.log(e)
console.log("xpath_element_scraper: #text resolver")
console.log("xpath_element_scraper: error looking up q.ownerElement")
}
}
}
// #1231 - IN the case XPath attribute filter is applied, we will have to traverse up and find the element.
if (typeof node.getBoundingClientRect == 'function') {
bbox = node.getBoundingClientRect();
console.log("xpath_element_scraper: Got filter element, scroll from top was " + scroll_y)
} else {
try {
// Try and see we can find its ownerElement
bbox = node.ownerElement.getBoundingClientRect();
console.log("xpath_element_scraper: Got filter by ownerElement element, scroll from top was " + scroll_y)
} catch (e) {
console.log(e)
console.log("xpath_element_scraper: error looking up q.ownerElement")
}
}
if (!q) {
console.log("xpath_element_scraper: filter element " + f + " was not found");
}
if (bbox && bbox['width'] > 0 && bbox['height'] > 0) {
size_pos.push({
xpath: f,
width: parseInt(bbox['width']),
height: parseInt(bbox['height']),
left: parseInt(bbox['left']),
top: parseInt(bbox['top']) + scroll_y,
highlight_as_custom_filter: true
});
}
if (bbox && bbox['width'] > 0 && bbox['height'] > 0) {
size_pos.push({
xpath: f,
width: parseInt(bbox['width']),
height: parseInt(bbox['height']),
left: parseInt(bbox['left']),
top: parseInt(bbox['top']) + scroll_y
});
}
}

View File

@@ -1,97 +1,62 @@
# used for the notifications, the front-end is using a JS library
import difflib
from typing import List, Iterator, Union
def same_slicer(lst: List[str], start: int, end: int) -> List[str]:
"""Return a slice of the list, or a single element if start == end."""
return lst[start:end] if start != end else [lst[start]]
def customSequenceMatcher(
before: List[str],
after: List[str],
include_equal: bool = False,
include_removed: bool = True,
include_added: bool = True,
include_replaced: bool = True,
include_change_type_prefix: bool = True
) -> Iterator[List[str]]:
"""
Compare two sequences and yield differences based on specified parameters.
Args:
before (List[str]): Original sequence
after (List[str]): Modified sequence
include_equal (bool): Include unchanged parts
include_removed (bool): Include removed parts
include_added (bool): Include added parts
include_replaced (bool): Include replaced parts
include_change_type_prefix (bool): Add prefixes to indicate change types
Yields:
List[str]: Differences between sequences
"""
cruncher = difflib.SequenceMatcher(isjunk=lambda x: x in " \t", a=before, b=after)
def same_slicer(l, a, b):
if a == b:
return [l[a]]
else:
return l[a:b]
# like .compare but a little different output
def customSequenceMatcher(before, after, include_equal=False, include_removed=True, include_added=True, include_replaced=True, include_change_type_prefix=True):
cruncher = difflib.SequenceMatcher(isjunk=lambda x: x in " \\t", a=before, b=after)
# @todo Line-by-line mode instead of buncghed, including `after` that is not in `before` (maybe unset?)
for tag, alo, ahi, blo, bhi in cruncher.get_opcodes():
if include_equal and tag == 'equal':
yield before[alo:ahi]
g = before[alo:ahi]
yield g
elif include_removed and tag == 'delete':
prefix = "(removed) " if include_change_type_prefix else ''
yield [f"{prefix}{line}" for line in same_slicer(before, alo, ahi)]
row_prefix = "(removed) " if include_change_type_prefix else ''
g = [ row_prefix + i for i in same_slicer(before, alo, ahi)]
yield g
elif include_replaced and tag == 'replace':
prefix_changed = "(changed) " if include_change_type_prefix else ''
prefix_into = "(into) " if include_change_type_prefix else ''
yield [f"{prefix_changed}{line}" for line in same_slicer(before, alo, ahi)] + \
[f"{prefix_into}{line}" for line in same_slicer(after, blo, bhi)]
row_prefix = "(changed) " if include_change_type_prefix else ''
g = [row_prefix + i for i in same_slicer(before, alo, ahi)]
row_prefix = "(into) " if include_change_type_prefix else ''
g += [row_prefix + i for i in same_slicer(after, blo, bhi)]
yield g
elif include_added and tag == 'insert':
prefix = "(added) " if include_change_type_prefix else ''
yield [f"{prefix}{line}" for line in same_slicer(after, blo, bhi)]
row_prefix = "(added) " if include_change_type_prefix else ''
g = [row_prefix + i for i in same_slicer(after, blo, bhi)]
yield g
def render_diff(
previous_version_file_contents: str,
newest_version_file_contents: str,
include_equal: bool = False,
include_removed: bool = True,
include_added: bool = True,
include_replaced: bool = True,
line_feed_sep: str = "\n",
include_change_type_prefix: bool = True,
patch_format: bool = False
) -> str:
"""
Render the difference between two file contents.
Args:
previous_version_file_contents (str): Original file contents
newest_version_file_contents (str): Modified file contents
include_equal (bool): Include unchanged parts
include_removed (bool): Include removed parts
include_added (bool): Include added parts
include_replaced (bool): Include replaced parts
line_feed_sep (str): Separator for lines in output
include_change_type_prefix (bool): Add prefixes to indicate change types
patch_format (bool): Use patch format for output
Returns:
str: Rendered difference
"""
newest_lines = [line.rstrip() for line in newest_version_file_contents.splitlines()]
previous_lines = [line.rstrip() for line in previous_version_file_contents.splitlines()] if previous_version_file_contents else []
# only_differences - only return info about the differences, no context
# line_feed_sep could be "<br>" or "<li>" or "\n" etc
def render_diff(previous_version_file_contents, newest_version_file_contents, include_equal=False, include_removed=True, include_added=True, include_replaced=True, line_feed_sep="\n", include_change_type_prefix=True, patch_format=False):
newest_version_file_contents = [line.rstrip() for line in newest_version_file_contents.splitlines()]
if previous_version_file_contents:
previous_version_file_contents = [line.rstrip() for line in previous_version_file_contents.splitlines()]
else:
previous_version_file_contents = ""
if patch_format:
patch = difflib.unified_diff(previous_lines, newest_lines)
patch = difflib.unified_diff(previous_version_file_contents, newest_version_file_contents)
return line_feed_sep.join(patch)
rendered_diff = customSequenceMatcher(
before=previous_lines,
after=newest_lines,
include_equal=include_equal,
include_removed=include_removed,
include_added=include_added,
include_replaced=include_replaced,
include_change_type_prefix=include_change_type_prefix
)
rendered_diff = customSequenceMatcher(before=previous_version_file_contents,
after=newest_version_file_contents,
include_equal=include_equal,
include_removed=include_removed,
include_added=include_added,
include_replaced=include_replaced,
include_change_type_prefix=include_change_type_prefix)
def flatten(lst: List[Union[str, List[str]]]) -> str:
return line_feed_sep.join(flatten(x) if isinstance(x, list) else x for x in lst)
return flatten(rendered_diff)
# Recursively join lists
f = lambda L: line_feed_sep.join([f(x) if type(x) is list else x for x in L])
p= f(rendered_diff)
return p
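
A minimal usage sketch of render_diff() as defined above (both spellings share this signature; the import path is assumed). With the defaults, unchanged lines are dropped and changed lines get the "(changed)"/"(into)" prefixes:

    from changedetectionio.diff import render_diff  # assumed import path

    before = "price: 100\nin stock"
    after = "price: 120\nin stock"

    print(render_diff(before, after))
    # (changed) price: 100
    # (into) price: 120

    print(render_diff(before, after, patch_format=True))  # unified-diff style output instead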

View File

@@ -5,6 +5,9 @@ import os
import queue
import threading
import time
from jinja2 import Template
from .safe_jinja import render as jinja_render
from changedetectionio.strtobool import strtobool
from copy import deepcopy
@@ -616,7 +619,6 @@ def changedetection_app(config=None, datastore_o=None):
@login_optionally_required
# https://stackoverflow.com/questions/42984453/wtforms-populate-form-with-data-if-data-exists
# https://wtforms.readthedocs.io/en/3.0.x/forms/#wtforms.form.Form.populate_obj ?
def edit_page(uuid):
from . import forms
from .blueprint.browser_steps.browser_steps import browser_step_ui_config
@@ -652,9 +654,17 @@ def changedetection_app(config=None, datastore_o=None):
# Radio needs '' not None, or incase that the chosen one no longer exists
if default['proxy'] is None or not any(default['proxy'] in tup for tup in datastore.proxy_list):
default['proxy'] = ''
# proxy_override set to the json/text list of the items
form = forms.watchForm(formdata=request.form if request.method == 'POST' else None,
processor = datastore.data['watching'][uuid].get('processor', '')
form_class_name = f"processor_{processor}_form"
try:
form_class = getattr(forms, form_class_name)
except AttributeError:
flash(f"Cannot load the edit form for processor/plugin '{processor}', plugin missing?", 'error')
return redirect(url_for('index'))
form = form_class(formdata=request.form if request.method == 'POST' else None,
data=default
)
@@ -721,7 +731,7 @@ def changedetection_app(config=None, datastore_o=None):
datastore.data['watching'][uuid].update(extra_update_obj)
if request.args.get('unpause_on_save'):
flash("Updated watch - unpaused!")
flash("Updated watch - unpaused!.")
else:
flash("Updated watch.")
@@ -761,23 +771,38 @@ def changedetection_app(config=None, datastore_o=None):
# Only works reliably with Playwright
visualselector_enabled = os.getenv('PLAYWRIGHT_DRIVER_URL', False) and is_html_webdriver
template_args = {
'available_processors': processors.available_processors(),
'browser_steps_config': browser_step_ui_config,
'emailprefix': os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
'extra_title': f" - Edit - {watch.label}",
'extra_processor_config': form.extra_tab_content(),
'form': form,
'has_default_notification_urls': True if len(datastore.data['settings']['application']['notification_urls']) else False,
'has_extra_headers_file': len(datastore.get_all_headers_in_textfile_for_watch(uuid=uuid)) > 0,
'has_special_tag_options': _watch_has_tag_options_set(watch=watch),
'is_html_webdriver': is_html_webdriver,
'jq_support': jq_support,
'playwright_enabled': os.getenv('PLAYWRIGHT_DRIVER_URL', False),
'settings_application': datastore.data['settings']['application'],
'using_global_webdriver_wait': not default['webdriver_delay'],
'uuid': uuid,
'visualselector_enabled': visualselector_enabled,
'watch': watch
}
included_content = None
if form.extra_form_content():
# So that the extra panels can access _helpers.html etc
from jinja2 import Environment, FileSystemLoader
env = Environment(loader=FileSystemLoader('changedetectionio/templates'))
template = env.from_string(form.extra_form_content())
included_content = template.render(**template_args)
output = render_template("edit.html",
available_processors=processors.available_processors(),
browser_steps_config=browser_step_ui_config,
emailprefix=os.getenv('NOTIFICATION_MAIL_BUTTON_PREFIX', False),
extra_title=f" - Edit - {watch.label}",
form=form,
has_default_notification_urls=True if len(datastore.data['settings']['application']['notification_urls']) else False,
has_extra_headers_file=len(datastore.get_all_headers_in_textfile_for_watch(uuid=uuid)) > 0,
has_special_tag_options=_watch_has_tag_options_set(watch=watch),
is_html_webdriver=is_html_webdriver,
jq_support=jq_support,
playwright_enabled=os.getenv('PLAYWRIGHT_DRIVER_URL', False),
settings_application=datastore.data['settings']['application'],
using_global_webdriver_wait=not default['webdriver_delay'],
uuid=uuid,
visualselector_enabled=visualselector_enabled,
watch=watch
extra_tab_content=form.extra_tab_content() if form.extra_tab_content() else None,
extra_form_content=included_content,
**template_args
)
return output

View File

@@ -1,5 +1,8 @@
import os
import re
from wtforms.fields.numeric import FloatField
from changedetectionio.strtobool import strtobool
from wtforms import (
@@ -447,7 +450,7 @@ class SingleBrowserStep(Form):
# remove_button = SubmitField('-', render_kw={"type": "button", "class": "pure-button pure-button-primary", 'title': 'Remove'})
# add_button = SubmitField('+', render_kw={"type": "button", "class": "pure-button pure-button-primary", 'title': 'Add new step after'})
class watchForm(commonSettingsForm):
class processor_text_json_diff_form(commonSettingsForm):
url = fields.URLField('URL', validators=[validateURL()])
tags = StringTagUUID('Group tag', [validators.Optional()], default='')
@@ -475,9 +478,6 @@ class watchForm(commonSettingsForm):
filter_text_replaced = BooleanField('Replaced/changed lines', default=True)
filter_text_removed = BooleanField('Removed lines', default=True)
# @todo this class could be moved to its own text_json_diff_watchForm and this goes to restock_diff_Watchform perhaps
in_stock_only = BooleanField('Only trigger when product goes BACK to in-stock', default=True)
trigger_text = StringListField('Trigger/wait for text', [validators.Optional(), ValidateListRegex()])
if os.getenv("PLAYWRIGHT_DRIVER_URL"):
browser_steps = FieldList(FormField(SingleBrowserStep), min_entries=10)
@@ -493,6 +493,12 @@ class watchForm(commonSettingsForm):
notification_muted = BooleanField('Notifications Muted / Off', default=False)
notification_screenshot = BooleanField('Attach screenshot to notification (where possible)', default=False)
def extra_tab_content(self):
return None
def extra_form_content(self):
return None
def validate(self, **kwargs):
if not super().validate():
return False
@@ -514,6 +520,58 @@ class watchForm(commonSettingsForm):
return result
class processor_restock_diff_form(processor_text_json_diff_form):
in_stock_only = BooleanField('Only trigger when product goes BACK to in-stock', default=True)
price_change_min = FloatField('Minimum amount to trigger notification', [validators.Optional()], render_kw={"placeholder": "No limit", "size": "10"})
price_change_max = FloatField('Maximum amount to trigger notification', [validators.Optional()], render_kw={"placeholder": "No limit", "size": "10"})
price_change_threshold_percent = FloatField('Threshold in % for price changes', validators=[
validators.Optional(),
validators.NumberRange(min=0, max=100, message="Should be between 0 and 100"),
], render_kw={"placeholder": "0%", "size": "5"})
follow_price_changes = BooleanField('Follow price changes', default=False)
def extra_tab_content(self):
return 'Restock & Price Detection'
def extra_form_content(self):
return """
{% from '_helpers.html' import render_field, render_checkbox_field, render_button %}
<script>
$(document).ready(function () {
toggleOpacity('#follow_price_changes', '.price-change-minmax', true);
});
</script>
<fieldset>
<div class="pure-control-group">
<fieldset class="pure-group">
{{ render_checkbox_field(form.in_stock_only) }}
<span class="pure-form-message-inline">Only trigger notifications when page changes from <strong>out of stock</strong> to <strong>back in stock</strong></span>
</fieldset>
<fieldset class="pure-group">
{{ render_checkbox_field(form.follow_price_changes) }}
<span class="pure-form-message-inline">Changes in price should trigger a notification</span>
<span class="pure-form-message-inline">When OFF - only care about restock detection</span>
</fieldset>
<fieldset class="pure-group price-change-minmax">
{{ render_field(form.price_change_min) }}
<span class="pure-form-message-inline">Minimum amount, only trigger a change when the price is less than this amount.</span>
</fieldset>
<fieldset class="pure-group price-change-minmax">
{{ render_field(form.price_change_max) }}
<span class="pure-form-message-inline">Maximum amount, only trigger a change when the price is more than this amount.</span>
</fieldset>
<fieldset class="pure-group price-change-minmax">
{{ render_field(form.price_change_threshold_percent) }}
<span class="pure-form-message-inline">Price must change more than this % to trigger a change.</span><br>
<span class="pure-form-message-inline">For example, If the product is $1,000 USD, <strong>2%</strong> would mean it has to change more than $20 since the last check.</span><br>
</fieldset>
</div>
</fieldset>"""
class SingleExtraProxy(Form):
# maybe better to set some <script>var..

View File

@@ -3,6 +3,8 @@ from bs4 import BeautifulSoup
from inscriptis import get_text
from jsonpath_ng.ext import parse
from typing import List
from inscriptis.css_profiles import CSS_PROFILES, HtmlElement
from inscriptis.html_properties import Display
from inscriptis.model.config import ParserConfig
from xml.sax.saxutils import escape as xml_escape
import json
@@ -194,12 +196,12 @@ def extract_element(find='title', html_content=''):
#
def _parse_json(json_data, json_filter):
if json_filter.startswith("json:"):
if 'json:' in json_filter:
jsonpath_expression = parse(json_filter.replace('json:', ''))
match = jsonpath_expression.find(json_data)
return _get_stripped_text_from_json_match(match)
if json_filter.startswith("jq:") or json_filter.startswith("jqraw:"):
if 'jq:' in json_filter:
try:
import jq
@@ -207,15 +209,10 @@ def _parse_json(json_data, json_filter):
# `jq` requires full compilation in windows and so isn't generally available
raise Exception("jq not support not found")
if json_filter.startswith("jq:"):
jq_expression = jq.compile(json_filter.removeprefix("jq:"))
match = jq_expression.input(json_data).all()
return _get_stripped_text_from_json_match(match)
jq_expression = jq.compile(json_filter.replace('jq:', ''))
match = jq_expression.input(json_data).all()
if json_filter.startswith("jqraw:"):
jq_expression = jq.compile(json_filter.removeprefix("jqraw:"))
match = jq_expression.input(json_data).all()
return '\n'.join(str(item) for item in match)
return _get_stripped_text_from_json_match(match)
def _get_stripped_text_from_json_match(match):
s = []
@@ -243,7 +240,7 @@ def _get_stripped_text_from_json_match(match):
# ensure_is_ldjson_info_type - str "product", optional, "@type == product" (I dont know how to do that as a json selector)
def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None):
stripped_text_from_html = False
# https://github.com/dgtlmoon/changedetection.io/pull/2041#issuecomment-1848397161w
# Try to parse/filter out the JSON, if we get some parser error, then maybe it's embedded within HTML tags
try:
stripped_text_from_html = _parse_json(json.loads(content), json_filter)
@@ -282,17 +279,19 @@ def extract_json_as_string(content, json_filter, ensure_is_ldjson_info_type=None
if isinstance(json_data, dict):
# If it has LD JSON 'key' @type, and @type is 'product', and something was found for the search
# (Some sites have multiple of the same ld+json @type='product', but some have the review part, some have the 'price' part)
# @type could also be a list (Product, SubType)
# @type could also be a list although non-standard ("@type": ["Product", "SubType"],)
# LD_JSON auto-extract also requires some content PLUS the ldjson to be present
# 1833 - could be either str or dict, should not be anything else
if json_data.get('@type') and stripped_text_from_html:
try:
if json_data.get('@type') == str or json_data.get('@type') == dict:
types = [json_data.get('@type')] if isinstance(json_data.get('@type'), str) else json_data.get('@type')
if ensure_is_ldjson_info_type.lower() in [x.lower().strip() for x in types]:
break
except:
continue
t = json_data.get('@type')
if t and stripped_text_from_html:
if isinstance(t, str) and t.lower() == ensure_is_ldjson_info_type.lower():
break
# The non-standard part, some have a list
elif isinstance(t, list):
if ensure_is_ldjson_info_type.lower() in [x.lower().strip() for x in t]:
break
elif stripped_text_from_html:
break
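
The hunk above accepts an @type that is either a string or a (non-standard) list; a small illustrative check with made-up ld+json data:

    ensure_is_ldjson_info_type = "product"

    for json_data in (
        {"@type": "Product", "offers": {"price": "99.95"}},
        {"@type": ["Product", "SubType"], "offers": {"price": "99.95"}},
    ):
        t = json_data.get("@type")
        if isinstance(t, str):
            matched = t.lower() == ensure_is_ldjson_info_type.lower()
        else:  # the non-standard list form
            matched = ensure_is_ldjson_info_type.lower() in [x.lower().strip() for x in t]
        print(matched)  # True in both cases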

View File

@@ -1,19 +1,14 @@
from .Watch import base_config
import uuid
class model(dict):
from changedetectionio.model import watch_base
class model(watch_base):
def __init__(self, *arg, **kw):
self.update(base_config)
self['uuid'] = str(uuid.uuid4())
super(model, self).__init__(*arg, **kw)
if kw.get('default'):
self.update(kw['default'])
del kw['default']
# Goes at the end so we update the default object with the initialiser
super(model, self).__init__(*arg, **kw)

View File

@@ -1,6 +1,6 @@
from changedetectionio.strtobool import strtobool
from changedetectionio.safe_jinja import render as jinja_render
from . import watch_base
import os
import re
import time
@@ -15,69 +15,6 @@ SAFE_PROTOCOL_REGEX='^(http|https|ftp|file):'
minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 3))
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
from changedetectionio.notification import (
default_notification_format_for_watch
)
base_config = {
'body': None,
'browser_steps': [],
'browser_steps_last_error_step': None,
'check_unique_lines': False, # On change-detected, compare against all history if its something new
'check_count': 0,
'date_created': None,
'consecutive_filter_failures': 0, # Every time the CSS/xPath filter cannot be located, reset when all is fine.
'extract_text': [], # Extract text by regex after filters
'extract_title_as_title': False,
'fetch_backend': 'system', # plaintext, playwright etc
'fetch_time': 0.0,
'processor': 'text_json_diff', # could be restock_diff or others from .processors
'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')),
'filter_text_added': True,
'filter_text_replaced': True,
'filter_text_removed': True,
'has_ldjson_price_data': None,
'track_ldjson_price_data': None,
'headers': {}, # Extra headers to send
'ignore_text': [], # List of text to ignore when calculating the comparison checksum
'in_stock' : None,
'in_stock_only' : True, # Only trigger change on going to instock from out-of-stock
'include_filters': [],
'last_checked': 0,
'last_error': False,
'last_viewed': 0, # history key value of the last viewed via the [diff] link
'method': 'GET',
'notification_alert_count': 0,
# Custom notification content
'notification_body': None,
'notification_format': default_notification_format_for_watch,
'notification_muted': False,
'notification_title': None,
'notification_screenshot': False, # Include the latest screenshot if available and supported by the apprise URL
'notification_urls': [], # List of URLs to add to the notification Queue (Usually AppRise)
'paused': False,
'previous_md5': False,
'previous_md5_before_filters': False, # Used for skipping changedetection entirely
'proxy': None, # Preferred proxy connection
'remote_server_reply': None, # From 'server' reply header
'sort_text_alphabetically': False,
'subtractive_selectors': [],
'tag': '', # Old system of text name for a tag, to be removed
'tags': [], # list of UUIDs to App.Tags
'text_should_not_be_present': [], # Text that should not present
# Re #110, so then if this is set to None, we know to use the default value instead
# Requires setting to None on submit if it's the same as the default
# Should be all None by default, so we use the system default in this case.
'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None},
'time_between_check_use_default': True,
'title': None,
'trigger_text': [], # List of text or regex to wait for until a change is detected
'url': '',
'uuid': str(uuid.uuid4()),
'webdriver_delay': None,
'webdriver_js_execute_code': None, # Run before change-detection
}
def is_safe_url(test_url):
# See https://github.com/dgtlmoon/changedetection.io/issues/1358
@@ -94,20 +31,17 @@ def is_safe_url(test_url):
return True
class model(dict):
class model(watch_base):
__newest_history_key = None
__history_n = 0
jitter_seconds = 0
def __init__(self, *arg, **kw):
self.update(base_config)
self.__datastore_path = kw['datastore_path']
self['uuid'] = str(uuid.uuid4())
del kw['datastore_path']
super(model, self).__init__(*arg, **kw)
if kw.get('default'):
self.update(kw['default'])
del kw['default']
@@ -115,9 +49,6 @@ class model(dict):
# Be sure the cached timestamp is ready
bump = self.history
# Goes at the end so we update the default object with the initialiser
super(model, self).__init__(*arg, **kw)
@property
def viewed(self):
# Don't return viewed when last_viewed is 0 and newest_key is 0
@@ -238,8 +169,6 @@ class model(dict):
if len(tmp_history):
self.__newest_history_key = list(tmp_history.keys())[-1]
else:
self.__newest_history_key = None
self.__history_n = len(tmp_history)
@@ -258,6 +187,17 @@ class model(dict):
return has_browser_steps
@property
def has_restock_info(self):
# has either price or availability
if self.get('restock'):
if self['restock'].get('price') != None:
return True
if self['restock'].get('availability') != None:
return True
return False
# Returns the newest key, but if theres only 1 record, then it's counted as not being new, so return 0.
@property
def newest_history_key(self):
@@ -330,10 +270,15 @@ class model(dict):
def save_history_text(self, contents, timestamp, snapshot_id):
import brotli
logger.trace(f"{self.get('uuid')} - Updating history.txt with timestamp {timestamp}")
self.ensure_data_dir_exists()
# Small hack so that we sleep just enough to allow 1 second between history snapshots
# this is because history.txt indexes/keys snapshots by epoch seconds and we dont want dupe keys
if self.__newest_history_key and int(timestamp) == int(self.__newest_history_key):
logger.warning(f"Timestamp {timestamp} already exists, waiting 1 seconds so we have a unique key in history.txt")
timestamp = str(int(timestamp) + 1)
time.sleep(1)
threshold = int(os.getenv('SNAPSHOT_BROTLI_COMPRESSION_THRESHOLD', 1024))
skip_brotli = strtobool(os.getenv('DISABLE_BROTLI_TEXT_SNAPSHOT', 'False'))
@@ -525,42 +470,8 @@ class model(dict):
# None is set
return False
def save_error_text(self, contents):
self.ensure_data_dir_exists()
target_path = os.path.join(self.watch_data_dir, "last-error.txt")
with open(target_path, 'w') as f:
f.write(contents)
def save_xpath_data(self, data, as_error=False):
import json
if as_error:
target_path = os.path.join(self.watch_data_dir, "elements-error.json")
else:
target_path = os.path.join(self.watch_data_dir, "elements.json")
self.ensure_data_dir_exists()
with open(target_path, 'w') as f:
f.write(json.dumps(data))
f.close()
# Save as PNG, PNG is larger but better for doing visual diff in the future
def save_screenshot(self, screenshot: bytes, as_error=False):
if as_error:
target_path = os.path.join(self.watch_data_dir, "last-error-screenshot.png")
else:
target_path = os.path.join(self.watch_data_dir, "last-screenshot.png")
self.ensure_data_dir_exists()
with open(target_path, 'wb') as f:
f.write(screenshot)
f.close()
def get_last_fetched_text_before_filters(self):
def get_last_fetched_before_filters(self):
import brotli
filepath = os.path.join(self.watch_data_dir, 'last-fetched.br')
@@ -575,56 +486,12 @@ class model(dict):
with open(filepath, 'rb') as f:
return(brotli.decompress(f.read()).decode('utf-8'))
def save_last_text_fetched_before_filters(self, contents):
def save_last_fetched_before_filters(self, contents):
import brotli
filepath = os.path.join(self.watch_data_dir, 'last-fetched.br')
with open(filepath, 'wb') as f:
f.write(brotli.compress(contents, mode=brotli.MODE_TEXT))
def save_last_fetched_html(self, timestamp, contents):
import brotli
self.ensure_data_dir_exists()
snapshot_fname = f"{timestamp}.html.br"
filepath = os.path.join(self.watch_data_dir, snapshot_fname)
with open(filepath, 'wb') as f:
contents = contents.encode('utf-8') if isinstance(contents, str) else contents
try:
f.write(brotli.compress(contents))
except Exception as e:
logger.warning(f"{self.get('uuid')} - Unable to compress snapshot, saving as raw data to {filepath}")
logger.warning(e)
f.write(contents)
self._prune_last_fetched_html_snapshots()
def get_fetched_html(self, timestamp):
import brotli
snapshot_fname = f"{timestamp}.html.br"
filepath = os.path.join(self.watch_data_dir, snapshot_fname)
if os.path.isfile(filepath):
with open(filepath, 'rb') as f:
return (brotli.decompress(f.read()).decode('utf-8'))
return False
def _prune_last_fetched_html_snapshots(self):
dates = list(self.history.keys())
dates.reverse()
for index, timestamp in enumerate(dates):
snapshot_fname = f"{timestamp}.html.br"
filepath = os.path.join(self.watch_data_dir, snapshot_fname)
# Keep only the first 2
if index > 1 and os.path.isfile(filepath):
os.remove(filepath)
@property
def get_browsersteps_available_screenshots(self):
"For knowing which screenshots are available to show the user in BrowserSteps UI"

View File

@@ -0,0 +1,80 @@
import os
import uuid
from changedetectionio import strtobool
from changedetectionio.notification import default_notification_format_for_watch
class Restock(dict):
# @todo some setter to handle weird prices like "00,01" etc?
def __init__(self, *args, **kwargs):
default_values = {'in_stock': None, 'price': None, 'currency': None}
default_values.update(dict(*args, **kwargs))
super().__init__(default_values.copy())
class watch_base(dict):
def __init__(self, *arg, **kw):
self.update({
# Custom notification content
# Re #110, so then if this is set to None, we know to use the default value instead
# Requires setting to None on submit if it's the same as the default
# Should be all None by default, so we use the system default in this case.
'body': None,
'browser_steps': [],
'browser_steps_last_error_step': None,
'check_count': 0,
'check_unique_lines': False, # On change-detected, compare against all history if its something new
'consecutive_filter_failures': 0, # Every time the CSS/xPath filter cannot be located, reset when all is fine.
'date_created': None,
'extract_text': [], # Extract text by regex after filters
'extract_title_as_title': False,
'fetch_backend': 'system', # plaintext, playwright etc
'fetch_time': 0.0,
'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')),
'filter_text_added': True,
'filter_text_removed': True,
'filter_text_replaced': True,
'follow_price_changes': True,
'has_ldjson_price_data': None,
'headers': {}, # Extra headers to send
'ignore_text': [], # List of text to ignore when calculating the comparison checksum
'in_stock': None,
'in_stock_only': True, # Only trigger change on going to instock from out-of-stock
'include_filters': [],
'last_checked': 0,
'last_error': False,
'last_viewed': 0, # history key value of the last viewed via the [diff] link
'method': 'GET',
'notification_alert_count': 0,
'notification_body': None,
'notification_format': default_notification_format_for_watch,
'notification_muted': False,
'notification_screenshot': False, # Include the latest screenshot if available and supported by the apprise URL
'notification_title': None,
'notification_urls': [], # List of URLs to add to the notification Queue (Usually AppRise)
'paused': False,
'previous_md5': False,
'previous_md5_before_filters': False, # Used for skipping changedetection entirely
'processor': 'text_json_diff', # could be restock_diff or others from .processors
'price_change_threshold_percent': None,
'proxy': None, # Preferred proxy connection
'remote_server_reply': None, # From 'server' reply header
'sort_text_alphabetically': False,
'subtractive_selectors': [],
'tag': '', # Old system of text name for a tag, to be removed
'tags': [], # list of UUIDs to App.Tags
'text_should_not_be_present': [], # Text that should not present
'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None},
'time_between_check_use_default': True,
'title': None,
'track_ldjson_price_data': None,
'trigger_text': [], # List of text or regex to wait for until a change is detected
'url': '',
'uuid': str(uuid.uuid4()),
'webdriver_delay': None,
'webdriver_js_execute_code': None, # Run before change-detection
})
super(watch_base, self).__init__(*arg, **kw)
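
A small sketch of the Restock mapping defined above: keys that are not supplied fall back to the None defaults, supplied keys override them (assumes the package is importable; the price is made up):

    from changedetectionio.model import Restock

    r = Restock({'price': 19.99})
    print(r)  # {'in_stock': None, 'price': 19.99, 'currency': None}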

View File

@@ -1,6 +1,5 @@
from abc import abstractmethod
from changedetectionio.strtobool import strtobool
from changedetectionio.model import Watch
from copy import deepcopy
from loguru import logger
import hashlib
@@ -139,7 +138,7 @@ class difference_detection_processor():
# After init, call run_changedetection() which will do the actual change-detection
@abstractmethod
def run_changedetection(self, watch: Watch, skip_when_checksum_same=True):
def run_changedetection(self, uuid, skip_when_checksum_same=True):
update_obj = {'last_notification_error': False, 'last_error': False}
some_data = 'xxxxx'
update_obj["previous_md5"] = hashlib.md5(some_data.encode('utf-8')).hexdigest()

View File

@@ -1,31 +1,123 @@
from . import difference_detection_processor
from ..model import Restock
from copy import deepcopy
from loguru import logger
import hashlib
import re
import urllib3
import time
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
name = 'Re-stock detection for single product pages'
name = 'Re-stock & Price detection for single product pages'
description = 'Detects if the product goes back to in-stock'
class UnableToExtractRestockData(Exception):
def __init__(self, status_code):
# Set this so we can use it in other parts of the app
self.status_code = status_code
return
def _search_prop_by_value(matches, value):
for properties in matches:
for prop in properties:
if value in prop[0]:
return prop[1] # Yield the desired value and exit the function
# should return Restock()
# add casting?
def get_itemprop_availability(html_content) -> Restock:
"""
Kind of funny/cool way to find price/availability in one many different possibilities.
Use 'extruct' to find any possible RDFa/microdata/json-ld data, make a JSON string from the output then search it.
"""
from jsonpath_ng import parse
now = time.time()
import extruct
logger.trace(f"Imported extruct module in {time.time() - now:.3f}s")
value = {}
now = time.time()
# Extruct is very slow, I'm wondering if some ML is going to be faster (800ms on my i7), 'rdfa' seems to be the heaviest.
syntaxes = ['dublincore', 'json-ld', 'microdata', 'microformat', 'opengraph']
data = extruct.extract(html_content, syntaxes=syntaxes)
logger.trace(f"Extruct basic extract of all metadata done in {time.time() - now:.3f}s")
# First phase, dead simple scanning of anything that looks useful
if data:
logger.debug(f"Using jsonpath to find price/availability/etc")
price_parse = parse('$..(price|Price)')
pricecurrency_parse = parse('$..(pricecurrency|currency| priceCurrency )')
availability_parse = parse('$..(availability|Availability)')
price_result = price_parse.find(data)
if price_result:
value['price'] = price_result[0].value
pricecurrency_result = pricecurrency_parse.find(data)
if pricecurrency_result:
value['currency'] = pricecurrency_result[0].value
availability_result = availability_parse.find(data)
if availability_result:
value['availability'] = availability_result[0].value
if value.get('availability'):
value['availability'] = re.sub(r'(?i)^(https|http)://schema.org/', '',
value.get('availability').strip(' "\'').lower()) if value.get('availability') else None
# Second, go dig OpenGraph which is something that jsonpath_ng cant do because of the tuples and double-dots (:)
if not value.get('price') or value.get('availability'):
logger.debug(f"Alternatively digging through OpenGraph properties for restock/price info..")
jsonpath_expr = parse('$..properties')
for match in jsonpath_expr.find(data):
if not value.get('price'):
value['price'] = _search_prop_by_value([match.value], "price:amount")
if not value.get('availability'):
value['availability'] = _search_prop_by_value([match.value], "product:availability")
if not value.get('currency'):
value['currency'] = _search_prop_by_value([match.value], "price:currency")
logger.trace(f"Processed with Extruct in {time.time()-now:.3f}s")
return Restock(value)
def is_between(number, lower=None, upper=None):
"""
Check if a number is between two values.
Parameters:
number (float): The number to check.
lower (float or None): The lower bound (inclusive). If None, no lower bound.
upper (float or None): The upper bound (inclusive). If None, no upper bound.
Returns:
bool: True if the number is between the lower and upper bounds, False otherwise.
"""
return (lower is None or lower <= number) and (upper is None or number <= upper)
class perform_site_check(difference_detection_processor):
screenshot = None
xpath_data = None
def run_changedetection(self, watch, skip_when_checksum_same=True):
def run_changedetection(self, uuid, skip_when_checksum_same=True):
# DeepCopy so we can be sure we don't accidently change anything by reference
watch = deepcopy(self.datastore.data['watching'].get(uuid))
if not watch:
raise Exception("Watch no longer exists.")
# Unset any existing notification error
update_obj = {'last_notification_error': False, 'last_error': False}
update_obj = {'last_notification_error': False, 'last_error': False, 'restock': None}
self.screenshot = self.fetcher.screenshot
self.xpath_data = self.fetcher.xpath_data
@@ -34,29 +126,91 @@ class perform_site_check(difference_detection_processor):
update_obj['content_type'] = self.fetcher.headers.get('Content-Type', '')
update_obj["last_check_status"] = self.fetcher.get_last_status_code()
# Main detection method
fetched_md5 = None
if self.fetcher.instock_data:
fetched_md5 = hashlib.md5(self.fetcher.instock_data.encode('utf-8')).hexdigest()
# 'Possibly in stock' comes from stock-not-in-stock.js when no string found above the fold.
update_obj["in_stock"] = True if self.fetcher.instock_data == 'Possibly in stock' else False
logger.debug(f"Watch UUID {watch.get('uuid')} restock check returned '{self.fetcher.instock_data}' from JS scraper.")
else:
itemprop_availability = get_itemprop_availability(html_content=self.fetcher.content)
# Something valid in get_itemprop_availability() by scraping metadata ?
if itemprop_availability.get('price') or itemprop_availability.get('availability'):
# Store for other usage
update_obj['restock'] = itemprop_availability
if itemprop_availability.get('availability'):
# @todo: Configurable?
if any(substring.lower() in itemprop_availability['availability'].lower() for substring in [
'instock',
'instoreonly',
'limitedavailability',
'onlineonly',
'presale']
):
update_obj['restock']['in_stock'] = True
else:
update_obj['restock']['in_stock'] = False
# Used for the change detection, we store the real data separately, in the future this can implement some min,max threshold
# @todo if price is None?
self.fetcher.instock_data = f"{itemprop_availability.get('availability')} - {itemprop_availability.get('price')}"
elif self.fetcher.instock_data:
# 'Possibly in stock' comes from stock-not-in-stock.js when no string found above in the metadata of the HTML
update_obj['restock'] = Restock({'in_stock': True if self.fetcher.instock_data == 'Possibly in stock' else False})
# @todo scrape price somehow
logger.debug(
f"Restock - using scraped browserdata - Watch UUID {uuid} restock check returned '{self.fetcher.instock_data}' from JS scraper.")
if not self.fetcher.instock_data:
raise UnableToExtractRestockData(status_code=self.fetcher.status_code)
# Main detection method
fetched_md5 = hashlib.md5(self.fetcher.instock_data.encode('utf-8')).hexdigest()
# The main thing that all this at the moment comes down to :)
changed_detected = False
logger.debug(f"Watch UUID {watch.get('uuid')} restock check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}")
logger.debug(f"Watch UUID {uuid} restock check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}")
if watch.get('previous_md5') and watch.get('previous_md5') != fetched_md5:
# out of stock -> back in stock only?
if watch.get('restock') and watch['restock'].get('in_stock') != update_obj['restock'].get('in_stock'):
# Yes, if we only care about it going back in stock AND we are now in stock
if watch.get('in_stock_only') and update_obj["in_stock"]:
if watch.get('in_stock_only') and update_obj['restock']['in_stock']:
changed_detected = True
if not watch.get('in_stock_only'):
# All cases
changed_detected = True
if watch.get('follow_price_changes') and watch.get('restock') and update_obj.get('restock') and update_obj['restock'].get('price'):
price = float(update_obj['restock'].get('price'))
# Default to current price if no previous price found
previous_price = float(watch['restock'].get('price', price))
# It was different, but negate it further down
if price != previous_price:
changed_detected = True
# Minimum/maximum price limit
if update_obj.get('restock') and update_obj['restock'].get('price'):
logger.debug(
f"{uuid} - Change was detected, 'price_change_max' is '{watch.get('price_change_max', '')}' 'price_change_min' is '{watch.get('price_change_min', '')}', price from website is '{update_obj['restock'].get('price', '')}'.")
if update_obj['restock'].get('price'):
min_limit = float(watch.get('price_change_min')) if watch.get('price_change_min') else None
max_limit = float(watch.get('price_change_max')) if watch.get('price_change_max') else None
price = float(update_obj['restock'].get('price'))
logger.debug(f"{uuid} after float conversion - Min limit: '{min_limit}' Max limit: '{max_limit}' Price: '{price}'")
if min_limit or max_limit:
if is_between(number=price, lower=min_limit, upper=max_limit):
if changed_detected:
logger.debug(f"{uuid} Override change-detected to FALSE because price was inside threshold")
changed_detected = False
if changed_detected and watch.get('price_change_threshold_percent'):
pc = float(watch.get('price_change_threshold_percent'))
change = abs((price - previous_price) / previous_price * 100)
if change and change <= pc:
logger.debug(f"{uuid} Override change-detected to FALSE because % threshold ({pc}%) was {change:.3f}%")
changed_detected = False
else:
logger.debug(f"{uuid} Price change was {change:.3f}% , (threshold {pc}%)")
# Always record the new checksum
update_obj["previous_md5"] = fetched_md5
return changed_detected, update_obj, self.fetcher.instock_data.encode('utf-8').strip()
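(Editor's sketch, not part of the diff: a condensed, standalone restatement of the price-threshold decision above, reusing is_between() from this module. All names here are local to the sketch, not the project's API.)

def price_change_is_interesting(price, previous_price,
                                price_change_min=None, price_change_max=None,
                                threshold_percent=None):
    # A price that did not move is never interesting
    changed = price != previous_price
    # Suppress the change if the new price sits inside the configured min/max window
    if changed and (price_change_min or price_change_max):
        if is_between(number=price, lower=price_change_min, upper=price_change_max):
            changed = False
    # Suppress the change if it moved by less than the configured percentage
    if changed and threshold_percent:
        change_pct = abs((price - previous_price) / previous_price * 100)
        if change_pct <= threshold_percent:
            changed = False
    return changed

# e.g. 100.00 -> 101.00 with a 5% threshold is suppressed, 100.00 -> 110.00 is not:
#   price_change_is_interesting(101.0, 100.0, threshold_percent=5)  # -> False
#   price_change_is_interesting(110.0, 100.0, threshold_percent=5)  # -> True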

View File

@@ -10,18 +10,18 @@ from . import difference_detection_processor
from ..html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text
from changedetectionio import html_tools, content_fetchers
from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT
import changedetectionio.content_fetchers
from copy import deepcopy
from loguru import logger
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
name = 'Webpage Text/HTML, JSON and PDF changes'
description = 'Detects all text changes where possible'
json_filter_prefixes = ['json:', 'jq:', 'jqraw:']
json_filter_prefixes = ['json:', 'jq:']
class FilterNotFoundInResponse(ValueError):
def __init__(self, msg, screenshot=None, xpath_data=None):
self.screenshot = screenshot
self.xpath_data = xpath_data
def __init__(self, msg):
ValueError.__init__(self, msg)
@@ -34,12 +34,14 @@ class PDFToHTMLToolNotFound(ValueError):
# (set_proxy_from_list)
class perform_site_check(difference_detection_processor):
def run_changedetection(self, watch, skip_when_checksum_same=True):
def run_changedetection(self, uuid, skip_when_checksum_same=True):
changed_detected = False
html_content = ""
screenshot = False # as bytes
stripped_text_from_html = ""
# DeepCopy so we can be sure we don't accidentally change anything by reference
watch = deepcopy(self.datastore.data['watching'].get(uuid))
if not watch:
raise Exception("Watch no longer exists.")
@@ -114,12 +116,12 @@ class perform_site_check(difference_detection_processor):
# Better would be if Watch.model could access the global data also
# and then use getattr https://docs.python.org/3/reference/datamodel.html#object.__getitem__
# https://realpython.com/inherit-python-dict/ instead of doing it procedurally
include_filters_from_tags = self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='include_filters')
include_filters_from_tags = self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='include_filters')
# 1845 - remove duplicated filters in both group and watch include filter
include_filters_rule = list(dict.fromkeys(watch.get('include_filters', []) + include_filters_from_tags))
subtractive_selectors = [*self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='subtractive_selectors'),
subtractive_selectors = [*self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='subtractive_selectors'),
*watch.get("subtractive_selectors", []),
*self.datastore.data["settings"]["application"].get("global_subtractive_selectors", [])
]
@@ -186,7 +188,7 @@ class perform_site_check(difference_detection_processor):
append_pretty_line_formatting=not watch.is_source_type_url)
if not html_content.strip():
raise FilterNotFoundInResponse(msg=include_filters_rule, screenshot=self.fetcher.screenshot, xpath_data=self.fetcher.xpath_data)
raise FilterNotFoundInResponse(include_filters_rule)
if has_subtractive_selectors:
html_content = html_tools.element_removal(subtractive_selectors, html_content)
@@ -220,7 +222,7 @@ class perform_site_check(difference_detection_processor):
from .. import diff
# needs to not include (added) etc or it may get used twice
# Replace the processed text with the preferred result
rendered_diff = diff.render_diff(previous_version_file_contents=watch.get_last_fetched_text_before_filters(),
rendered_diff = diff.render_diff(previous_version_file_contents=watch.get_last_fetched_before_filters(),
newest_version_file_contents=stripped_text_from_html,
include_equal=False, # not the same lines
include_added=watch.get('filter_text_added', True),
@@ -229,7 +231,7 @@ class perform_site_check(difference_detection_processor):
line_feed_sep="\n",
include_change_type_prefix=False)
watch.save_last_text_fetched_before_filters(text_content_before_ignored_filter)
watch.save_last_fetched_before_filters(text_content_before_ignored_filter)
if not rendered_diff and stripped_text_from_html:
# We had some content, but no differences were found
@@ -244,10 +246,9 @@ class perform_site_check(difference_detection_processor):
if not is_json and not empty_pages_are_a_change and len(stripped_text_from_html.strip()) == 0:
raise content_fetchers.exceptions.ReplyWithContentButNoText(url=url,
status_code=self.fetcher.get_last_status_code(),
screenshot=self.fetcher.screenshot,
screenshot=screenshot,
has_filters=has_filter_rule,
html_content=html_content,
xpath_data=self.fetcher.xpath_data
html_content=html_content
)
# We rely on the actual text in the html output.. many sites have random script vars etc,
@@ -343,17 +344,17 @@ class perform_site_check(difference_detection_processor):
if not watch['title'] or not len(watch['title']):
update_obj['title'] = html_tools.extract_element(find='title', html_content=self.fetcher.content)
logger.debug(f"Watch UUID {watch.get('uuid')} content check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}")
logger.debug(f"Watch UUID {uuid} content check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}")
if changed_detected:
if watch.get('check_unique_lines', False):
has_unique_lines = watch.lines_contain_something_unique_compared_to_history(lines=stripped_text_from_html.splitlines())
# One or more lines? unsure?
if not has_unique_lines:
logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} didnt have anything new setting change_detected=False")
logger.debug(f"check_unique_lines: UUID {uuid} didnt have anything new setting change_detected=False")
changed_detected = False
else:
logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} had unique content")
logger.debug(f"check_unique_lines: UUID {uuid} had unique content")
# Always record the new checksum
update_obj["previous_md5"] = fetched_md5

View File

@@ -1,5 +1,14 @@
$(document).ready(function () {
// duplicate
var csrftoken = $('input[name=csrf_token]').val();
$.ajaxSetup({
beforeSend: function (xhr, settings) {
if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) {
xhr.setRequestHeader("X-CSRFToken", csrftoken)
}
}
})
var browsersteps_session_id;
var browser_interface_seconds_remaining = 0;
var apply_buttons_disabled = false;

View File

@@ -1,10 +0,0 @@
$(document).ready(function () {
$.ajaxSetup({
beforeSend: function (xhr, settings) {
if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) {
xhr.setRequestHeader("X-CSRFToken", csrftoken)
}
}
})
});

View File

@@ -1,4 +1,13 @@
$(document).ready(function () {
var csrftoken = $('input[name=csrf_token]').val();
$.ajaxSetup({
beforeSend: function (xhr, settings) {
if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) {
xhr.setRequestHeader("X-CSRFToken", csrftoken)
}
}
})
$('.needs-localtime').each(function () {
for (var option of this.options) {
var dateObject = new Date(option.value * 1000);
@@ -39,12 +48,6 @@ $(document).ready(function () {
$("#highlightSnippet").remove();
}
// Listen for Escape key press
window.addEventListener('keydown', function (e) {
if (e.key === 'Escape') {
clean();
}
}, false);
function dragTextHandler(event) {
console.log('mouseupped');

View File

@@ -13,6 +13,16 @@ $(document).ready(function() {
$('#send-test-notification').click(function (e) {
e.preventDefault();
// this can be global
var csrftoken = $('input[name=csrf_token]').val();
$.ajaxSetup({
beforeSend: function(xhr, settings) {
if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) {
xhr.setRequestHeader("X-CSRFToken", csrftoken)
}
}
})
data = {
notification_body: $('#notification_body').val(),
notification_format: $('#notification_format').val(),

View File

@@ -2,258 +2,250 @@
// All rights reserved.
// yes - this is really a hack, if you are a front-ender and want to help, please get in touch!
let runInClearMode = false;
$(document).ready(function () {
$(document).ready(() => {
let currentSelections = [];
let currentSelection = null;
let appendToList = false;
let c, xctx, ctx;
let xScale = 1, yScale = 1;
let selectorImage, selectorImageRect, selectorData;
var current_selected_i;
var state_clicked = false;
var c;
// Global jQuery selectors with "Elem" appended
const $selectorCanvasElem = $('#selector-canvas');
const $includeFiltersElem = $("#include_filters");
const $selectorBackgroundElem = $("img#selector-background");
const $selectorCurrentXpathElem = $("#selector-current-xpath span");
const $fetchingUpdateNoticeElem = $('.fetching-update-notice');
const $selectorWrapperElem = $("#selector-wrapper");
// greyed out fill context
var xctx;
// redline highlight context
var ctx;
// Color constants
const FILL_STYLE_HIGHLIGHT = 'rgba(205,0,0,0.35)';
const FILL_STYLE_GREYED_OUT = 'rgba(205,205,205,0.95)';
const STROKE_STYLE_HIGHLIGHT = 'rgba(255,0,0, 0.9)';
const FILL_STYLE_REDLINE = 'rgba(255,0,0, 0.1)';
const STROKE_STYLE_REDLINE = 'rgba(225,0,0,0.9)';
var current_default_xpath = [];
var x_scale = 1;
var y_scale = 1;
var selector_image;
var selector_image_rect;
var selector_data;
$('#visualselector-tab').click(() => {
$selectorBackgroundElem.off('load');
currentSelections = [];
bootstrapVisualSelector();
$('#visualselector-tab').click(function () {
$("img#selector-background").off('load');
state_clicked = false;
current_selected_i = false;
bootstrap_visualselector();
});
function clearReset() {
ctx.clearRect(0, 0, c.width, c.height);
if ($includeFiltersElem.val().length) {
alert("Existing filters under the 'Filters & Triggers' tab were cleared.");
}
$includeFiltersElem.val('');
currentSelections = [];
// Means we ignore the xpaths from the scraper marked as sel.highlight_as_custom_filter (it matched a previous selector)
runInClearMode = true;
highlightCurrentSelected();
}
function splitToList(v) {
return v.split('\n').map(line => line.trim()).filter(line => line.length > 0);
}
function sortScrapedElementsBySize() {
// Sort the currentSelections array by area (width * height) in descending order
selectorData['size_pos'].sort((a, b) => {
const areaA = a.width * a.height;
const areaB = b.width * b.height;
return areaB - areaA;
});
}
$(document).on('keydown keyup', (event) => {
if (event.code === 'ShiftLeft' || event.code === 'ShiftRight') {
appendToList = event.type === 'keydown';
}
if (event.type === 'keydown') {
if ($selectorBackgroundElem.is(":visible") && event.key === "Escape") {
clearReset();
$(document).on('keydown', function (event) {
if ($("img#selector-background").is(":visible")) {
if (event.key == "Escape") {
state_clicked = false;
ctx.clearRect(0, 0, c.width, c.height);
}
}
});
$('#clear-selector').on('click', () => {
clearReset();
});
// So if they start switching between visualSelector and manual filters, stop it from rendering old filters
$('li.tab a').on('click', () => {
runInClearMode = true;
});
if (!window.location.hash || window.location.hash !== '#visualselector') {
$selectorBackgroundElem.attr('src', '');
// For when the page loads
if (!window.location.hash || window.location.hash != '#visualselector') {
$("img#selector-background").attr('src', '');
return;
}
bootstrapVisualSelector();
// Handle clearing button/link
$('#clear-selector').on('click', function (event) {
if (!state_clicked) {
alert('Oops, Nothing selected!');
}
state_clicked = false;
ctx.clearRect(0, 0, c.width, c.height);
xctx.clearRect(0, 0, c.width, c.height);
$("#include_filters").val('');
});
function bootstrapVisualSelector() {
$selectorBackgroundElem
.on("error", () => {
$fetchingUpdateNoticeElem.html("<strong>Ooops!</strong> The VisualSelector tool needs at least one fetched page, please unpause the watch and/or wait for the watch to complete fetching and then reload this page.")
.css('color', '#bb0000');
$('#selector-current-xpath, #clear-selector').hide();
})
.on('load', () => {
bootstrap_visualselector();
function bootstrap_visualselector() {
if (1) {
// bootstrap it, this will trigger everything else
$("img#selector-background").on("error", function () {
$('.fetching-update-notice').html("<strong>Ooops!</strong> The VisualSelector tool needs at least one fetched page, please unpause the watch and/or wait for the watch to complete fetching and then reload this page.");
$('.fetching-update-notice').css('color','#bb0000');
$('#selector-current-xpath').hide();
$('#clear-selector').hide();
}).bind('load', function () {
console.log("Loaded background...");
c = document.getElementById("selector-canvas");
// greyed out fill context
xctx = c.getContext("2d");
// redline highlight context
ctx = c.getContext("2d");
fetchData();
$selectorCanvasElem.off("mousemove mousedown");
})
.attr("src", screenshot_url);
let s = `${$selectorBackgroundElem.attr('src')}?${new Date().getTime()}`;
$selectorBackgroundElem.attr('src', s);
}
function alertIfFilterNotFound() {
let existingFilters = splitToList($includeFiltersElem.val());
let sizePosXpaths = selectorData['size_pos'].map(sel => sel.xpath);
for (let filter of existingFilters) {
if (!sizePosXpaths.includes(filter)) {
alert(`One or more of your existing filters were not found and will be removed when a new filter is selected.`);
break;
}
if ($("#include_filters").val().trim().length) {
current_default_xpath = $("#include_filters").val().split(/\r?\n/g);
} else {
current_default_xpath = [];
}
fetch_data();
$('#selector-canvas').off("mousemove mousedown");
// screenshot_url defined in the edit.html template
}).attr("src", screenshot_url);
}
// Tell visualSelector that the image should update
var s = $("img#selector-background").attr('src') + "?" + new Date().getTime();
$("img#selector-background").attr('src', s)
}
function fetchData() {
$fetchingUpdateNoticeElem.html("Fetching element data..");
// This is fired once the img src is loaded in bootstrap_visualselector()
function fetch_data() {
// Image is ready
$('.fetching-update-notice').html("Fetching element data..");
$.ajax({
url: watch_visual_selector_data_url,
context: document.body
}).done((data) => {
$fetchingUpdateNoticeElem.html("Rendering..");
selectorData = data;
sortScrapedElementsBySize();
console.log(`Reported browser width from backend: ${data['browser_width']}`);
// Little sanity check for the user, alert them if something is missing
alertIfFilterNotFound();
setScale();
reflowSelector();
$fetchingUpdateNoticeElem.fadeOut();
}).done(function (data) {
$('.fetching-update-notice').html("Rendering..");
selector_data = data;
console.log("Reported browser width from backend: " + data['browser_width']);
state_clicked = false;
set_scale();
reflow_selector();
$('.fetching-update-notice').fadeOut();
});
}
function updateFiltersText() {
// Assuming currentSelections is already defined and contains the selections
let uniqueSelections = new Set(currentSelections.map(sel => (sel[0] === '/' ? `xpath:${sel.xpath}` : sel.xpath)));
if (currentSelections.length > 0) {
// Convert the Set back to an array and join with newline characters
let textboxFilterText = Array.from(uniqueSelections).join("\n");
$includeFiltersElem.val(textboxFilterText);
}
}
function set_scale() {
function setScale() {
$selectorWrapperElem.show();
selectorImage = $selectorBackgroundElem[0];
selectorImageRect = selectorImage.getBoundingClientRect();
// some things to check if the scaling doesn't work
// - that the widths/sizes really are about the actual screen size cat elements.json |grep -o width......|sort|uniq
$("#selector-wrapper").show();
selector_image = $("img#selector-background")[0];
selector_image_rect = selector_image.getBoundingClientRect();
$selectorCanvasElem.attr({
'height': selectorImageRect.height,
'width': selectorImageRect.width
});
$selectorWrapperElem.attr('width', selectorImageRect.width);
$('#visual-selector-heading').css('max-width', selectorImageRect.width + "px")
xScale = selectorImageRect.width / selectorImage.naturalWidth;
yScale = selectorImageRect.height / selectorImage.naturalHeight;
ctx.strokeStyle = STROKE_STYLE_HIGHLIGHT;
ctx.fillStyle = FILL_STYLE_REDLINE;
// make the canvas the same size as the image
$('#selector-canvas').attr('height', selector_image_rect.height);
$('#selector-canvas').attr('width', selector_image_rect.width);
$('#selector-wrapper').attr('width', selector_image_rect.width);
x_scale = selector_image_rect.width / selector_data['browser_width'];
y_scale = selector_image_rect.height / selector_image.naturalHeight;
ctx.strokeStyle = 'rgba(255,0,0, 0.9)';
ctx.fillStyle = 'rgba(255,0,0, 0.1)';
ctx.lineWidth = 3;
console.log("Scaling set x: " + xScale + " by y:" + yScale);
$("#selector-current-xpath").css('max-width', selectorImageRect.width);
console.log("scaling set x: " + x_scale + " by y:" + y_scale);
$("#selector-current-xpath").css('max-width', selector_image_rect.width);
}
function reflowSelector() {
$(window).resize(() => {
setScale();
highlightCurrentSelected();
function reflow_selector() {
$(window).resize(function () {
set_scale();
highlight_current_selected_i();
});
var selector_currnt_xpath_text = $("#selector-current-xpath span");
setScale();
set_scale();
console.log(selectorData['size_pos'].length + " selectors found");
console.log(selector_data['size_pos'].length + " selectors found");
let existingFilters = splitToList($includeFiltersElem.val());
selectorData['size_pos'].forEach(sel => {
if ((!runInClearMode && sel.highlight_as_custom_filter) || existingFilters.includes(sel.xpath)) {
console.log("highlighting " + c);
currentSelections.push(sel);
// highlight the default one if we can find it in the xPath list
// or the xpath matches the default one
found = false;
if (current_default_xpath.length) {
// Find the first one that matches
// @todo In the future paint all that match
for (const c of current_default_xpath) {
for (var i = selector_data['size_pos'].length; i !== 0; i--) {
if (selector_data['size_pos'][i - 1].xpath.trim() === c.trim()) {
console.log("highlighting " + c);
current_selected_i = i - 1;
highlight_current_selected_i();
found = true;
break;
}
}
if (found) {
break;
}
}
});
if (!found) {
alert("Unfortunately your existing CSS/xPath Filter was no longer found!");
}
}
highlightCurrentSelected();
updateFiltersText();
$('#selector-canvas').bind('mousemove', function (e) {
if (state_clicked) {
return;
}
ctx.clearRect(0, 0, c.width, c.height);
current_selected_i = null;
$selectorCanvasElem.bind('mousemove', handleMouseMove.debounce(5));
$selectorCanvasElem.bind('mousedown', handleMouseDown.debounce(5));
$selectorCanvasElem.bind('mouseleave', highlightCurrentSelected.debounce(5));
function handleMouseMove(e) {
if (!e.offsetX && !e.offsetY) {
const targetOffset = $(e.target).offset();
// Add in offset
if ((typeof e.offsetX === "undefined" || typeof e.offsetY === "undefined") || (e.offsetX === 0 && e.offsetY === 0)) {
var targetOffset = $(e.target).offset();
e.offsetX = e.pageX - targetOffset.left;
e.offsetY = e.pageY - targetOffset.top;
}
ctx.fillStyle = FILL_STYLE_HIGHLIGHT;
// Reverse order - the most specific one should be deeper/later
// Basically, find the deepest one
var found = 0;
ctx.fillStyle = 'rgba(205,0,0,0.35)';
// Will be sorted by smallest width*height first
for (var i = 0; i <= selector_data['size_pos'].length; i++) {
// draw all of them? let them choose somehow?
var sel = selector_data['size_pos'][i];
// If we are in a bounding-box
if (e.offsetY > sel.top * y_scale && e.offsetY < sel.top * y_scale + sel.height * y_scale
&&
e.offsetX > sel.left * y_scale && e.offsetX < sel.left * y_scale + sel.width * y_scale
selectorData['size_pos'].forEach(sel => {
if (e.offsetY > sel.top * yScale && e.offsetY < sel.top * yScale + sel.height * yScale &&
e.offsetX > sel.left * yScale && e.offsetX < sel.left * yScale + sel.width * yScale) {
setCurrentSelectedText(sel.xpath);
drawHighlight(sel);
currentSelections.push(sel);
currentSelection = sel;
highlightCurrentSelected();
currentSelections.pop();
) {
// FOUND ONE
set_current_selected_text(sel.xpath);
ctx.strokeRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
ctx.fillRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
// no need to keep digging
// @todo or, O to go out/up, I to go in
// or double click to go up/out the selector?
current_selected_i = i;
found += 1;
break;
}
})
}
}.debounce(5));
function set_current_selected_text(s) {
selector_currnt_xpath_text[0].innerHTML = s;
}
function highlight_current_selected_i() {
if (state_clicked) {
state_clicked = false;
xctx.clearRect(0, 0, c.width, c.height);
return;
}
var sel = selector_data['size_pos'][current_selected_i];
if (sel[0] == '/') {
// @todo - not sure just checking / is right
$("#include_filters").val('xpath:' + sel.xpath);
} else {
$("#include_filters").val(sel.xpath);
}
xctx.fillStyle = 'rgba(205,205,205,0.95)';
xctx.strokeStyle = 'rgba(225,0,0,0.9)';
xctx.lineWidth = 3;
xctx.fillRect(0, 0, c.width, c.height);
// Clear out only what should be seen (make a clear/clean spot)
xctx.clearRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
xctx.strokeRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
state_clicked = true;
set_current_selected_text(sel.xpath);
}
function setCurrentSelectedText(s) {
$selectorCurrentXpathElem[0].innerHTML = s;
}
function drawHighlight(sel) {
ctx.strokeRect(sel.left * xScale, sel.top * yScale, sel.width * xScale, sel.height * yScale);
ctx.fillRect(sel.left * xScale, sel.top * yScale, sel.width * xScale, sel.height * yScale);
}
function handleMouseDown() {
// If we are in 'appendToList' mode, grow the list, if not, just 1
currentSelections = appendToList ? [...currentSelections, currentSelection] : [currentSelection];
highlightCurrentSelected();
updateFiltersText();
}
}
function highlightCurrentSelected() {
xctx.fillStyle = FILL_STYLE_GREYED_OUT;
xctx.strokeStyle = STROKE_STYLE_REDLINE;
xctx.lineWidth = 3;
xctx.clearRect(0, 0, c.width, c.height);
currentSelections.forEach(sel => {
//xctx.clearRect(sel.left * xScale, sel.top * yScale, sel.width * xScale, sel.height * yScale);
xctx.strokeRect(sel.left * xScale, sel.top * yScale, sel.width * xScale, sel.height * yScale);
$('#selector-canvas').bind('mousedown', function (e) {
highlight_current_selected_i();
});
}
});

View File

@@ -1,8 +1,8 @@
function toggleOpacity(checkboxSelector, fieldSelector) {
function toggleOpacity(checkboxSelector, fieldSelector, inverted) {
const checkbox = document.querySelector(checkboxSelector);
const fields = document.querySelectorAll(fieldSelector);
function updateOpacity() {
const opacityValue = checkbox.checked ? 0.6 : 1;
const opacityValue = !checkbox.checked ? (inverted ? 0.6 : 1) : (inverted ? 1 : 0.6);
fields.forEach(field => {
field.style.opacity = opacityValue;
});
@@ -25,6 +25,8 @@ $(document).ready(function () {
$('#notification-tokens-info').toggle();
});
toggleOpacity('#time_between_check_use_default', '#time_between_check');
toggleOpacity('#time_between_check_use_default', '#time_between_check', false);
});

View File

@@ -1,8 +1,6 @@
#selector-wrapper {
height: 100%;
text-align: center;
max-height: 70vh;
overflow-y: scroll;
position: relative;

View File

@@ -186,12 +186,17 @@ code {
}
}
.watch-tag-list {
color: var(--color-white);
.inline-tag {
white-space: nowrap;
background: var(--color-text-watch-tag-list);
border-radius: 5px;
padding: 2px 5px;
margin-right: 4px;
}
.watch-tag-list {
color: var(--color-white);
background: var(--color-text-watch-tag-list);
@extend .inline-tag;
}
.box {
@@ -671,25 +676,14 @@ footer {
and also iPads specifically.
*/
.watch-table {
/* make headings work on mobile */
thead {
display: block;
tr {
th {
display: inline-block;
}
}
.empty-cell {
display: none;
}
}
/* Force table to not be like tables anymore */
tbody {
td,
tr {
display: block;
}
thead,
tbody,
th,
td,
tr {
display: block;
}
.last-checked {
@@ -713,6 +707,13 @@ footer {
display: inline-block;
}
/* Hide table headers (but not display: none;, for accessibility) */
thead tr {
position: absolute;
top: -9999px;
left: -9999px;
}
.pure-table td,
.pure-table th {
border: none;
@@ -757,7 +758,6 @@ footer {
thead {
background-color: var(--color-background-table-thead);
color: var(--color-text);
border-bottom: 1px solid var(--color-background-table-thead);
}
td,
@@ -1061,9 +1061,8 @@ ul {
.tracking-ldjson-price-data {
background-color: var(--color-background-button-green);
color: #000;
padding: 3px;
border-radius: 3px;
white-space: nowrap;
opacity: 0.6;
@extend .inline-tag;
}
.ldjson-price-track-offer {
@@ -1109,9 +1108,12 @@ ul {
background-color: var(--color-background-button-cancel);
color: #777;
}
padding: 3px;
border-radius: 3px;
white-space: nowrap;
&.error {
background-color: var(--color-background-button-error);
color: #fff;
opacity: 0.7;
}
@extend .inline-tag;
}
#chrome-extension-link {

View File

@@ -531,12 +531,15 @@ code {
content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==);
margin: 0 3px 0 5px; }
.inline-tag, .watch-tag-list, .tracking-ldjson-price-data, .restock-label {
white-space: nowrap;
border-radius: 5px;
padding: 2px 5px;
margin-right: 4px; }
.watch-tag-list {
color: var(--color-white);
white-space: nowrap;
background: var(--color-text-watch-tag-list);
border-radius: 5px;
padding: 2px 5px; }
background: var(--color-text-watch-tag-list); }
.box {
max-width: 80%;
@@ -863,17 +866,14 @@ footer {
and also iPads specifically.
*/
.watch-table {
/* make headings work on mobile */
/* Force table to not be like tables anymore */
/* Force table to not be like tables anymore */ }
.watch-table thead {
display: block; }
.watch-table thead tr th {
display: inline-block; }
.watch-table thead .empty-cell {
display: none; }
.watch-table tbody td,
.watch-table tbody tr {
/* Force table to not be like tables anymore */
/* Hide table headers (but not display: none;, for accessibility) */ }
.watch-table thead,
.watch-table tbody,
.watch-table th,
.watch-table td,
.watch-table tr {
display: block; }
.watch-table .last-checked > span {
vertical-align: middle; }
@@ -885,6 +885,10 @@ footer {
content: "Last Changed "; }
.watch-table td.inline {
display: inline-block; }
.watch-table thead tr {
position: absolute;
top: -9999px;
left: -9999px; }
.watch-table .pure-table td,
.watch-table .pure-table th {
border: none; }
@@ -911,8 +915,7 @@ footer {
border-color: var(--color-border-table-cell); }
.pure-table thead {
background-color: var(--color-background-table-thead);
color: var(--color-text);
border-bottom: 1px solid var(--color-background-table-thead); }
color: var(--color-text); }
.pure-table td,
.pure-table th {
border-left-color: var(--color-border-table-cell); }
@@ -1065,7 +1068,6 @@ ul {
#selector-wrapper {
height: 100%;
text-align: center;
max-height: 70vh;
overflow-y: scroll;
position: relative; }
@@ -1153,9 +1155,7 @@ ul {
.tracking-ldjson-price-data {
background-color: var(--color-background-button-green);
color: #000;
padding: 3px;
border-radius: 3px;
white-space: nowrap; }
opacity: 0.6; }
.ldjson-price-track-offer {
font-weight: bold;
@@ -1180,16 +1180,18 @@ ul {
#quick-watch-processor-type ul li > * {
display: inline-block; }
.restock-label {
padding: 3px;
border-radius: 3px;
white-space: nowrap; }
.restock-label.in-stock {
background-color: var(--color-background-button-green);
color: #fff; }
.restock-label.not-in-stock {
background-color: var(--color-background-button-cancel);
color: #777; }
.restock-label.in-stock {
background-color: var(--color-background-button-green);
color: #fff; }
.restock-label.not-in-stock {
background-color: var(--color-background-button-cancel);
color: #777; }
.restock-label.error {
background-color: var(--color-background-button-error);
color: #fff;
opacity: 0.7; }
#chrome-extension-link {
padding: 9px;

View File

@@ -163,6 +163,7 @@ class ChangeDetectionStore:
del (update_obj[dict_key])
self.__data['watching'][uuid].update(update_obj)
self.needs_write = True
@property
@@ -241,15 +242,7 @@ class ChangeDetectionStore:
# Remove a watch's data but keep the entry (URL etc)
def clear_watch_history(self, uuid):
import pathlib
# JSON Data, Screenshots, Textfiles (history index and snapshots), HTML in the future etc
for item in pathlib.Path(os.path.join(self.datastore_path, uuid)).rglob("*.*"):
unlink(item)
# Force the attr to recalculate
bump = self.__data['watching'][uuid].history
# Do this last because it will trigger a recheck due to last_checked being zero
from .model import Restock
self.__data['watching'][uuid].update({
'browser_steps_last_error_step' : None,
'check_count': 0,
@@ -264,8 +257,16 @@ class ChangeDetectionStore:
'previous_md5_before_filters': False,
'remote_server_reply': None,
'track_ldjson_price_data': None,
'restock': Restock()
})
# JSON Data, Screenshots, Textfiles (history index and snapshots), HTML in the future etc
for item in pathlib.Path(os.path.join(self.datastore_path, uuid)).rglob("*.*"):
unlink(item)
# Force the attr to recalculate
bump = self.__data['watching'][uuid].history
self.needs_write_urgent = True
def add_watch(self, url, tag='', extras=None, tag_uuids=None, write_to_disk_now=True):
@@ -376,6 +377,46 @@ class ChangeDetectionStore:
return False
# Save as PNG, PNG is larger but better for doing visual diff in the future
def save_screenshot(self, watch_uuid, screenshot: bytes, as_error=False):
if not self.data['watching'].get(watch_uuid):
return
if as_error:
target_path = os.path.join(self.datastore_path, watch_uuid, "last-error-screenshot.png")
else:
target_path = os.path.join(self.datastore_path, watch_uuid, "last-screenshot.png")
self.data['watching'][watch_uuid].ensure_data_dir_exists()
with open(target_path, 'wb') as f:
f.write(screenshot)
f.close()
def save_error_text(self, watch_uuid, contents):
if not self.data['watching'].get(watch_uuid):
return
self.data['watching'][watch_uuid].ensure_data_dir_exists()
target_path = os.path.join(self.datastore_path, watch_uuid, "last-error.txt")
with open(target_path, 'w') as f:
f.write(contents)
def save_xpath_data(self, watch_uuid, data, as_error=False):
if not self.data['watching'].get(watch_uuid):
return
if as_error:
target_path = os.path.join(self.datastore_path, watch_uuid, "elements-error.json")
else:
target_path = os.path.join(self.datastore_path, watch_uuid, "elements.json")
self.data['watching'][watch_uuid].ensure_data_dir_exists()
with open(target_path, 'w') as f:
f.write(json.dumps(data))
f.close()
def sync_to_json(self):
logger.info("Saving JSON..")
try:
@@ -582,7 +623,8 @@ class ChangeDetectionStore:
# Eventually almost everything todo with a watch will apply as a Tag
# So we use the same model as a Watch
with self.lock:
new_tag = Watch.model(datastore_path=self.datastore_path, default={
from .model import Tag
new_tag = Tag.model(datastore_path=self.datastore_path, default={
'title': name.strip(),
'date_created': int(time.time())
})
@@ -621,6 +663,12 @@ class ChangeDetectionStore:
return next((v for v in tags if v.get('title', '').lower() == tag_name.lower()),
None)
def any_watches_have_processor_by_name(self, processor_name):
for watch in self.data['watching'].values():
if watch.get('processor') == processor_name:
return True
return False
def get_updates_available(self):
import inspect
updates_available = []
@@ -844,8 +892,3 @@ class ChangeDetectionStore:
# Something custom here
self.__data["watching"][uuid]['time_between_check_use_default'] = False
# Correctly set datatype for older installs where 'tag' was string and update_12 did not catch it
def update_16(self):
for uuid, watch in self.data['watching'].items():
if isinstance(watch.get('tags'), str):
self.data['watching'][uuid]['tags'] = []

View File

@@ -26,11 +26,7 @@
<meta name="msapplication-TileColor" content="#da532c">
<meta name="msapplication-config" content="favicons/browserconfig.xml">
<meta name="theme-color" content="#ffffff">
<script>
const csrftoken="{{ csrf_token() }}";
</script>
<script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
<script src="{{url_for('static_content', group='js', filename='csrf.js')}}" defer></script>
</head>
<body>

View File

@@ -16,7 +16,7 @@
const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');
{% endif %}
const notification_base_url="{{url_for('ajax_callback_send_notification_test', watch_uuid=uuid)}}";
const playwright_enabled={% if playwright_enabled %} true {% else %} false {% endif %};
const playwright_enabled={% if playwright_enabled %}true{% else %}false{% endif %};
const recheck_proxy_start_url="{{url_for('check_proxies.start_check', uuid=uuid)}}";
const proxy_recheck_status_url="{{url_for('check_proxies.get_recheck_status', uuid=uuid)}}";
const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
@@ -41,18 +41,16 @@
<ul>
<li class="tab" id=""><a href="#general">General</a></li>
<li class="tab"><a href="#request">Request</a></li>
{% if extra_tab_content %}
<li class="tab"><a href="#extras_tab">{{ extra_tab_content }}</a></li>
{% endif %}
{% if playwright_enabled %}
<li class="tab"><a id="browsersteps-tab" href="#browser-steps">Browser Steps</a></li>
{% endif %}
{% if watch['processor'] == 'text_json_diff' %}
<li class="tab"><a id="visualselector-tab" href="#visualselector">Visual Filter Selector</a></li>
<li class="tab"><a href="#filters-and-triggers">Filters &amp; Triggers</a></li>
{% endif %}
{% if watch['processor'] == 'restock_diff' %}
<li class="tab"><a href="#restock">Restock Detection</a></li>
{% endif %}
<li class="tab"><a href="#notifications">Notifications</a></li>
<li class="tab"><a href="#stats">Stats</a></li>
</ul>
@@ -72,7 +70,7 @@
<span class="pure-form-message-inline">
{% if watch['processor'] == 'text_json_diff' %}
Current mode: <strong>Webpage Text/HTML, JSON and PDF changes.</strong><br>
<a href="{{url_for('edit_page', uuid=uuid)}}?switch_processor=restock_diff" class="pure-button button-xsmall">Switch to re-stock detection mode.</a>
<a href="{{url_for('edit_page', uuid=uuid)}}?switch_processor=restock_diff" class="pure-button button-xsmall">Switch to re-stock & Price detection mode for single product pages</a>
{% else %}
Current mode: <strong>Re-stock detection.</strong><br>
<a href="{{url_for('edit_page', uuid=uuid)}}?switch_processor=text_json_diff" class="pure-button button-xsmall">Switch to Webpage Text/HTML, JSON and PDF changes mode.</a>
@@ -292,7 +290,7 @@ xpath://body/div/span[contains(@class, 'example-class')]",
<ul>
<li>JSONPath: Prefix with <code>json:</code>, use <code>json:$</code> to force re-formatting if required, <a href="https://jsonpath.com/" target="new">test your JSONPath here</a>.</li>
{% if jq_support %}
<li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>. Prefix <code>jqraw:</code> outputs the results as text instead of a JSON list.</li>
<li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>.</li>
{% else %}
<li>jq support not installed</li>
{% endif %}
@@ -413,18 +411,12 @@ Unavailable") }}
</div>
</div>
{% endif %}
{% if watch['processor'] == 'restock_diff' %}
<div class="tab-pane-inner" id="restock">
<fieldset>
<div class="pure-control-group">
{{ render_checkbox_field(form.in_stock_only) }}
<span class="pure-form-message-inline">Only trigger notifications when page changes from <strong>out of stock</strong> to <strong>back in stock</strong></span>
</div>
</fieldset>
{# rendered sub Template #}
{% if extra_form_content %}
<div class="tab-pane-inner" id="extras_tab">
{{ extra_form_content|safe }}
</div>
{% endif %}
{% endif %}
{% if watch['processor'] == 'text_json_diff' %}
<div class="tab-pane-inner visual-selector-ui" id="visualselector">
<img class="beta-logo" src="{{url_for('static_content', group='images', filename='beta-logo.png')}}" alt="New beta functionality">
@@ -432,8 +424,9 @@ Unavailable") }}
<fieldset>
<div class="pure-control-group">
{% if visualselector_enabled %}
<span class="pure-form-message-inline" id="visual-selector-heading">
The Visual Selector tool lets you select the <i>text</i> elements that will be used for the change detection. It automatically fills in the filters in the "CSS/JSONPath/JQ/XPath Filters" box of the <a href="#filters-and-triggers">Filters & Triggers</a> tab. Use <strong>Shift+Click</strong> to select multiple items.
<span class="pure-form-message-inline">
The Visual Selector tool lets you select the <i>text</i> elements that will be used for the change detection &dash; after the <i>Browser Steps</i> has completed.<br>
This tool is a helper to manage filters in the "CSS/JSONPath/JQ/XPath Filters" box of the <a href="#filters-and-triggers">Filters & Triggers</a> tab.
</span>
<div id="selector-header">

View File

@@ -59,6 +59,11 @@
{% set sort_order = sort_order or 'asc' %}
{% set sort_attribute = sort_attribute or 'last_changed' %}
{% set pagination_page = request.args.get('page', 0) %}
{% set cols_required = 6 %}
{% set any_has_restock_price_processor = datastore.any_watches_have_processor_by_name("restock_diff") %}
{% if any_has_restock_price_processor %}
{% set cols_required = cols_required + 1 %}
{% endif %}
<div id="watch-table-wrapper">
@@ -68,17 +73,20 @@
{% set link_order = "desc" if sort_order == 'asc' else "asc" %}
{% set arrow_span = "" %}
<th><input style="vertical-align: middle" type="checkbox" id="check-all" > <a class="{{ 'active '+link_order if sort_attribute == 'date_created' else 'inactive' }}" href="{{url_for('index', sort='date_created', order=link_order, tag=active_tag_uuid)}}"># <span class='arrow {{link_order}}'></span></a></th>
<th class="empty-cell"></th>
<th></th>
<th><a class="{{ 'active '+link_order if sort_attribute == 'label' else 'inactive' }}" href="{{url_for('index', sort='label', order=link_order, tag=active_tag_uuid)}}">Website <span class='arrow {{link_order}}'></span></a></th>
{% if any_has_restock_price_processor %}
<th>Restock &amp; Price</th>
{% endif %}
<th><a class="{{ 'active '+link_order if sort_attribute == 'last_checked' else 'inactive' }}" href="{{url_for('index', sort='last_checked', order=link_order, tag=active_tag_uuid)}}">Last Checked <span class='arrow {{link_order}}'></span></a></th>
<th><a class="{{ 'active '+link_order if sort_attribute == 'last_changed' else 'inactive' }}" href="{{url_for('index', sort='last_changed', order=link_order, tag=active_tag_uuid)}}">Last Changed <span class='arrow {{link_order}}'></span></a></th>
<th class="empty-cell"></th>
<th></th>
</tr>
</thead>
<tbody>
{% if not watches|length %}
<tr>
<td colspan="6" style="text-wrap: wrap;">No website watches configured, please add a URL in the box above, or <a href="{{ url_for('import_page')}}" >import a list</a>.</td>
<td colspan="{{ cols_required }}" style="text-wrap: wrap;">No website watches configured, please add a URL in the box above, or <a href="{{ url_for('import_page')}}" >import a list</a>.</td>
</tr>
{% endif %}
{% for watch in (watches|sort(attribute=sort_attribute, reverse=sort_order == 'asc'))|pagination_slice(skip=pagination.skip) %}
@@ -91,6 +99,7 @@
{% if watch.last_notification_error is defined and watch.last_notification_error != False %}error{% endif %}
{% if watch.paused is defined and watch.paused != False %}paused{% endif %}
{% if is_unviewed %}unviewed{% endif %}
{% if watch.has_restock_info %}has-restock-info {% if watch['restock']['in_stock'] %}in-stock{% else %}not-in-stock{% endif %}{% endif %}
{% if watch.uuid in queued_uuids %}queued{% endif %}">
<td class="inline checkbox-uuid" ><input name="uuids" type="checkbox" value="{{ watch.uuid}} " > <span>{{ loop.index+pagination.skip }}</span></td>
<td class="inline watch-controls">
@@ -135,30 +144,40 @@
{% if watch['processor'] == 'text_json_diff' %}
{% if watch['has_ldjson_price_data'] and not watch['track_ldjson_price_data'] %}
<div class="ldjson-price-track-offer">Embedded price data detected, follow only price data? <a href="{{url_for('price_data_follower.accept', uuid=watch.uuid)}}" class="pure-button button-xsmall">Yes</a> <a href="{{url_for('price_data_follower.reject', uuid=watch.uuid)}}" class="">No</a></div>
<div class="ldjson-price-track-offer">Switch to Restock & Price watch mode? <a href="{{url_for('price_data_follower.accept', uuid=watch.uuid)}}" class="pure-button button-xsmall">Yes</a> <a href="{{url_for('price_data_follower.reject', uuid=watch.uuid)}}" class="">No</a></div>
{% endif %}
{% if watch['track_ldjson_price_data'] == 'accepted' %}
{% endif %}
{% if watch['processor'] == 'restock_diff' %}
<span class="tracking-ldjson-price-data" title="Automatically following embedded price information"><img src="{{url_for('static_content', group='images', filename='price-tag-icon.svg')}}" class="status-icon price-follow-tag-icon" > Price</span>
{% endif %}
{% endif %}
{% if watch['processor'] == 'restock_diff' %}
<span class="restock-label {{'in-stock' if watch['in_stock'] else 'not-in-stock' }}" title="detecting restock conditions">
<!-- maybe some object watch['processor'][restock_diff] or.. -->
{% if watch['last_checked'] and watch['in_stock'] != None %}
{% if watch['in_stock'] %} In stock {% else %} Not in stock {% endif %}
{% else %}
Not yet checked
{% endif %}
</span>
{% endif %}
{% for watch_tag_uuid, watch_tag in datastore.get_all_tags_for_watch(watch['uuid']).items() %}
<span class="watch-tag-list">{{ watch_tag.title }}</span>
{% endfor %}
</td>
{% if any_has_restock_price_processor %}
<td class="restock-and-price">
{% if watch['processor'] == 'restock_diff' %}
{% if watch.get('restock') and watch['restock']['in_stock'] != None %}
<span class="restock-label {{'in-stock' if watch['restock']['in_stock'] else 'not-in-stock' }}" title="Detecting restock and price">
<!-- maybe some object watch['processor'][restock_diff] or.. -->
{% if watch['restock']['in_stock'] %} In stock {% else %} Not in stock {% endif %}
</span>
{% endif %}
{% if watch.get('restock') and watch['restock']['price'] != None %}
{% if watch['restock']['price'] != None %}
<span class="restock-label price {{'in-stock' if watch['restock']['in_stock'] else 'not-in-stock' }}" title="Price">
{{ watch['restock']['price'] }} {{ watch['restock']['currency'] }}
</span>
{% endif %}
{% elif not watch.has_restock_info %}
<span class="restock-label error">No information</span>
{% endif %}
{% endif %}
</td>
{% endif %}
<td class="last-checked" data-timestamp="{{ watch.last_checked }}">{{watch|format_last_checked_time|safe}}</td>
<td class="last-changed" data-timestamp="{{ watch.last_changed }}">{% if watch.history_n >=2 and watch.last_changed >0 %}
{{watch.last_changed|format_timestamp_timeago}}

View File

@@ -149,15 +149,6 @@ def test_api_simple(client, live_server):
headers={'x-api-key': api_key},
)
assert b'which has this one new line' in res.data
assert b'<div id' not in res.data
# Fetch the HTML of the latest one
res = client.get(
url_for("watchsinglehistory", uuid=watch_uuid, timestamp='latest')+"?html=1",
headers={'x-api-key': api_key},
)
assert b'which has this one new line' in res.data
assert b'<div id' in res.data
# Fetch the whole watch
res = client.get(

View File

@@ -1,235 +0,0 @@
#!/usr/bin/python3
import time
from flask import url_for
from .util import live_server_setup, extract_UUID_from_client, extract_api_key_from_UI, wait_for_all_checks
def set_response_with_ldjson():
test_return_data = """<html>
<body>
Some initial text<br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
<div class="sametext">Some text thats the same</div>
<div class="changetext">Some text that will change</div>
<script type="application/ld+json">
{
"@context":"https://schema.org/",
"@type":"Product",
"@id":"https://www.some-virtual-phone-shop.com/celular-iphone-14/p",
"name":"Celular Iphone 14 Pro Max 256Gb E Sim A16 Bionic",
"brand":{
"@type":"Brand",
"name":"APPLE"
},
"image":"https://www.some-virtual-phone-shop.com/15509426/image.jpg",
"description":"You dont need it",
"mpn":"111111",
"sku":"22222",
"Offers":{
"@type":"AggregateOffer",
"lowPrice":8097000,
"highPrice":8099900,
"priceCurrency":"COP",
"offers":[
{
"@type":"Offer",
"price":8097000,
"priceCurrency":"COP",
"availability":"http://schema.org/InStock",
"sku":"102375961",
"itemCondition":"http://schema.org/NewCondition",
"seller":{
"@type":"Organization",
"name":"ajax"
}
}
],
"offerCount":1
}
}
</script>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
return None
def set_response_without_ldjson():
test_return_data = """<html>
<body>
Some initial text<br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
<div class="sametext">Some text thats the same</div>
<div class="changetext">Some text that will change</div>
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
return None
def test_setup(client, live_server):
live_server_setup(live_server)
# actually only really used by the distill.io importer, but could be handy too
def test_check_ldjson_price_autodetect(client, live_server):
set_response_with_ldjson()
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
wait_for_all_checks(client)
# Should get a notice that it's available
res = client.get(url_for("index"))
assert b'ldjson-price-track-offer' in res.data
# Accept it
uuid = extract_UUID_from_client(client)
time.sleep(1)
client.get(url_for('price_data_follower.accept', uuid=uuid, follow_redirects=True))
wait_for_all_checks(client)
# Trigger a check
time.sleep(1)
client.get(url_for("form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
# Offer should be gone
res = client.get(url_for("index"))
assert b'Embedded price data' not in res.data
assert b'tracking-ldjson-price-data' in res.data
# and the last snapshot (via API) should be just the price
api_key = extract_api_key_from_UI(client)
res = client.get(
url_for("watchsinglehistory", uuid=uuid, timestamp='latest'),
headers={'x-api-key': api_key},
)
# Should see this (don't know where the whitespace came from)
assert b'"highPrice": 8099900' in res.data
# And not this cause its not the ld-json
assert b"So let's see what happens" not in res.data
client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
##########################################################################################
# And we shouldn't see the offer
set_response_without_ldjson()
# Add our URL to the import page
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'ldjson-price-track-offer' not in res.data
##########################################################################################
client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
def _test_runner_check_bad_format_ignored(live_server, client, has_ldjson_price_data):
test_url = url_for('test_endpoint', _external=True)
res = client.post(
url_for("import_page"),
data={"urls": test_url},
follow_redirects=True
)
assert b"1 Imported" in res.data
wait_for_all_checks(client)
for k,v in client.application.config.get('DATASTORE').data['watching'].items():
assert v.get('last_error') == False
assert v.get('has_ldjson_price_data') == has_ldjson_price_data
##########################################################################################
client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
def test_bad_ldjson_is_correctly_ignored(client, live_server):
#live_server_setup(live_server)
test_return_data = """
<html>
<head>
<script type="application/ld+json">
{
"@context": "http://schema.org",
"@type": ["Product", "SubType"],
"name": "My test product",
"description": "",
"offers": {
"note" : "You can see the case-insensitive OffERS key, it should work",
"@type": "Offer",
"offeredBy": {
"@type": "Organization",
"name":"Person",
"telephone":"+1 999 999 999"
},
"price": "1",
"priceCurrency": "EUR",
"url": "/some/url"
}
}
</script>
</head>
<body>
<div class="yes">Some extra stuff</div>
</body></html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
_test_runner_check_bad_format_ignored(live_server=live_server, client=client, has_ldjson_price_data=True)
test_return_data = """
<html>
<head>
<script type="application/ld+json">
{
"@context": "http://schema.org",
"@type": ["Product", "SubType"],
"name": "My test product",
"description": "",
"BrokenOffers": {
"@type": "Offer",
"offeredBy": {
"@type": "Organization",
"name":"Person",
"telephone":"+1 999 999 999"
},
"price": "1",
"priceCurrency": "EUR",
"url": "/some/url"
}
}
</script>
</head>
<body>
<div class="yes">Some extra stuff</div>
</body></html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
_test_runner_check_bad_format_ignored(live_server=live_server, client=client, has_ldjson_price_data=False)

View File

@@ -3,8 +3,7 @@
import time
from flask import url_for
from urllib.request import urlopen
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, \
extract_UUID_from_client
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI
sleep_time_for_fetch_thread = 3
@@ -142,14 +141,6 @@ def test_check_basic_change_detection_functionality(client, live_server):
assert b'Mark all viewed' not in res.data
assert b'unviewed' not in res.data
# #2458 "clear history" should make the Watch object update its status correctly when the first snapshot lands again
uuid = extract_UUID_from_client(client)
client.get(url_for("clear_watch_history", uuid=uuid))
client.get(url_for("form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'preview/' in res.data
#
# Cleanup everything
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)

View File

@@ -5,13 +5,15 @@ import os
import json
import logging
from flask import url_for
from .util import live_server_setup, wait_for_all_checks
from .util import live_server_setup
from urllib.parse import urlparse, parse_qs
def test_consistent_history(client, live_server):
live_server_setup(live_server)
r = range(1, 30)
# Give the endpoint time to spin up
time.sleep(1)
r = range(1, 50)
for one in r:
test_url = url_for('test_endpoint', content_type="text/html", content=str(one), _external=True)
@@ -23,8 +25,15 @@ def test_consistent_history(client, live_server):
assert b"1 Imported" in res.data
wait_for_all_checks(client)
time.sleep(3)
while True:
res = client.get(url_for("index"))
logging.debug("Waiting for 'Checking now' to go away..")
if b'Checking now' not in res.data:
break
time.sleep(0.5)
time.sleep(3)
# Essentially just triggers the DB write/update
res = client.post(
url_for("settings_page"),
@@ -35,9 +44,8 @@ def test_consistent_history(client, live_server):
)
assert b"Settings updated." in res.data
time.sleep(2)
# Give it time to write it out
time.sleep(3)
json_db_file = os.path.join(live_server.app.config['DATASTORE'].datastore_path, 'url-watches.json')
json_obj = None
@@ -50,7 +58,7 @@ def test_consistent_history(client, live_server):
# each one should have a history.txt containing just one line
for w in json_obj['watching'].keys():
history_txt_index_file = os.path.join(live_server.app.config['DATASTORE'].datastore_path, w, 'history.txt')
assert os.path.isfile(history_txt_index_file), f"History.txt should exist where I expect it at {history_txt_index_file}"
assert os.path.isfile(history_txt_index_file), "History.txt should exist where I expect it - {}".format(history_txt_index_file)
# Same like in model.Watch
with open(history_txt_index_file, "r") as f:
@@ -62,15 +70,15 @@ def test_consistent_history(client, live_server):
w))
# Find the snapshot one
for fname in files_in_watch_dir:
if fname != 'history.txt' and 'html' not in fname:
if fname != 'history.txt':
# contents should match what we requested as content returned from the test url
with open(os.path.join(live_server.app.config['DATASTORE'].datastore_path, w, fname), 'r') as snapshot_f:
contents = snapshot_f.read()
watch_url = json_obj['watching'][w]['url']
u = urlparse(watch_url)
q = parse_qs(u[4])
assert q['content'][0] == contents.strip(), f"Snapshot file {fname} should contain {q['content'][0]}"
assert q['content'][0] == contents.strip(), "Snapshot file {} should contain {}".format(fname, q['content'][0])
assert len(files_in_watch_dir) == 3, "Should be just three files in the dir, html.br snapshot, history.txt and the extracted text snapshot"
assert len(files_in_watch_dir) == 2, "Should be just two files in the dir, history.txt and the snapshot"

View File

@@ -45,6 +45,7 @@ def test_highlight_ignore(client, live_server):
)
res = client.get(url_for("edit_page", uuid=uuid))
# should be a regex now
assert b'/oh\ yeah\ \d+/' in res.data
@@ -54,7 +55,3 @@ def test_highlight_ignore(client, live_server):
# And it should register in the preview page
res = client.get(url_for("preview_page", uuid=uuid))
assert b'<div class="ignored">oh yeah 456' in res.data
# Should be in base.html
assert b'csrftoken' in res.data

View File

@@ -41,26 +41,19 @@ and it can also be repeated
from .. import html_tools
# See that we can find the second <script> one, which is not broken, and matches our filter
text = html_tools.extract_json_as_string(content, "json:$.offers.priceCurrency")
assert text == '"AUD"'
text = html_tools.extract_json_as_string('{"id":5}', "json:$.id")
assert text == "5"
text = html_tools.extract_json_as_string(content, "json:$.offers.price")
assert text == "23.5"
# also check for jq
if jq_support:
text = html_tools.extract_json_as_string(content, "jq:.offers.priceCurrency")
assert text == '"AUD"'
text = html_tools.extract_json_as_string(content, "jq:.offers.price")
assert text == "23.5"
text = html_tools.extract_json_as_string('{"id":5}', "jq:.id")
assert text == "5"
text = html_tools.extract_json_as_string(content, "jqraw:.offers.priceCurrency")
assert text == "AUD"
text = html_tools.extract_json_as_string('{"id":5}', "jqraw:.id")
assert text == "5"
text = html_tools.extract_json_as_string('{"id":5}', "json:$.id")
assert text == "5"
# When nothing at all is found, it should throw JSONNotFound
# Which is caught and shown to the user in the watch-overview table
@@ -71,9 +64,6 @@ and it can also be repeated
with pytest.raises(html_tools.JSONNotFound) as e_info:
html_tools.extract_json_as_string('COMPLETE GIBBERISH, NO JSON!', "jq:.id")
with pytest.raises(html_tools.JSONNotFound) as e_info:
html_tools.extract_json_as_string('COMPLETE GIBBERISH, NO JSON!', "jqraw:.id")
def test_unittest_inline_extract_body():
content = """
@@ -301,10 +291,6 @@ def test_check_jq_filter(client, live_server):
if jq_support:
check_json_filter('jq:.boss.name', client, live_server)
def test_check_jqraw_filter(client, live_server):
if jq_support:
check_json_filter('jqraw:.boss.name', client, live_server)
def check_json_filter_bool_val(json_filter, client, live_server):
set_original_response()
@@ -359,10 +345,6 @@ def test_check_jq_filter_bool_val(client, live_server):
if jq_support:
check_json_filter_bool_val("jq:.available", client, live_server)
def test_check_jqraw_filter_bool_val(client, live_server):
if jq_support:
check_json_filter_bool_val("jq:.available", client, live_server)
# Re #265 - Extended JSON selector test
# Stuff to consider here
# - Selector should be allowed to return empty when it doesn't match (people might wait for some condition)
@@ -509,8 +491,4 @@ def test_check_jsonpath_ext_filter(client, live_server):
def test_check_jq_ext_filter(client, live_server):
if jq_support:
check_json_ext_filter('jq:.[] | select(.status | contains("Sold"))', client, live_server)
def test_check_jqraw_ext_filter(client, live_server):
if jq_support:
check_json_ext_filter('jq:.[] | select(.status | contains("Sold"))', client, live_server)
check_json_ext_filter('jq:.[] | select(.status | contains("Sold"))', client, live_server)
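The asserts in this file hinge on the difference between the jq: and jqraw: prefixes: jq: returns the matched value JSON-encoded (so strings keep their quotes), while jqraw: returns it as bare text. A minimal illustration of that quoting behaviour only, using the standard library rather than the real extract_json_as_string() internals:

import json

def render_match(value, raw=False):
    # jq:-style output keeps JSON encoding ("AUD" stays quoted),
    # jqraw:-style output is the plain text (AUD)
    return str(value) if raw else json.dumps(value)

assert render_match("AUD") == '"AUD"'
assert render_match("AUD", raw=True) == 'AUD'
assert render_match(23.5) == '23.5'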

View File

@@ -10,6 +10,7 @@ def test_setup(live_server):
# Hard to just add more live server URLs when one test is already running (I think)
# So we add our test here (was in a different file)
def test_headers_in_request(client, live_server):
#live_server_setup(live_server)
# Add our URL to the import page
test_url = url_for('test_headers', _external=True)
@@ -378,11 +379,17 @@ def test_headers_textfile_in_request(client, live_server):
with open('test-datastore/' + extract_UUID_from_client(client) + '/headers.txt', 'w') as f:
f.write("watch-header: nice")
wait_for_all_checks(client)
client.get(url_for("form_watch_checknow"), follow_redirects=True)
# Give the thread time to pick it up
# Give the thread time to pick it up; this actually is not super reliable and pytest can terminate before the check has run
wait_for_all_checks(client)
# WARNING - pytest and 'wait_for_all_checks' can shut down before the check has actually stopped processing when using the pyppeteer fetcher
# so adding more time here
if os.getenv('FAST_PUPPETEER_CHROME_FETCHER'):
time.sleep(6)
res = client.get(url_for("edit_page", uuid="first"))
assert b"Extra headers file found and will be added to this watch" in res.data

View File

@@ -0,0 +1,258 @@
#!/usr/bin/python3
from flask import url_for
from .util import live_server_setup, wait_for_all_checks, extract_UUID_from_client
instock_props = [
# LD+JSON with non-standard list of 'type' https://github.com/dgtlmoon/changedetection.io/issues/1833
'<script type=\'application/ld+json\'>{"@context": "http://schema.org","@type": ["Product", "SubType"],"name": "My test product","description":"","Offers": { "@type": "Offer", "offeredBy": { "@type": "Organization", "name":"Person", "telephone":"+1 999 999 999" }, "price": $$PRICE$$, "priceCurrency": "EUR", "url": "/some/url", "availability": "http://schema.org/InStock"} }</script>',
# LD JSON
'<script type=\'application/ld+json\'>[{"@context":"http://schema.org","@type":"WebSite","name":"partsDíly.cz","description":"Nejlevnější autodlíly.","url":"https://parts.com/?id=3038915","potentialAction":{"@type":"SearchAction","target":"https://parts.com/vyhledavani?search={query}","query-input":{"@type":"PropertyValueSpecification","valueRequired":"http://schema.org/True","valueName":"query"}},"publisher":{"@context":"http://schema.org","@type":"Organization","name":"Car Díly.cz","url":"https://carparts.com/","logo":"https://parts.com/77026_3195959275.png","sameAs":["https://twitter.com/parts","https://www.instagram.com/parts/?hl=cs"]},"sameAs":["https://twitter.com/parts","https://www.instagram.com/parts/"]},{"@context":"http://schema.org","@type":"BreadcrumbList","itemListElement":[{"@type":"ListItem","position":0,"item":{"@id":"/autodily","name":"Autodíly pro osobní vozy"}},{"@type":"ListItem","position":1,"item":{"@id":"/autodily/dodge","name":"DODGE"}},{"@type":"ListItem","position":2,"item":{"@id":"https://parts.com/280kw","name":"parts parts • 100 kW"}}]},{"@context":"http://schema.org","@type":"Product","name":"Olejový filtr K&N Filters","description":"","mpn":"xxx11","brand":"K&N Filters","image":"https://parts.com/images/1600/c8fe1f1428021f4fe17a39297686178b04cba885.jpg","offers":{"@context":"http://schema.org","@type":"Offer","price":$$PRICE$$,"priceCurrency":"CZK","url":"https://parts.com/filters/hp","availability":"http://schema.org/InStock"}}]</script>',
'<script id="product-jsonld" type="application/ld+json">{"@context":"https://schema.org","@type":"Product","brand":{"@type":"Brand","name":"Ubiquiti"},"name":"UniFi Express","sku":"UX","description":"Impressively compact UniFi Cloud Gateway and WiFi 6 access point that runs UniFi Network. Powers an entire network or simply meshes as an access point.","url":"https://store.ui.com/us/en/products/ux","image":{"@type":"ImageObject","url":"https://cdn.ecomm.ui.com/products/4ed25b4c-db92-4b98-bbf3-b0989f007c0e/123417a2-895e-49c7-ba04-b6cd8f6acc03.png","width":"1500","height":"1500"},"offers":{"@type":"Offer","availability":"https://schema.org/InStock","priceSpecification":{"@type":"PriceSpecification","price":$$PRICE$$,"priceCurrency":"USD","valueAddedTaxIncluded":false}}}</script>',
'<script id="product-schema" type="application/ld+json">{"@context": "https://schema.org","@type": "Product","itemCondition": "https://schema.org/NewCondition","image": "//1.com/hmgo","name": "Polo MuscleFit","color": "Beige","description": "Polo","sku": "0957102010","brand": {"@type": "Brand","name": "H&M"},"category": {"@type": "Thing","name": "Polo"},"offers": [{"@type": "Offer","url": "https:/www2.xxxxxx.com/fr_fr/productpage.0957102010.html","priceCurrency": "EUR","price": $$PRICE$$,"availability": "http://schema.org/InStock","seller": { "@type": "Organization", "name": "H&amp;M"}}]}</script>'
# Microdata
'<div itemscope itemtype="https://schema.org/Product"><h1 itemprop="name">Example Product</h1><p itemprop="description">This is a sample product description.</p><div itemprop="offers" itemscope itemtype="https://schema.org/Offer"><p>Price: <span itemprop="price">$$$PRICE$$</span></p><link itemprop="availability" href="https://schema.org/InStock" /></div></div>'
]
out_of_stock_props = [
# out of stock AND contains multiples
'<script type="application/ld+json">{"@context":"http://schema.org","@type":"WebSite","url":"https://www.medimops.de/","potentialAction":{"@type":"SearchAction","target":"https://www.medimops.de/produkte-C0/?fcIsSearch=1&searchparam={searchparam}","query-input":"required name=searchparam"}}</script><script type="application/ld+json">{"@context":"http://schema.org","@type":"Product","name":"Horsetrader: Robert Sangster and the Rise and Fall of the Sport of Kings","image":"https://images2.medimops.eu/product/43a982/M00002551322-large.jpg","productID":"isbn:9780002551328","gtin13":"9780002551328","category":"Livres en langue étrangère","offers":{"@type":"Offer","priceCurrency":"EUR","price":$$PRICE$$,"itemCondition":"UsedCondition","availability":"OutOfStock"},"brand":{"@type":"Thing","name":"Patrick Robinson","url":"https://www.momox-shop.fr/,patrick-robinson/"}}</script>'
]
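The fixtures above cover LD+JSON and microdata product markup with a $$PRICE$$ placeholder. As a rough sketch of what the restock_diff processor is expected to read out of them, the following pulls price, currency and availability from the LD+JSON case using extruct (listed in requirements for exactly this purpose); the function name is hypothetical and the real processor handles more layouts (microdata, RDFa, priceSpecification blocks, non-standard lists of @type, etc.):

import extruct

def guess_restock_info(html, url="http://example.com/"):
    # Minimal sketch (assumption): only the common LD+JSON Product/Offer shape
    data = extruct.extract(html, base_url=url, syntaxes=['json-ld', 'microdata'])
    for item in data.get('json-ld', []):
        item_type = item.get('@type')
        types = item_type if isinstance(item_type, list) else [item_type]
        if 'Product' not in types:
            continue
        offers = item.get('offers') or item.get('Offers') or {}
        if isinstance(offers, list):
            offers = offers[0] if offers else {}
        return {
            'price': offers.get('price'),
            'currency': offers.get('priceCurrency'),
            'in_stock': 'InStock' in str(offers.get('availability', '')),
        }
    return None
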
def set_original_response(props_markup='', price="121.95"):
props_markup=props_markup.replace('$$PRICE$$', price)
test_return_data = f"""<html>
<body>
Some initial text<br>
<p>Which is across multiple lines</p>
<br>
So let's see what happens. <br>
<div>price: ${price}</div>
{props_markup}
</body>
</html>
"""
with open("test-datastore/endpoint-content.txt", "w") as f:
f.write(test_return_data)
return None
def test_setup(client, live_server):
live_server_setup(live_server)
def test_restock_itemprop_basic(client, live_server):
#live_server_setup(live_server)
test_url = url_for('test_endpoint', _external=True)
for p in instock_props:
set_original_response(props_markup=p)
client.post(
url_for("form_quick_watch_add"),
data={"url": test_url, "tags": 'restock tests', 'processor': 'restock_diff'},
follow_redirects=True
)
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'has-restock-info in-stock' in res.data
assert b'has-restock-info not-in-stock' not in res.data
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
for p in out_of_stock_props:
set_original_response(props_markup=p)
client.post(
url_for("form_quick_watch_add"),
data={"url": test_url, "tags": '', 'processor': 'restock_diff'},
follow_redirects=True
)
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'has-restock-info not-in-stock' in res.data
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_itemprop_price_change(client, live_server):
#live_server_setup(live_server)
test_url = url_for('test_endpoint', _external=True)
set_original_response(props_markup=instock_props[0], price="190.95")
client.post(
url_for("form_quick_watch_add"),
data={"url": test_url, "tags": 'restock tests', 'processor': 'restock_diff'},
follow_redirects=True
)
# A change in price, should trigger a change by default
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'190.95' in res.data
# basic price change, look for notification
set_original_response(props_markup=instock_props[0], price='180.45')
client.get(url_for("form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'180.45' in res.data
assert b'unviewed' in res.data
client.get(url_for("mark_all_viewed"), follow_redirects=True)
# turning off price change trigger, but it should show the new price, with no change notification
set_original_response(props_markup=instock_props[0], price='120.45')
res = client.post(
url_for("edit_page", uuid="first"),
data={"follow_price_changes": "", "url": test_url, "tags": "", "headers": "", 'fetch_backend': "html_requests"},
follow_redirects=True
)
assert b"Updated watch." in res.data
client.get(url_for("form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'120.45' in res.data
assert b'unviewed' not in res.data
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_itemprop_price_minmax_limit(client, live_server):
#live_server_setup(live_server)
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
test_url = url_for('test_endpoint', _external=True)
set_original_response(props_markup=instock_props[0], price="950.95")
client.post(
url_for("form_quick_watch_add"),
data={"url": test_url, "tags": 'restock tests', 'processor': 'restock_diff'},
follow_redirects=True
)
# A change in price, should trigger a change by default
wait_for_all_checks(client)
res = client.post(
url_for("edit_page", uuid="first"),
data={"follow_price_changes": "y",
"price_change_min": 900.0,
"price_change_max": 1100.10,
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests"
},
follow_redirects=True
)
assert b"Updated watch." in res.data
wait_for_all_checks(client)
client.get(url_for("mark_all_viewed"))
# price changed to something greater than min (900), and less than max (1100).. should be no change
set_original_response(props_markup=instock_props[0], price='1000.45')
client.get(url_for("form_watch_checknow"))
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'1000.45' in res.data
assert b'unviewed' not in res.data
# price changed to something LESS than min (900), SHOULD be a change
set_original_response(props_markup=instock_props[0], price='890.45')
res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
assert b'1 watches queued for rechecking.' in res.data
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'890.45' in res.data
assert b'unviewed' in res.data
client.get(url_for("mark_all_viewed"))
# price changed to something MORE than max (1100.10), SHOULD be a change
set_original_response(props_markup=instock_props[0], price='1890.45')
client.get(url_for("form_watch_checknow"), follow_redirects=True)
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'1890.45' in res.data
assert b'unviewed' in res.data
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
def test_itemprop_percent_threshold(client, live_server):
#live_server_setup(live_server)
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
test_url = url_for('test_endpoint', _external=True)
set_original_response(props_markup=instock_props[0], price="950.95")
client.post(
url_for("form_quick_watch_add"),
data={"url": test_url, "tags": 'restock tests', 'processor': 'restock_diff'},
follow_redirects=True
)
# A change in price, should trigger a change by default
wait_for_all_checks(client)
res = client.post(
url_for("edit_page", uuid="first"),
data={"follow_price_changes": "y",
"price_change_threshold_percent": 5.0,
"url": test_url,
"tags": "",
"headers": "",
'fetch_backend': "html_requests"
},
follow_redirects=True
)
assert b"Updated watch." in res.data
wait_for_all_checks(client)
# Basic change should not trigger
set_original_response(props_markup=instock_props[0], price='960.45')
client.get(url_for("form_watch_checknow"))
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'960.45' in res.data
assert b'unviewed' not in res.data
# Bigger INCREASE change than the threshold should trigger
set_original_response(props_markup=instock_props[0], price='1960.45')
client.get(url_for("form_watch_checknow"))
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'1960.45' in res.data
assert b'unviewed' in res.data
# Small decrease should NOT trigger
client.get(url_for("mark_all_viewed"))
set_original_response(props_markup=instock_props[0], price='1950.45')
client.get(url_for("form_watch_checknow"))
wait_for_all_checks(client)
res = client.get(url_for("index"))
assert b'1950.45' in res.data
assert b'unviewed' not in res.data
res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
assert b'Deleted' in res.data
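The three recheck rounds above encode the intended rule: a price move only counts as a change when it crosses the configured percentage of the previous price. A minimal sketch of that rule in isolation (function name and >= comparison are assumptions, not the processor's actual code):

def price_change_exceeds_threshold(previous_price, new_price, threshold_percent):
    # No threshold configured -> every price change counts
    if not threshold_percent:
        return previous_price != new_price
    change_percent = abs(new_price - previous_price) / previous_price * 100
    return change_percent >= threshold_percent

assert not price_change_exceeds_threshold(950.95, 960.45, 5.0)    # ~1% move, ignored
assert price_change_exceeds_threshold(960.45, 1960.45, 5.0)       # large increase, triggers
assert not price_change_exceeds_threshold(1960.45, 1950.45, 5.0)  # small decrease, ignored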

View File

@@ -0,0 +1,21 @@
#!/usr/bin/python3
# run from dir above changedetectionio/ dir
# python3 -m unittest changedetectionio.tests.unit.test_restock_logic
import unittest
import os
from changedetectionio.processors import restock_diff
# mostly
class TestDiffBuilder(unittest.TestCase):
def test_logic(self):
assert restock_diff.is_between(number=10, lower=9, upper=11) == True, "Between 9 and 11"
assert restock_diff.is_between(number=10, lower=0, upper=11) == True, "Between 0 and 11"
assert restock_diff.is_between(number=10, lower=None, upper=11) == True, "Between None and 11"
assert not restock_diff.is_between(number=12, lower=None, upper=11) == True, "12 is not between None and 11"
if __name__ == '__main__':
unittest.main()
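For reference, the behaviour these asserts pin down is a bounds check where a missing bound never excludes the value. A minimal sketch consistent with the asserts (not necessarily the exact implementation in restock_diff):

def is_between(number, lower, upper):
    # A missing (None) bound is treated as unbounded on that side
    return (lower is None or number >= lower) and (upper is None or number <= upper)

assert is_between(10, 9, 11)
assert is_between(10, None, 11)
assert not is_between(12, None, 11)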

View File

@@ -121,18 +121,21 @@ def extract_UUID_from_client(client):
return uuid.strip()
def wait_for_all_checks(client):
# actually this is not entirely true, it can still be 'processing' but not in the queue
# Loop waiting until done..
attempt=0
time.sleep(0.1)
# because sub-second rechecks are problematic in testing, use lots of delays
time.sleep(1)
while attempt < 60:
time.sleep(1)
res = client.get(url_for("index"))
if not b'Checking now' in res.data:
break
logging.getLogger().info("Waiting for watch-list to not say 'Checking now'.. {}".format(attempt))
time.sleep(1)
attempt += 1
time.sleep(1)
def live_server_setup(live_server):
@live_server.app.route('/test-random-content-endpoint')

View File

@@ -1,12 +1,11 @@
from . import content_fetchers
from .processors.restock_diff import UnableToExtractRestockData
from .processors.text_json_diff import FilterNotFoundInResponse
from changedetectionio import html_tools
from copy import deepcopy
import os
import queue
import threading
import queue
import time
from . import content_fetchers
from changedetectionio import html_tools
from .processors.text_json_diff import FilterNotFoundInResponse
from .processors.restock_diff import UnableToExtractRestockData
# A single update worker
#
@@ -246,18 +245,14 @@ class update_worker(threading.Thread):
contents = b''
process_changedetection_results = True
update_obj = {}
# Clear last errors (move to preflight func?)
self.datastore.data['watching'][uuid]['browser_steps_last_error_step'] = None
watch = self.datastore.data['watching'].get(uuid)
logger.info(f"Processing watch UUID {uuid} Priority {queued_item_data.priority} URL {watch['url']}")
logger.info(f"Processing watch UUID {uuid} "
f"Priority {queued_item_data.priority} "
f"URL {self.datastore.data['watching'][uuid]['url']}")
now = time.time()
try:
# Processor is what we are using for detecting the "Change"
processor = watch.get('processor', 'text_json_diff')
processor = self.datastore.data['watching'][uuid].get('processor', 'text_json_diff')
# if system...
# Abort processing when the content was the same as the last fetch
@@ -277,12 +272,14 @@ class update_worker(threading.Thread):
watch_uuid=uuid
)
# Clear last errors (move to preflight func?)
self.datastore.data['watching'][uuid]['browser_steps_last_error_step'] = None
update_handler.call_browser()
changed_detected, update_obj, contents = update_handler.run_changedetection(
watch=watch,
skip_when_checksum_same=skip_when_same_checksum,
)
changed_detected, update_obj, contents = update_handler.run_changedetection(uuid,
skip_when_checksum_same=skip_when_same_checksum,
)
# Re #342
# In Python 3, all strings are sequences of Unicode characters. There is a bytes type that holds raw bytes.
@@ -312,11 +309,7 @@ class update_worker(threading.Thread):
})
if e.screenshot:
watch.save_screenshot(screenshot=e.screenshot, as_error=True)
if e.xpath_data:
watch.save_xpath_data(data=e.xpath_data)
self.datastore.save_screenshot(watch_uuid=uuid, screenshot=e.screenshot)
process_changedetection_results = False
except content_fetchers.exceptions.Non200ErrorCodeReceived as e:
@@ -332,11 +325,11 @@ class update_worker(threading.Thread):
err_text = "Error - Request returned a HTTP error code {}".format(str(e.status_code))
if e.screenshot:
watch.save_screenshot(screenshot=e.screenshot, as_error=True)
self.datastore.save_screenshot(watch_uuid=uuid, screenshot=e.screenshot, as_error=True)
if e.xpath_data:
watch.save_xpath_data(data=e.xpath_data, as_error=True)
self.datastore.save_xpath_data(watch_uuid=uuid, data=e.xpath_data, as_error=True)
if e.page_text:
watch.save_error_text(contents=e.page_text)
self.datastore.save_error_text(watch_uuid=uuid, contents=e.page_text)
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text})
process_changedetection_results = False
@@ -348,23 +341,16 @@ class update_worker(threading.Thread):
err_text = "Warning, no filters were found, no change detection ran - Did the page change layout? update your Visual Filter if necessary."
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text})
# Filter wasn't found, but we should still update the visual selector so that they can have a chance to set it up again
if e.screenshot:
watch.save_screenshot(screenshot=e.screenshot)
if e.xpath_data:
watch.save_xpath_data(data=e.xpath_data)
# Only when enabled, send the notification
if watch.get('filter_failure_notification_send', False):
c = watch.get('consecutive_filter_failures', 5)
if self.datastore.data['watching'][uuid].get('filter_failure_notification_send', False):
c = self.datastore.data['watching'][uuid].get('consecutive_filter_failures', 5)
c += 1
# Send notification if we reached the threshold?
threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts',
0)
logger.warning(f"Filter for {uuid} not found, consecutive_filter_failures: {c}")
if threshold > 0 and c >= threshold:
if not watch.get('notification_muted'):
if not self.datastore.data['watching'][uuid].get('notification_muted'):
self.send_filter_failure_notification(uuid)
c = 0
@@ -414,15 +400,15 @@ class update_worker(threading.Thread):
}
)
if watch.get('filter_failure_notification_send', False):
c = watch.get('consecutive_filter_failures', 5)
if self.datastore.data['watching'][uuid].get('filter_failure_notification_send', False):
c = self.datastore.data['watching'][uuid].get('consecutive_filter_failures', 5)
c += 1
# Send notification if we reached the threshold?
threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts',
0)
logger.error(f"Step for {uuid} not found, consecutive_filter_failures: {c}")
if threshold > 0 and c >= threshold:
if not watch.get('notification_muted'):
if not self.datastore.data['watching'][uuid].get('notification_muted'):
self.send_step_failure_notification(watch_uuid=uuid, step_n=e.step_n)
c = 0
@@ -444,7 +430,7 @@ class update_worker(threading.Thread):
except content_fetchers.exceptions.JSActionExceptions as e:
err_text = "Error running JS Actions - Page request - "+e.message
if e.screenshot:
watch.save_screenshot(screenshot=e.screenshot, as_error=True)
self.datastore.save_screenshot(watch_uuid=uuid, screenshot=e.screenshot, as_error=True)
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
'last_check_status': e.status_code})
process_changedetection_results = False
@@ -454,7 +440,7 @@ class update_worker(threading.Thread):
err_text = "{} - {}".format(err_text, e.message)
if e.screenshot:
watch.save_screenshot(screenshot=e.screenshot, as_error=True)
self.datastore.save_screenshot(watch_uuid=uuid, screenshot=e.screenshot, as_error=True)
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
'last_check_status': e.status_code,
@@ -468,9 +454,12 @@ class update_worker(threading.Thread):
except UnableToExtractRestockData as e:
# Usually when fetcher.instock_data returns empty
logger.error(f"Exception (UnableToExtractRestockData) reached processing watch UUID: {uuid}")
logger.error(str(e))
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': f"Unable to extract restock data for this page unfortunately. (Got code {e.status_code} from server)"})
self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
self.datastore.update_watch(uuid=uuid,
update_obj={
'last_error': f"Unable to extract restock data for this page unfortunately. (Got code {e.status_code} from server), no embedded stock information was found and nothing interesting in the text, try using this watch with Chrome.",
}
)
process_changedetection_results = False
except Exception as e:
logger.error(f"Exception reached processing watch UUID: {uuid}")
@@ -478,6 +467,8 @@ class update_worker(threading.Thread):
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': "Exception: " + str(e)})
# Other serious error
process_changedetection_results = False
# import traceback
# print(traceback.format_exc())
else:
# Crash protection, the watch entry could have been removed by this point (during a slow chrome fetch etc)
@@ -485,7 +476,7 @@ class update_worker(threading.Thread):
continue
# Mark that we never had any failures
if not watch.get('ignore_status_codes'):
if not self.datastore.data['watching'][uuid].get('ignore_status_codes'):
update_obj['consecutive_filter_failures'] = 0
# Everything ran OK, clean off any previous error
@@ -493,48 +484,25 @@ class update_worker(threading.Thread):
self.cleanup_error_artifacts(uuid)
if not self.datastore.data['watching'].get(uuid):
continue
#
# Different exceptions mean that we may or may not want to bump the snapshot, trigger notifications etc
if process_changedetection_results:
# Always save the screenshot if it's available
if update_handler.screenshot:
watch.save_screenshot(screenshot=update_handler.screenshot)
if update_handler.xpath_data:
watch.save_xpath_data(data=update_handler.xpath_data)
try:
watch = self.datastore.data['watching'].get(uuid)
self.datastore.update_watch(uuid=uuid, update_obj=update_obj)
# Also save the snapshot on the first time checked
if changed_detected or not watch.get('last_checked'):
timestamp = round(time.time())
# Small hack so that we sleep just enough to allow 1 second between history snapshots
# this is because history.txt indexes/keys snapshots by epoch seconds and we don't want duplicate keys
if watch.newest_history_key and int(timestamp) == int(watch.newest_history_key):
logger.warning(
f"Timestamp {timestamp} already exists, waiting 1 seconds so we have a unique key in history.txt")
timestamp = str(int(timestamp) + 1)
time.sleep(1)
if changed_detected or not watch['last_checked']:
watch.save_history_text(contents=contents,
timestamp=timestamp,
timestamp=str(round(time.time())),
snapshot_id=update_obj.get('previous_md5', 'none'))
if update_handler.fetcher.content:
watch.save_last_fetched_html(contents=update_handler.fetcher.content, timestamp=timestamp)
# A change was detected
if changed_detected:
# Notifications should only trigger on the second time (first time, we gather the initial snapshot)
if watch.history_n >= 2:
logger.info(f"Change detected in UUID {uuid} - {watch['url']}")
if not watch.get('notification_muted'):
if not self.datastore.data['watching'][uuid].get('notification_muted'):
self.send_content_changed_notification(watch_uuid=uuid)
else:
logger.info(f"Change triggered in UUID {uuid} due to first history saving (no notifications sent) - {watch['url']}")
@@ -545,23 +513,29 @@ class update_worker(threading.Thread):
logger.critical(str(e))
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})
if self.datastore.data['watching'].get(uuid):
# Always record that we at least tried
count = self.datastore.data['watching'][uuid].get('check_count', 0) + 1
# Always record that we at least tried
count = watch.get('check_count', 0) + 1
# Record the 'server' header reply, can be used for actions in the future like cloudflare/akamai workarounds
try:
server_header = update_handler.fetcher.headers.get('server', '').strip().lower()[:255]
self.datastore.update_watch(uuid=uuid,
update_obj={'remote_server_reply': server_header}
)
except Exception as e:
pass
# Record the 'server' header reply, can be used for actions in the future like cloudflare/akamai workarounds
try:
server_header = update_handler.fetcher.headers.get('server', '').strip().lower()[:255]
self.datastore.update_watch(uuid=uuid,
update_obj={'remote_server_reply': server_header}
)
except Exception as e:
pass
self.datastore.update_watch(uuid=uuid, update_obj={'fetch_time': round(time.time() - now, 3),
'last_checked': round(time.time()),
'check_count': count
})
self.datastore.update_watch(uuid=uuid, update_obj={'fetch_time': round(time.time() - now, 3),
'last_checked': round(time.time()),
'check_count': count
})
# Always save the screenshot if it's available
if update_handler.screenshot:
self.datastore.save_screenshot(watch_uuid=uuid, screenshot=update_handler.screenshot)
if update_handler.xpath_data:
self.datastore.save_xpath_data(watch_uuid=uuid, data=update_handler.xpath_data)
self.current_uuid = None # Done
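One detail worth calling out from the hunk above: because history.txt keys snapshots by whole epoch seconds, two checks landing in the same second would collide, so the worker bumps the timestamp (and sleeps) when it matches the newest history key. A minimal sketch of just that collision guard, with names taken from the diff and the surrounding control flow stripped away:

import time

def unique_history_timestamp(newest_history_key):
    # history.txt is keyed by whole seconds; avoid a duplicate key when the
    # previous snapshot was written within the same second
    timestamp = round(time.time())
    if newest_history_key and int(timestamp) == int(newest_history_key):
        timestamp = int(timestamp) + 1
        time.sleep(1)
    return str(timestamp)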

View File

@@ -68,10 +68,9 @@ services:
# If WEBDRIVER or PLAYWRIGHT are enabled, changedetection container depends on that
# and must wait before starting (substitute "browser-chrome" with "playwright-chrome" if the latter is used)
# depends_on:
# playwright-chrome:
# condition: service_started
# depends_on:
# browser-chrome:
# condition: service_started
# Used for fetching pages via Playwright+Chrome where you need Javascript support.
# RECOMMENDED FOR FETCHING PAGES WITH CHROME

View File

@@ -82,5 +82,9 @@ pytest-flask ~=1.2
jsonschema==4.17.3
loguru
# For scraping all possible metadata relating to products so we can do better restock detection
extruct
# Needed for Python > 3.10, https://github.com/microsoft/playwright-python/issues/2096
greenlet >= 3.0.3