Compare commits: 0.38.1...proxies-js (516 commits)
| Author | SHA1 | Date |
|---|---|---|
The commit table (516 entries) was captured with only the SHA1 column populated; author, date, and commit-message details are not present in this dump.
.github/ISSUE_TEMPLATE/bug_report.md (vendored, new file, 58 lines)
@@ -0,0 +1,58 @@
---
name: Bug report
about: Create a bug report, if you don't follow this template, your report will be DELETED
title: ''
labels: 'triage'
assignees: 'dgtlmoon'

---

**DO NOT USE THIS FORM TO REPORT THAT A PARTICULAR WEBSITE IS NOT SCRAPING/WATCHING AS EXPECTED**

This form is only for direct bugs and feature requests todo directly with the software.

Please report watched websites (full URL and _any_ settings) that do not work with changedetection.io as expected [**IN THE DISCUSSION FORUMS**](https://github.com/dgtlmoon/changedetection.io/discussions) or your report will be deleted

CONSIDER TAKING OUT A SUBSCRIPTION FOR A SMALL PRICE PER MONTH, YOU GET THE BENEFIT OF USING OUR PAID PROXIES AND FURTHERING THE DEVELOPMENT OF CHANGEDETECTION.IO

THANK YOU


**Describe the bug**
A clear and concise description of what the bug is.

**Version**
*Exact version* in the top right area: 0....

**To Reproduce**

Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

! ALWAYS INCLUDE AN EXAMPLE URL WHERE IT IS POSSIBLE TO RE-CREATE THE ISSUE - USE THE 'SHARE WATCH' FEATURE AND PASTE IN THE SHARE-LINK!

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Desktop (please complete the following information):**
 - OS: [e.g. iOS]
 - Browser [e.g. chrome, safari]
 - Version [e.g. 22]

**Smartphone (please complete the following information):**
 - Device: [e.g. iPhone6]
 - OS: [e.g. iOS8.1]
 - Browser [e.g. stock browser, safari]
 - Version [e.g. 22]

**Additional context**
Add any other context about the problem here.
.github/ISSUE_TEMPLATE/feature_request.md (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
---
name: Feature request
about: Suggest an idea for this project
title: '[feature]'
labels: 'enhancement'
assignees: ''

---
**Version and OS**
For example, 0.123 on linux/docker

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe the use-case and give concrete real-world examples**
Attach any HTML/JSON, give links to sites, screenshots etc, we are not mind readers

**Additional context**
Add any other context or screenshots about the feature request here.
.github/workflows/containers.yml (vendored, new file, 130 lines)
@@ -0,0 +1,130 @@
name: Build and push containers

on:
  # Automatically triggered by a testing workflow passing, but this is only checked when it lands in the `master`/default branch
  # workflow_run:
  #   workflows: ["ChangeDetection.io Test"]
  #   branches: [master]
  #   tags: ['0.*']
  #   types: [completed]

  # Or a new tagged release
  release:
    types: [published, edited]

  push:
    branches:
      - master

jobs:
  metadata:
    runs-on: ubuntu-latest
    steps:
      - name: Show metadata
        run: |
          echo SHA ${{ github.sha }}
          echo github.ref: ${{ github.ref }}
          echo github_ref: $GITHUB_REF
          echo Event name: ${{ github.event_name }}
          echo Ref ${{ github.ref }}
          echo c: ${{ github.event.workflow_run.conclusion }}
          echo r: ${{ github.event.workflow_run }}
          echo tname: "${{ github.event.release.tag_name }}"
          echo headbranch: -${{ github.event.workflow_run.head_branch }}-
          set

  build-push-containers:
    runs-on: ubuntu-latest
    # If the testing workflow has a success, then we build to :latest
    # Or if we are in a tagged release scenario.
    if: ${{ github.event.workflow_run.conclusion == 'success' }} || ${{ github.event.release.tag_name }} != ''
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8 pytest
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
          if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi

      - name: Create release metadata
        run: |
          # COPY'ed by Dockerfile into changedetectionio/ of the image, then read by the server in store.py
          echo ${{ github.sha }} > changedetectionio/source.txt
          echo ${{ github.ref }} > changedetectionio/tag.txt

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
        with:
          image: tonistiigi/binfmt:latest
          platforms: all

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Login to Docker Hub Container Registry
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
        with:
          install: true
          version: latest
          driver-opts: image=moby/buildkit:master

      # master branch -> :dev container tag
      - name: Build and push :dev
        id: docker_build
        if: ${{ github.ref }} == "refs/heads/master"
        uses: docker/build-push-action@v2
        with:
          context: ./
          file: ./Dockerfile
          push: true
          tags: |
            ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:dev,ghcr.io/${{ github.repository }}:dev
          platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache

      # A new tagged release is required, which builds :tag and :latest
      - name: Build and push :tag
        id: docker_build_tag_release
        if: github.event_name == 'release' && startsWith(github.event.release.tag_name, '0.')
        uses: docker/build-push-action@v2
        with:
          context: ./
          file: ./Dockerfile
          push: true
          tags: |
            ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:${{ github.event.release.tag_name }}
            ghcr.io/dgtlmoon/changedetection.io:${{ github.event.release.tag_name }}
            ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:latest
            ghcr.io/dgtlmoon/changedetection.io:latest
          platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache

      - name: Image digest
        run: echo step SHA ${{ steps.vars.outputs.sha_short }} tag ${{steps.vars.outputs.tag}} branch ${{steps.vars.outputs.branch}} digest ${{ steps.docker_build.outputs.digest }}

      - name: Cache Docker layers
        uses: actions/cache@v2
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-
.github/workflows/image-tag.yml (vendored, deleted, 91 lines)
@@ -1,91 +0,0 @@
name: Test, build and push tagged release to Docker Hub

on:
  push:
    tags:
      - '*.*'

jobs:
  build:
    runs-on: ubuntu-latest
    steps:

      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9

      - uses: olegtarasov/get-tag@v2.1
        id: tagName

      #  with:
      #    tagRegex: "foobar-(.*)" # Optional. Returns specified group text as tag name. Full tag string is returned if regex is not defined.
      #    tagRegexGroup: 1 # Optional. Default is 1.


      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8 pytest
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi

      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics

      - name: Create release metadata
        run: |
          # COPY'ed by Dockerfile into backend/ of the image, then read by the server in store.py
          echo ${{ github.sha }} > backend/source.txt
          echo ${{ github.ref }} > backend/tag.txt

      - name: Test with pytest
        run: |
          # Each test is totally isolated and performs its own cleanup/reset
          cd backend; ./run_all_tests.sh

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
        with:
          image: tonistiigi/binfmt:latest
          platforms: all
      - name: Login to Docker Hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
        with:
          install: true
          version: latest
          driver-opts: image=moby/buildkit:master

      - name: tag
        run : echo ${{ github.event.release.tag_name }}

      - name: Build and push tagged version
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          context: ./
          file: ./Dockerfile
          push: true
          tags: |
            ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:${{ steps.tagName.outputs.tag }}
          platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache
        env:
          SOURCE_NAME: ${{ steps.branch_name.outputs.SOURCE_NAME }}
          SOURCE_BRANCH: ${{ steps.branch_name.outputs.SOURCE_BRANCH }}
          SOURCE_TAG: ${{ steps.branch_name.outputs.SOURCE_TAG }}

      - name: Image digest
        run: echo step SHA ${{ steps.vars.outputs.sha_short }} tag ${{steps.vars.outputs.tag}} branch ${{steps.vars.outputs.branch}} digest ${{ steps.docker_build.outputs.digest }}
.github/workflows/image.yml (vendored, deleted, 88 lines)
@@ -1,88 +0,0 @@
name: Test, build and push to Docker Hub

on:
  push:
    branches: [ master, arm-build ]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:

      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8 pytest
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi

      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics

      - name: Create release metadata
        run: |
          # COPY'ed by Dockerfile into backend/ of the image, then read by the server in store.py
          echo ${{ github.sha }} > backend/source.txt
          echo ${{ github.ref }} > backend/tag.txt

      - name: Test with pytest
        run: |
          # Each test is totally isolated and performs its own cleanup/reset
          cd backend; ./run_all_tests.sh

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
        with:
          image: tonistiigi/binfmt:latest
          platforms: all
      - name: Login to Docker Hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
        with:
          install: true
          version: latest
          driver-opts: image=moby/buildkit:master

      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          context: ./
          file: ./Dockerfile
          push: true
          tags: |
            ${{ secrets.DOCKER_HUB_USERNAME }}/changedetection.io:latest
          #  ${{ secrets.DOCKER_HUB_USERNAME }}:/changedetection.io:${{ env.RELEASE_VERSION }}
          platforms: linux/amd64,linux/arm64,linux/arm/v6,linux/arm/v7
          # platforms: linux/amd64
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache

      - name: Image digest
        run: echo step SHA ${{ steps.vars.outputs.sha_short }} tag ${{steps.vars.outputs.tag}} branch ${{steps.vars.outputs.branch}} digest ${{ steps.docker_build.outputs.digest }}

      # failed: Cache service responded with 503
      # - name: Cache Docker layers
      #   uses: actions/cache@v2
      #   with:
      #     path: /tmp/.buildx-cache
      #     key: ${{ runner.os }}-buildx-${{ github.sha }}
      #     restore-keys: |
      #       ${{ runner.os }}-buildx-
.github/workflows/pypi.yml (vendored, new file, 44 lines)
@@ -0,0 +1,44 @@
name: PyPi Test and Push tagged release

# Triggers the workflow on push or pull request events
on:
  workflow_run:
    workflows: ["ChangeDetection.io Test"]
    tags: '*.*'
    types: [completed]


jobs:
  test-build:
    runs-on: ubuntu-latest
    steps:

      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9

      # - name: Install dependencies
      #   run: |
      #     python -m pip install --upgrade pip
      #     pip install flake8 pytest
      #     if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
      #     if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi

      - name: Test that pip builds without error
        run: |
          pip3 --version
          python3 -m pip install wheel
          python3 setup.py bdist_wheel
          python3 -m pip install dist/changedetection.io-*-none-any.whl --force
          changedetection.io -d /tmp -p 10000 &
          sleep 3
          curl http://127.0.0.1:10000/static/styles/pure-min.css >/dev/null
          killall -9 changedetection.io

      # https://github.com/docker/build-push-action/blob/master/docs/advanced/test-before-push.md ?
      # https://github.com/docker/buildx/issues/59 ? Needs to be one platform?

      # https://github.com/docker/buildx/issues/495#issuecomment-918925854
      #if: ${{ github.event_name == 'release'}}
.github/workflows/test-only.yml (vendored, modified, 18 lines changed)
@@ -4,7 +4,7 @@ name: ChangeDetection.io Test

on: [push, pull_request]

jobs:
  build:
  test-build:
    runs-on: ubuntu-latest
    steps:

@@ -14,20 +14,32 @@ jobs:
        with:
          python-version: 3.9

      - name: Show env vars
        run: set

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8 pytest
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi

          if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics

      - name: Unit tests
        run: |
          python3 -m unittest changedetectionio.tests.unit.test_notification_diff

      - name: Test with pytest
        run: |
          # Each test is totally isolated and performs its own cleanup/reset
          cd backend; ./run_all_tests.sh
          cd changedetectionio; ./run_all_tests.sh

      # https://github.com/docker/build-push-action/blob/master/docs/advanced/test-before-push.md ?
      # https://github.com/docker/buildx/issues/59 ? Needs to be one platform?

      # https://github.com/docker/buildx/issues/495#issuecomment-918925854
.gitignore (vendored, modified, 6 lines changed)
@@ -5,3 +5,9 @@ datastore/url-watches.json
datastore/*
__pycache__
.pytest_cache
build
dist
venv
test-datastore
*.egg-info*
.vscode/settings.json
CONTRIBUTING.md (new file, 15 lines)
@@ -0,0 +1,15 @@
Contributing is always welcome!

I am no professional flask developer, if you know a better way that something can be done, please let me know!

Otherwise, it's always best to PR into the `dev` branch.

Please be sure that all new functionality has a matching test!

Use `pytest` to validate/test, you can run the existing tests as `pytest tests/test_notifications.py` for example

```
pip3 install -r requirements-dev
```

this is from https://github.com/dgtlmoon/changedetection.io/blob/master/requirements-dev.txt
Dockerfile (modified, 14 lines changed)
@@ -12,7 +12,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
    libxslt-dev \
    zlib1g-dev \
    g++

RUN mkdir /install
WORKDIR /install

@@ -20,6 +20,11 @@ COPY requirements.txt /requirements.txt

RUN pip install --target=/dependencies -r /requirements.txt

# Playwright is an alternative to Selenium
# Excluded this package from requirements.txt to prevent arm/v6 and arm/v7 builds from failing
RUN pip install --target=/dependencies playwright~=1.24 \
    || echo "WARN: Failed to install Playwright. The application can still run, but the Playwright option will be disabled."

# Final image stage
FROM python:3.8-slim

@@ -42,12 +47,17 @@ ENV PYTHONUNBUFFERED=1

RUN [ ! -d "/datastore" ] && mkdir /datastore

# Re #80, sets SECLEVEL=1 in openssl.conf to allow monitoring sites with weak/old cipher suites
RUN sed -i 's/^CipherString = .*/CipherString = DEFAULT@SECLEVEL=1/' /etc/ssl/openssl.cnf

# Copy modules over to the final image and add their dir to PYTHONPATH
COPY --from=builder /dependencies /usr/local
ENV PYTHONPATH=/usr/local

EXPOSE 5000

# The actual flask app
COPY backend /app/backend
COPY changedetectionio /app/changedetectionio
# The eventlet server wrapper
COPY changedetection.py /app/changedetection.py
MANIFEST.in (new file, 8 lines)
@@ -0,0 +1,8 @@
recursive-include changedetectionio/api *
recursive-include changedetectionio/templates *
recursive-include changedetectionio/static *
recursive-include changedetectionio/model *
include changedetection.py
global-exclude *.pyc
global-exclude node_modules
global-exclude venv
Procfile (new file, 1 line)
@@ -0,0 +1 @@
web: python3 ./changedetection.py -C -d ./datastore -p $PORT
README-pip.md (new file, 58 lines)
@@ -0,0 +1,58 @@
## Web Site Change Detection, Monitoring and Notification.

Live your data-life pro-actively, track website content changes and receive notifications via Discord, Email, Slack, Telegram and 70+ more

[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />](https://lemonade.changedetection.io/start?src=pip)

[**Don't have time? Let us host it for you! try our extremely affordable subscription use our proxies and support!**](https://lemonade.changedetection.io/start)

#### Example use cases

- Products and services have a change in pricing
- _Out of stock notification_ and _Back In stock notification_
- Governmental department updates (changes are often only on their websites)
- New software releases, security advisories when you're not on their mailing list.
- Festivals with changes
- Realestate listing changes
- Know when your favourite whiskey is on sale, or other special deals are announced before anyone else
- COVID related news from government websites
- University/organisation news from their website
- Detect and monitor changes in JSON API responses
- JSON API monitoring and alerting
- Changes in legal and other documents
- Trigger API calls via notifications when text appears on a website
- Glue together APIs using the JSON filter and JSON notifications
- Create RSS feeds based on changes in web content
- Monitor HTML source code for unexpected changes, strengthen your PCI compliance
- You have a very sensitive list of URLs to watch and you do _not_ want to use the paid alternatives. (Remember, _you_ are the product)

_Need an actual Chrome runner with Javascript support? We support fetching via WebDriver and Playwright!</a>_

#### Key Features

- Lots of trigger filters, such as "Trigger on text", "Remove text by selector", "Ignore text", "Extract text", also using regular-expressions!
- Target elements with xPath and CSS Selectors, Easily monitor complex JSON with JsonPath rules
- Switch between fast non-JS and Chrome JS based "fetchers"
- Easily specify how often a site should be checked
- Execute JS before extracting text (Good for logging in, see examples in the UI!)
- Override Request Headers, Specify `POST` or `GET` and other methods
- Use the "Visual Selector" to help target specific elements

```bash
$ pip3 install changedetection.io
```

Specify a target for the *datastore path* with `-d` (required) and a *listening port* with `-p` (defaults to `5000`)

```bash
$ changedetection.io -d /path/to/empty/data/dir -p 5000
```

Then visit http://127.0.0.1:5000 , You should now be able to access the UI.

See https://github.com/dgtlmoon/changedetection.io for more information.
README.md (modified, 197 lines changed)
@@ -1,74 +1,132 @@
# changedetection.io
## Web Site Change Detection, Monitoring and Notification.

Live your data-life pro-actively, track website content changes and receive notifications via Discord, Email, Slack, Telegram and 70+ more

[<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />](https://lemonade.changedetection.io/start?src=github)

[![Release Version][release-shield]][release-link] [![Docker Pulls][docker-pulls]][docker-link] [![License][license-shield]](LICENSE.md)

<a href="https://hub.docker.com/r/dgtlmoon/changedetection.io" target="_blank" title="Change detection docker hub">
<img src="https://img.shields.io/docker/pulls/dgtlmoon/changedetection.io" alt="Docker Pulls"/>
</a>
<a href="https://hub.docker.com/r/dgtlmoon/changedetection.io" target="_blank" title="Change detection docker hub">
<img src="https://img.shields.io/github/v/release/dgtlmoon/changedetection.io" alt="Change detection latest tag version"/>
</a>

## Self-hosted change monitoring of web pages.
Know when important content changes, we support notifications via Discord, Telegram, Home-Assistant, Slack, Email and 70+ more

_Know when web pages change! Stay ontop of new information!_

Live your data-life *pro-actively* instead of *re-actively*, do not rely on manipulative social media for consuming important information.
[**Don't have time? Let us host it for you! try our $6.99/month subscription - use our proxies and support!**](https://lemonade.changedetection.io/start) , _half the price of other website change monitoring services and comes with unlimited watches & checks!_

<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/screenshot.png" style="max-width:100%;" alt="Self-hosted web page change monitoring" title="Self-hosted web page change monitoring" />

- Automatic Updates, Automatic Backups, No Heroku "paused application", don't miss a change!
- Javascript browser included
- Unlimited checks and watches!

#### Example use cases

Know when ...

- Government department updates (changes are often only on their websites)
- Local government news (changes are often only on their websites)
- Products and services have a change in pricing
- _Out of stock notification_ and _Back In stock notification_
- Governmental department updates (changes are often only on their websites)
- New software releases, security advisories when you're not on their mailing list.
- Festivals with changes
- Realestate listing changes
- Know when your favourite whiskey is on sale, or other special deals are announced before anyone else
- COVID related news from government websites
- University/organisation news from their website
- Detect and monitor changes in JSON API responses
- API monitoring and alerting
- JSON API monitoring and alerting
- Changes in legal and other documents
- Trigger API calls via notifications when text appears on a website
- Glue together APIs using the JSON filter and JSON notifications
- Create RSS feeds based on changes in web content
- Monitor HTML source code for unexpected changes, strengthen your PCI compliance
- You have a very sensitive list of URLs to watch and you do _not_ want to use the paid alternatives. (Remember, _you_ are the product)

_Need an actual Chrome runner with Javascript support? see the experimental <a href="https://github.com/dgtlmoon/changedetection.io/tree/javascript-browser">Javascript/Chrome support changedetection.io branch!</a>_
_Need an actual Chrome runner with Javascript support? We support fetching via WebDriver and Playwright!</a>_

**Get monitoring now! super simple, one command!**
Run the python code on your own machine by cloning this repository, or with <a href="https://docs.docker.com/get-docker/">docker</a> and/or <a href="https://www.digitalocean.com/community/tutorial_collections/how-to-install-docker-compose">docker-compose</a>
#### Key Features

With one docker-compose command
- Lots of trigger filters, such as "Trigger on text", "Remove text by selector", "Ignore text", "Extract text", also using regular-expressions!
- Target elements with xPath and CSS Selectors, Easily monitor complex JSON with JsonPath rules
- Switch between fast non-JS and Chrome JS based "fetchers"
- Easily specify how often a site should be checked
- Execute JS before extracting text (Good for logging in, see examples in the UI!)
- Override Request Headers, Specify `POST` or `GET` and other methods
- Use the "Visual Selector" to help target specific elements

## Screenshots

### Examine differences in content.

Easily see what changed, examine by word, line, or individual character.

<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot-diff.png" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />

Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/

### Filter by elements using the Visual Selector tool.

Available when connected to a <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Playwright-content-fetcher">playwright content fetcher</a> (included as part of our subscription service)

<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/visualselector-anim.gif" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />

## Installation

### Docker

With Docker composer, just clone this repository and..

```bash
docker-compose up -d
$ docker-compose up -d
```

or
Docker standalone
```bash
$ docker run -d --restart always -p "127.0.0.1:5000:5000" -v datastore-volume:/datastore --name changedetection.io dgtlmoon/changedetection.io
```

`:latest` tag is our latest stable release, `:dev` tag is our bleeding edge `master` branch.

### Windows

See the install instructions at the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Microsoft-Windows

### Python Pip

Check out our pypi page https://pypi.org/project/changedetection.io/

```bash
docker run -d --restart always -p "127.0.0.1:5000:5000" -v datastore-volume:/datastore --name changedetection.io dgtlmoon/changedetection.io
```
$ pip3 install changedetection.io
$ changedetection.io -d /path/to/empty/data/dir -p 5000
```

Now visit http://127.0.0.1:5000 , You should now be able to access the UI.
Then visit http://127.0.0.1:5000 , You should now be able to access the UI.

#### Updating to latest version
_Now with per-site configurable support for using a fast built in HTTP fetcher or use a Chrome based fetcher for monitoring of JavaScript websites!_

Highly recommended :)
## Updating changedetection.io

```bash
### Docker
```
docker pull dgtlmoon/changedetection.io
docker kill $(docker ps -a|grep changedetection.io|awk '{print $1}')
docker rm $(docker ps -a|grep changedetection.io|awk '{print $1}')
docker run -d --restart always -p "127.0.0.1:5000:5000" -v datastore-volume:/datastore --name changedetection.io dgtlmoon/changedetection.io
```

### Screenshots

Examining differences in content.
### docker-compose

<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/screenshot-diff.png" style="max-width:100%;" alt="Self-hosted web page change monitoring context difference " title="Self-hosted web page change monitoring context difference " />
```bash
docker-compose pull && docker-compose up -d
```

Please :star: star :star: this project and help it grow! https://github.com/dgtlmoon/changedetection.io/
See the wiki for more information https://github.com/dgtlmoon/changedetection.io/wiki

### Notifications

## Filters
XPath, JSONPath and CSS support comes baked in! You can be as specific as you need, use XPath exported from various XPath element query creation tools.

(We support LXML `re:test`, `re:math` and `re:replace`.)
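To make the `re:` XPath functions above concrete, here is a minimal standalone sketch (an editor's illustration, not part of this diff) using lxml's EXSLT regular-expression namespace; the markup and the pattern are invented purely for the example:

```python
from lxml import html

# Invented markup purely for illustration
doc = html.fromstring("<div><span>USD 23.50</span><span>contact us</span></div>")

# lxml exposes the EXSLT regular-expressions extension under this namespace,
# which provides the re:test(), re:match() and re:replace() XPath functions.
ns = {"re": "http://exslt.org/regular-expressions"}

# Select only the <span> elements whose text looks like a price
prices = doc.xpath(r"//span[re:test(text(), '\d+\.\d{2}')]", namespaces=ns)
print([el.text for el in prices])  # ['USD 23.50']
```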
## Notifications

ChangeDetection.io supports a massive amount of notifications (including email, office365, custom APIs, etc) when a web-page has a change detected thanks to the <a href="https://github.com/caronc/apprise">apprise</a> library.
Simply set one or more notification URL's in the _[edit]_ tab of that watch.

@@ -86,60 +144,67 @@ Just some examples

    json://someserver.com/custom-api
    syslog://

<a href="https://github.com/caronc/apprise">And everything else in this list!</a>
<a href="https://github.com/caronc/apprise#popular-notification-services">And everything else in this list!</a>

<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/screenshot-notifications.png" style="max-width:100%;" alt="Self-hosted web page change monitoring notifications" title="Self-hosted web page change monitoring notifications" />
<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/screenshot-notifications.png" style="max-width:100%;" alt="Self-hosted web page change monitoring notifications" title="Self-hosted web page change monitoring notifications" />

Now you can also customise your notification content!
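As a rough sketch of how the underlying apprise library sends a notification to such URLs (an editor's illustration, not part of this diff; the webhook and mail credentials below are placeholders):

```python
import apprise

# One Apprise object can hold many notification URLs, much like the
# notification URL list in a watch's [edit] tab.
apobj = apprise.Apprise()
apobj.add("discord://webhook_id/webhook_token")   # placeholder credentials
apobj.add("mailto://user:password@example.com")   # placeholder credentials

# Send the same title and body to every configured service
apobj.notify(
    title="Change detected",
    body="https://example.com/page has changed",
)
```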
### JSON API Monitoring
## JSON API Monitoring

Detect changes and monitor data in JSON API's by using the built-in JSONPath selectors as a filter / selector.

This will re-parse the JSON and apply formatting to the text, making it super easy to monitor and detect changes in JSON API results

### Proxy
### Parse JSON embedded in HTML!

A proxy for ChangeDetection.io can be configured by setting environment the
`HTTP_PROXY`, `HTTPS_PROXY` variables, examples are also in the `docker-compose.yml`

`NO_PROXY` exclude list can be specified by following `"localhost,192.168.0.0/24"`

as `docker run` with `-e`
When you enable a `json:` filter, you can even automatically extract and parse embedded JSON inside a HTML page! Amazingly handy for sites that build content based on JSON, such as many e-commerce websites.

```
docker run -d --restart always -e HTTPS_PROXY="socks5h://10.10.1.10:1080" -p "127.0.0.1:5000:5000" -v datastore-volume:/datastore --name changedetection.io dgtlmoon/changedetection.io
```
<html>
...
<script type="application/ld+json">
{"@context":"http://schema.org","@type":"Product","name":"Nan Optipro Stage 1 Baby Formula 800g","price": 23.50 }
</script>
```

With `docker-compose`, see the `Proxy support example` in <a href="https://github.com/dgtlmoon/changedetection.io/blob/master/docker-compose.yml">docker-compose.yml</a>.
`json:$.price` would give `23.50`, or you can extract the whole structure
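To illustrate the `json:$.price` behaviour above, here is a minimal sketch (an editor's illustration, not part of this diff) that evaluates the same JSONPath expression with the jsonpath-ng package against the JSON-LD blob from the snippet; which library the project itself uses is not shown in this diff:

```python
import json
from jsonpath_ng import parse

# The embedded <script type="application/ld+json"> blob from the example above
raw = '{"@context":"http://schema.org","@type":"Product","name":"Nan Optipro Stage 1 Baby Formula 800g","price": 23.50}'
data = json.loads(raw)

# The watch filter `json:$.price` corresponds to the JSONPath expression `$.price`
matches = parse("$.price").find(data)
print(matches[0].value)  # 23.5
```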
For more information see https://docs.python-requests.org/en/master/user/advanced/#proxies
## Proxy configuration

This proxy support also extends to the notifications https://github.com/caronc/apprise/issues/387#issuecomment-841718867
See the wiki https://github.com/dgtlmoon/changedetection.io/wiki/Proxy-configuration

### Notes
## Raspberry Pi support?

- ~~Does not yet support Javascript~~
- ~~Wont work with Cloudfare type "Please turn on javascript" protected pages~~
- You can use the 'headers' section to monitor password protected web page changes

See the experimental <a href="https://github.com/dgtlmoon/changedetection.io/tree/javascript-browser">Javascript/Chrome browser support!</a>

### RaspberriPi support?

RaspberriPi and linux/arm/v6 linux/arm/v7 arm64 devices are supported!
Raspberry Pi and linux/arm/v6 linux/arm/v7 arm64 devices are supported! See the wiki for [details](https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver)

### Support us
## Support us

Do you use changedetection.io to make money? does it save you time or money? Does it make your life easier? less stressful? Remember, we write this software when we should be doing actual paid work, we have to buy food and pay rent just like you.

Please support us, even small amounts help a LOT.

BTC `1PLFN327GyUarpJd7nVe7Reqg9qHx5frNn`
Firstly, consider taking out a [change detection monthly subscription - unlimited checks and watches](https://lemonade.changedetection.io/start) , even if you don't use it, you still get the warm fuzzy feeling of helping out the project. (And who knows, you might just use it!)

<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/btc-support.png" style="max-width:50%;" alt="Support us!" />
Or directly donate an amount PayPal [](https://www.paypal.com/donate/?hosted_button_id=7CP6HR9ZCNDYJ)

Or BTC `1PLFN327GyUarpJd7nVe7Reqg9qHx5frNn`

<img src="https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/docs/btc-support.png" style="max-width:50%;" alt="Support us!" />

## Commercial Support

I offer commercial support, this software is depended on by network security, aerospace , data-science and data-journalist professionals just to name a few, please reach out at dgtlmoon@gmail.com for any enquiries, I am more than glad to work with your organisation to further the possibilities of what can be done with changedetection.io

[release-shield]: https://img.shields.io:/github/v/release/dgtlmoon/changedetection.io?style=for-the-badge
[docker-pulls]: https://img.shields.io/docker/pulls/dgtlmoon/changedetection.io?style=for-the-badge
[test-shield]: https://github.com/dgtlmoon/changedetection.io/actions/workflows/test-only.yml/badge.svg?branch=master

[license-shield]: https://img.shields.io/github/license/dgtlmoon/changedetection.io.svg?style=for-the-badge
[release-link]: https://github.com/dgtlmoon.com/changedetection.io/releases
[docker-link]: https://hub.docker.com/r/dgtlmoon/changedetection.io
app.json (new file, 21 lines)
@@ -0,0 +1,21 @@
{
  "name": "ChangeDetection.io",
  "description": "The best and simplest self-hosted open source website change detection monitoring and notification service.",
  "keywords": [
    "changedetection",
    "website monitoring"
  ],
  "repository": "https://github.com/dgtlmoon/changedetection.io",
  "success_url": "/",
  "scripts": {
  },
  "env": {
  },
  "formation": {
    "web": {
      "quantity": 1,
      "size": "free"
    }
  },
  "image": "heroku/python"
}
@@ -1,880 +0,0 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
|
||||
# @todo logging
|
||||
# @todo extra options for url like , verify=False etc.
|
||||
# @todo enable https://urllib3.readthedocs.io/en/latest/user-guide.html#ssl as option?
|
||||
# @todo option for interval day/6 hour/etc
|
||||
# @todo on change detected, config for calling some API
|
||||
# @todo fetch title into json
|
||||
# https://distill.io/features
|
||||
# proxy per check
|
||||
# - flask_cors, itsdangerous,MarkupSafe
|
||||
|
||||
import time
|
||||
import os
|
||||
import timeago
|
||||
import flask_login
|
||||
from flask_login import login_required
|
||||
|
||||
import threading
|
||||
from threading import Event
|
||||
|
||||
import queue
|
||||
|
||||
from flask import Flask, render_template, request, send_from_directory, abort, redirect, url_for, flash
|
||||
|
||||
from feedgen.feed import FeedGenerator
|
||||
from flask import make_response
|
||||
import datetime
|
||||
import pytz
|
||||
|
||||
datastore = None
|
||||
|
||||
# Local
|
||||
running_update_threads = []
|
||||
ticker_thread = None
|
||||
|
||||
extra_stylesheets = []
|
||||
|
||||
update_q = queue.Queue()
|
||||
|
||||
notification_q = queue.Queue()
|
||||
|
||||
app = Flask(__name__, static_url_path="/var/www/change-detection/backend/static")
|
||||
|
||||
# Stop browser caching of assets
|
||||
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
|
||||
|
||||
app.config.exit = Event()
|
||||
|
||||
app.config['NEW_VERSION_AVAILABLE'] = False
|
||||
|
||||
app.config['LOGIN_DISABLED'] = False
|
||||
|
||||
#app.config["EXPLAIN_TEMPLATE_LOADING"] = True
|
||||
|
||||
# Disables caching of the templates
|
||||
app.config['TEMPLATES_AUTO_RELOAD'] = True
|
||||
|
||||
|
||||
def init_app_secret(datastore_path):
|
||||
secret = ""
|
||||
|
||||
path = "{}/secret.txt".format(datastore_path)
|
||||
|
||||
try:
|
||||
with open(path, "r") as f:
|
||||
secret = f.read()
|
||||
|
||||
except FileNotFoundError:
|
||||
import secrets
|
||||
with open(path, "w") as f:
|
||||
secret = secrets.token_hex(32)
|
||||
f.write(secret)
|
||||
|
||||
return secret
|
||||
|
||||
# Remember python is by reference
|
||||
# populate_form in wtfors didnt work for me. (try using a setattr() obj type on datastore.watch?)
|
||||
def populate_form_from_watch(form, watch):
|
||||
for i in form.__dict__.keys():
|
||||
if i[0] != '_':
|
||||
p = getattr(form, i)
|
||||
if hasattr(p, 'data') and i in watch:
|
||||
if not p.data:
|
||||
setattr(p, "data", watch[i])
|
||||
|
||||
|
||||
# We use the whole watch object from the store/JSON so we can see if there's some related status in terms of a thread
|
||||
# running or something similar.
|
||||
@app.template_filter('format_last_checked_time')
|
||||
def _jinja2_filter_datetime(watch_obj, format="%Y-%m-%d %H:%M:%S"):
|
||||
# Worker thread tells us which UUID it is currently processing.
|
||||
for t in running_update_threads:
|
||||
if t.current_uuid == watch_obj['uuid']:
|
||||
return "Checking now.."
|
||||
|
||||
if watch_obj['last_checked'] == 0:
|
||||
return 'Not yet'
|
||||
|
||||
return timeago.format(int(watch_obj['last_checked']), time.time())
|
||||
|
||||
|
||||
# @app.context_processor
|
||||
# def timeago():
|
||||
# def _timeago(lower_time, now):
|
||||
# return timeago.format(lower_time, now)
|
||||
# return dict(timeago=_timeago)
|
||||
|
||||
@app.template_filter('format_timestamp_timeago')
|
||||
def _jinja2_filter_datetimestamp(timestamp, format="%Y-%m-%d %H:%M:%S"):
|
||||
return timeago.format(timestamp, time.time())
|
||||
# return timeago.format(timestamp, time.time())
|
||||
# return datetime.datetime.utcfromtimestamp(timestamp).strftime(format)
|
||||
|
||||
|
||||
class User(flask_login.UserMixin):
|
||||
id=None
|
||||
|
||||
def set_password(self, password):
|
||||
return True
|
||||
def get_user(self, email="defaultuser@changedetection.io"):
|
||||
return self
|
||||
def is_authenticated(self):
|
||||
|
||||
return True
|
||||
def is_active(self):
|
||||
return True
|
||||
def is_anonymous(self):
|
||||
return False
|
||||
def get_id(self):
|
||||
return str(self.id)
|
||||
|
||||
def check_password(self, password):
|
||||
|
||||
import hashlib
|
||||
import base64
|
||||
|
||||
# Getting the values back out
|
||||
raw_salt_pass = base64.b64decode(datastore.data['settings']['application']['password'])
|
||||
salt_from_storage = raw_salt_pass[:32] # 32 is the length of the salt
|
||||
|
||||
# Use the exact same setup you used to generate the key, but this time put in the password to check
|
||||
new_key = hashlib.pbkdf2_hmac(
|
||||
'sha256',
|
||||
password.encode('utf-8'), # Convert the password to bytes
|
||||
salt_from_storage,
|
||||
100000
|
||||
)
|
||||
new_key = salt_from_storage + new_key
|
||||
|
||||
return new_key == raw_salt_pass
|
||||
|
||||
pass
|
||||
|
||||
def changedetection_app(config=None, datastore_o=None):
|
||||
global datastore
|
||||
datastore = datastore_o
|
||||
|
||||
app.config.update(dict(DEBUG=True))
|
||||
#app.config.update(config or {})
|
||||
|
||||
login_manager = flask_login.LoginManager(app)
|
||||
login_manager.login_view = 'login'
|
||||
app.secret_key = init_app_secret(config['datastore_path'])
|
||||
|
||||
# Setup cors headers to allow all domains
|
||||
# https://flask-cors.readthedocs.io/en/latest/
|
||||
# CORS(app)
|
||||
|
||||
@login_manager.user_loader
|
||||
def user_loader(email):
|
||||
user = User()
|
||||
user.get_user(email)
|
||||
return user
|
||||
|
||||
@login_manager.unauthorized_handler
|
||||
def unauthorized_handler():
|
||||
# @todo validate its a URL of this host and use that
|
||||
return redirect(url_for('login', next=url_for('index')))
|
||||
|
||||
@app.route('/logout')
|
||||
def logout():
|
||||
flask_login.logout_user()
|
||||
return redirect(url_for('index'))
|
||||
|
||||
# https://github.com/pallets/flask/blob/93dd1709d05a1cf0e886df6223377bdab3b077fb/examples/tutorial/flaskr/__init__.py#L39
|
||||
# You can divide up the stuff like this
|
||||
@app.route('/login', methods=['GET', 'POST'])
|
||||
def login():
|
||||
|
||||
if not datastore.data['settings']['application']['password']:
|
||||
flash("Login not required, no password enabled.", "notice")
|
||||
return redirect(url_for('index'))
|
||||
|
||||
if request.method == 'GET':
|
||||
output = render_template("login.html")
|
||||
return output
|
||||
|
||||
user = User()
|
||||
user.id = "defaultuser@changedetection.io"
|
||||
|
||||
password = request.form.get('password')
|
||||
|
||||
if (user.check_password(password)):
|
||||
flask_login.login_user(user, remember=True)
|
||||
next = request.args.get('next')
|
||||
# if not is_safe_url(next):
|
||||
# return flask.abort(400)
|
||||
return redirect(next or url_for('index'))
|
||||
|
||||
else:
|
||||
flash('Incorrect password', 'error')
|
||||
|
||||
return redirect(url_for('login'))
|
||||
|
||||
@app.before_request
|
||||
def do_something_whenever_a_request_comes_in():
|
||||
# Disable password login if there is not one set
|
||||
app.config['LOGIN_DISABLED'] = datastore.data['settings']['application']['password'] == False
|
||||
|
||||
@app.route("/", methods=['GET'])
|
||||
@login_required
|
||||
def index():
|
||||
limit_tag = request.args.get('tag')
|
||||
pause_uuid = request.args.get('pause')
|
||||
|
||||
if pause_uuid:
|
||||
try:
|
||||
datastore.data['watching'][pause_uuid]['paused'] ^= True
|
||||
datastore.needs_write = True
|
||||
|
||||
return redirect(url_for('index', tag = limit_tag))
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
|
||||
# Sort by last_changed and add the uuid which is usually the key..
|
||||
sorted_watches = []
|
||||
for uuid, watch in datastore.data['watching'].items():
|
||||
|
||||
if limit_tag != None:
|
||||
# Support for comma separated list of tags.
|
||||
for tag_in_watch in watch['tag'].split(','):
|
||||
tag_in_watch = tag_in_watch.strip()
|
||||
if tag_in_watch == limit_tag:
|
||||
watch['uuid'] = uuid
|
||||
sorted_watches.append(watch)
|
||||
|
||||
else:
|
||||
watch['uuid'] = uuid
|
||||
sorted_watches.append(watch)
|
||||
|
||||
sorted_watches.sort(key=lambda x: x['last_changed'], reverse=True)
|
||||
|
||||
existing_tags = datastore.get_all_tags()
|
||||
rss = request.args.get('rss')
|
||||
|
||||
if rss:
|
||||
fg = FeedGenerator()
|
||||
fg.title('changedetection.io')
|
||||
fg.description('Feed description')
|
||||
fg.link(href='https://changedetection.io')
|
||||
|
||||
for watch in sorted_watches:
|
||||
if not watch['viewed']:
|
||||
fe = fg.add_entry()
|
||||
fe.title(watch['url'])
|
||||
fe.link(href=watch['url'])
|
||||
fe.description(watch['url'])
|
||||
fe.guid(watch['uuid'], permalink=False)
|
||||
dt = datetime.datetime.fromtimestamp(int(watch['newest_history_key']))
|
||||
dt = dt.replace(tzinfo=pytz.UTC)
|
||||
fe.pubDate(dt)
|
||||
|
||||
response = make_response(fg.rss_str())
|
||||
response.headers.set('Content-Type', 'application/rss+xml')
|
||||
return response
|
||||
|
||||
else:
|
||||
from backend import forms
|
||||
form = forms.quickWatchForm(request.form)
|
||||
|
||||
output = render_template("watch-overview.html",
|
||||
form=form,
|
||||
watches=sorted_watches,
|
||||
tags=existing_tags,
|
||||
active_tag=limit_tag,
|
||||
has_unviewed=datastore.data['has_unviewed'])
|
||||
|
||||
return output
|
||||
|
||||
@app.route("/scrub", methods=['GET', 'POST'])
|
||||
@login_required
|
||||
def scrub_page():
|
||||
|
||||
import re
|
||||
|
||||
if request.method == 'POST':
|
||||
confirmtext = request.form.get('confirmtext')
|
||||
limit_date = request.form.get('limit_date')
|
||||
|
||||
try:
|
||||
limit_date = limit_date.replace('T', ' ')
|
||||
# I noticed chrome will show '/' but actually submit '-'
|
||||
limit_date = limit_date.replace('-', '/')
|
||||
# In the case that :ss seconds are supplied
|
||||
limit_date = re.sub(r'(\d\d:\d\d)(:\d\d)', r'\1', limit_date)
|
||||
|
||||
str_to_dt = datetime.datetime.strptime(limit_date, '%Y/%m/%d %H:%M')
|
||||
limit_timestamp = int(str_to_dt.timestamp())
|
||||
|
||||
if limit_timestamp > time.time():
|
||||
flash("Timestamp is in the future, cannot continue.", 'error')
|
||||
return redirect(url_for('scrub_page'))
|
||||
|
||||
except ValueError:
|
||||
flash('Incorrect date format, cannot continue.', 'error')
|
||||
return redirect(url_for('scrub_page'))
|
||||
|
||||
if confirmtext == 'scrub':
|
||||
changes_removed = 0
|
||||
for uuid, watch in datastore.data['watching'].items():
|
||||
if limit_timestamp:
|
||||
changes_removed += datastore.scrub_watch(uuid, limit_timestamp=limit_timestamp)
|
||||
else:
|
||||
changes_removed += datastore.scrub_watch(uuid)
|
||||
|
||||
flash("Cleared snapshot history ({} snapshots removed)".format(changes_removed))
|
||||
else:
|
||||
flash('Incorrect confirmation text.', 'error')
|
||||
|
||||
return redirect(url_for('index'))
|
||||
|
||||
output = render_template("scrub.html")
|
||||
return output
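# A standalone sketch of the date normalisation performed in the POST branch above
# (parse_scrub_limit is a hypothetical helper, not in the codebase): the browser may submit
# "2021-05-01T10:30" or append seconds, so the value is coerced into '%Y/%m/%d %H:%M' first.
import datetime
import re

def parse_scrub_limit(limit_date):
    limit_date = limit_date.replace('T', ' ')                       # datetime-local separator
    limit_date = limit_date.replace('-', '/')                       # Chrome shows '/' but submits '-'
    limit_date = re.sub(r'(\d\d:\d\d)(:\d\d)', r'\1', limit_date)   # drop optional :ss
    return int(datetime.datetime.strptime(limit_date, '%Y/%m/%d %H:%M').timestamp())

# parse_scrub_limit("2021-05-01T10:30:15") == parse_scrub_limit("2021/05/01 10:30")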
|
||||
|
||||
|
||||
# If they edited an existing watch, we need to know to reset the current/previous md5 to include
|
||||
# the excluded text.
|
||||
def get_current_checksum_include_ignore_text(uuid):
|
||||
|
||||
import hashlib
|
||||
from backend import fetch_site_status
|
||||
|
||||
# Get the most recent one
|
||||
newest_history_key = datastore.get_val(uuid, 'newest_history_key')
|
||||
|
||||
# 0 means that there's only one, so there should be no 'unviewed' history available
|
||||
if newest_history_key == 0:
|
||||
newest_history_key = list(datastore.data['watching'][uuid]['history'].keys())[0]
|
||||
|
||||
if newest_history_key:
|
||||
with open(datastore.data['watching'][uuid]['history'][newest_history_key],
|
||||
encoding='utf-8') as file:
|
||||
raw_content = file.read()
|
||||
|
||||
handler = fetch_site_status.perform_site_check(datastore=datastore)
|
||||
stripped_content = handler.strip_ignore_text(raw_content,
|
||||
datastore.data['watching'][uuid]['ignore_text'])
|
||||
|
||||
checksum = hashlib.md5(stripped_content).hexdigest()
|
||||
return checksum
|
||||
|
||||
return datastore.data['watching'][uuid]['previous_md5']
|
||||
|
||||
|
||||
@app.route("/edit/<string:uuid>", methods=['GET', 'POST'])
|
||||
@login_required
|
||||
def edit_page(uuid):
|
||||
from backend import forms
|
||||
form = forms.watchForm(request.form)
|
||||
|
||||
# More for testing, possible to return the first/only
|
||||
if uuid == 'first':
|
||||
uuid = list(datastore.data['watching'].keys()).pop()
|
||||
|
||||
if request.method == 'GET':
|
||||
if not uuid in datastore.data['watching']:
|
||||
flash("No watch with the UUID %s found." % (uuid), "error")
|
||||
return redirect(url_for('index'))
|
||||
|
||||
populate_form_from_watch(form, datastore.data['watching'][uuid])
|
||||
|
||||
if request.method == 'POST' and form.validate():
|
||||
|
||||
# Re #110, if they submit the same as the default value, set it to None, so we continue to follow the default
|
||||
if form.minutes_between_check.data == datastore.data['settings']['requests']['minutes_between_check']:
|
||||
form.minutes_between_check.data = None
|
||||
|
||||
update_obj = {'url': form.url.data.strip(),
|
||||
'minutes_between_check': form.minutes_between_check.data,
|
||||
'tag': form.tag.data.strip(),
|
||||
'title': form.title.data.strip(),
|
||||
'headers': form.headers.data
|
||||
}
|
||||
|
||||
# Notification URLs
|
||||
datastore.data['watching'][uuid]['notification_urls'] = form.notification_urls.data
|
||||
|
||||
# Ignore text
|
||||
form_ignore_text = form.ignore_text.data
|
||||
datastore.data['watching'][uuid]['ignore_text'] = form_ignore_text
|
||||
|
||||
# Reset the previous_md5 so we process a new snapshot including stripping ignore text.
|
||||
if form_ignore_text:
|
||||
if len(datastore.data['watching'][uuid]['history']):
|
||||
update_obj['previous_md5'] = get_current_checksum_include_ignore_text(uuid=uuid)
|
||||
|
||||
|
||||
# Reset the previous_md5 so we process a new snapshot with the new CSS filter applied.
# (Compare before assigning, otherwise the check can never be true.)
if form.css_filter.data.strip() != datastore.data['watching'][uuid]['css_filter']:
if len(datastore.data['watching'][uuid]['history']):
update_obj['previous_md5'] = get_current_checksum_include_ignore_text(uuid=uuid)

datastore.data['watching'][uuid]['css_filter'] = form.css_filter.data.strip()
|
||||
|
||||
|
||||
datastore.data['watching'][uuid].update(update_obj)
|
||||
datastore.needs_write = True
|
||||
flash("Updated watch.")
|
||||
|
||||
# Queue the watch for immediate recheck
|
||||
update_q.put(uuid)
|
||||
|
||||
if form.trigger_check.data:
|
||||
n_object = {'watch_url': form.url.data.strip(),
|
||||
'notification_urls': form.notification_urls.data,
|
||||
'uuid': uuid}
|
||||
notification_q.put(n_object)
|
||||
|
||||
flash('Notifications queued.')
|
||||
|
||||
# Diff page [edit] link should go back to diff page
|
||||
if request.args.get("next") and request.args.get("next") == 'diff':
|
||||
return redirect(url_for('diff_history_page', uuid=uuid))
|
||||
else:
|
||||
return redirect(url_for('index'))
|
||||
|
||||
else:
|
||||
if request.method == 'POST' and not form.validate():
|
||||
flash("An error occurred, please see below.", "error")
|
||||
|
||||
# Re #110 offer the default minutes
|
||||
using_default_minutes = False
|
||||
if form.minutes_between_check.data == None:
|
||||
form.minutes_between_check.data = datastore.data['settings']['requests']['minutes_between_check']
|
||||
using_default_minutes = True
|
||||
|
||||
output = render_template("edit.html",
|
||||
uuid=uuid,
|
||||
watch=datastore.data['watching'][uuid],
|
||||
form=form,
|
||||
using_default_minutes=using_default_minutes
|
||||
)
|
||||
|
||||
return output
|
||||
|
||||
@app.route("/settings", methods=['GET', "POST"])
|
||||
@login_required
|
||||
def settings_page():
|
||||
|
||||
from backend import forms
|
||||
form = forms.globalSettingsForm(request.form)
|
||||
|
||||
if request.method == 'GET':
|
||||
form.minutes_between_check.data = int(datastore.data['settings']['requests']['minutes_between_check'])
|
||||
form.notification_urls.data = datastore.data['settings']['application']['notification_urls']
|
||||
form.extract_title_as_title.data = datastore.data['settings']['application']['extract_title_as_title']
|
||||
form.notification_title.data = datastore.data['settings']['application']['notification_title']
|
||||
form.notification_body.data = datastore.data['settings']['application']['notification_body']
|
||||
|
||||
# Password unset is a GET
|
||||
if request.values.get('removepassword') == 'yes':
|
||||
from pathlib import Path
|
||||
datastore.data['settings']['application']['password'] = False
|
||||
flash("Password protection removed.", 'notice')
|
||||
flask_login.logout_user()
|
||||
return redirect(url_for('settings_page'))
|
||||
|
||||
if request.method == 'POST' and form.validate():
|
||||
|
||||
datastore.data['settings']['application']['notification_urls'] = form.notification_urls.data
|
||||
datastore.data['settings']['requests']['minutes_between_check'] = form.minutes_between_check.data
|
||||
datastore.data['settings']['application']['extract_title_as_title'] = form.extract_title_as_title.data
|
||||
datastore.data['settings']['application']['notification_title'] = form.notification_title.data
|
||||
datastore.data['settings']['application']['notification_body'] = form.notification_body.data
|
||||
|
||||
datastore.data['settings']['application']['notification_urls'] = form.notification_urls.data
|
||||
datastore.needs_write = True
|
||||
|
||||
if form.trigger_check.data and len(form.notification_urls.data):
|
||||
n_object = {'watch_url': "Test from changedetection.io!",
|
||||
'notification_urls': form.notification_urls.data}
|
||||
notification_q.put(n_object)
|
||||
flash('Notifications queued.')
|
||||
|
||||
if form.password.encrypted_password:
|
||||
datastore.data['settings']['application']['password'] = form.password.encrypted_password
|
||||
flash("Password protection enabled.", 'notice')
|
||||
flask_login.logout_user()
|
||||
return redirect(url_for('index'))
|
||||
|
||||
flash("Settings updated.")
|
||||
|
||||
if request.method == 'POST' and not form.validate():
|
||||
flash("An error occurred, please see below.", "error")
|
||||
|
||||
output = render_template("settings.html", form=form)
|
||||
return output
|
||||
|
||||
@app.route("/import", methods=['GET', "POST"])
|
||||
@login_required
|
||||
def import_page():
|
||||
import validators
|
||||
remaining_urls = []
|
||||
|
||||
good = 0
|
||||
|
||||
if request.method == 'POST':
|
||||
urls = request.values.get('urls').split("\n")
|
||||
for url in urls:
|
||||
url = url.strip()
|
||||
if len(url) and validators.url(url):
|
||||
new_uuid = datastore.add_watch(url=url.strip(), tag="")
|
||||
# Straight into the queue.
|
||||
update_q.put(new_uuid)
|
||||
good += 1
|
||||
else:
|
||||
if len(url):
|
||||
remaining_urls.append(url)
|
||||
|
||||
flash("{} Imported, {} Skipped.".format(good, len(remaining_urls)))
|
||||
|
||||
if len(remaining_urls) == 0:
|
||||
# Looking good, redirect to index.
|
||||
return redirect(url_for('index'))
|
||||
|
||||
# Could be some remaining, or we could be on GET
|
||||
output = render_template("import.html",
|
||||
remaining="\n".join(remaining_urls)
|
||||
)
|
||||
return output
|
||||
|
||||
# Clear all statuses, so we do not see the 'unviewed' class
|
||||
@app.route("/api/mark-all-viewed", methods=['GET'])
|
||||
@login_required
|
||||
def mark_all_viewed():
|
||||
|
||||
# Save the current newest history as the most recently viewed
|
||||
for watch_uuid, watch in datastore.data['watching'].items():
|
||||
datastore.set_last_viewed(watch_uuid, watch['newest_history_key'])
|
||||
|
||||
flash("Cleared all statuses.")
|
||||
return redirect(url_for('index'))
|
||||
|
||||
@app.route("/diff/<string:uuid>", methods=['GET'])
|
||||
@login_required
|
||||
def diff_history_page(uuid):
|
||||
|
||||
# More for testing, possible to return the first/only
|
||||
if uuid == 'first':
|
||||
uuid = list(datastore.data['watching'].keys()).pop()
|
||||
|
||||
extra_stylesheets = [url_for('static_content', group='styles', filename='diff.css')]
|
||||
try:
|
||||
watch = datastore.data['watching'][uuid]
|
||||
except KeyError:
|
||||
flash("No history found for the specified link, bad link?", "error")
|
||||
return redirect(url_for('index'))
|
||||
|
||||
dates = list(watch['history'].keys())
|
||||
# Convert to int, sort and back to str again
|
||||
dates = [int(i) for i in dates]
|
||||
dates.sort(reverse=True)
|
||||
dates = [str(i) for i in dates]
|
||||
|
||||
if len(dates) < 2:
|
||||
flash("Not enough saved change detection snapshots to produce a report.", "error")
|
||||
return redirect(url_for('index'))
|
||||
|
||||
# Save the current newest history as the most recently viewed
|
||||
datastore.set_last_viewed(uuid, dates[0])
|
||||
|
||||
newest_file = watch['history'][dates[0]]
|
||||
with open(newest_file, 'r') as f:
|
||||
newest_version_file_contents = f.read()
|
||||
|
||||
previous_version = request.args.get('previous_version')
|
||||
|
||||
try:
|
||||
previous_file = watch['history'][previous_version]
|
||||
except KeyError:
|
||||
# Not present, use a default value, the second one in the sorted list.
|
||||
previous_file = watch['history'][dates[1]]
|
||||
|
||||
with open(previous_file, 'r') as f:
|
||||
previous_version_file_contents = f.read()
|
||||
|
||||
output = render_template("diff.html", watch_a=watch,
|
||||
newest=newest_version_file_contents,
|
||||
previous=previous_version_file_contents,
|
||||
extra_stylesheets=extra_stylesheets,
|
||||
versions=dates[1:],
|
||||
uuid=uuid,
|
||||
newest_version_timestamp=dates[0],
|
||||
current_previous_version=str(previous_version),
|
||||
current_diff_url=watch['url'],
|
||||
extra_title=" - Diff - {}".format(watch['title'] if watch['title'] else watch['url']),
|
||||
left_sticky= True )
|
||||
|
||||
return output
|
||||
|
||||
@app.route("/preview/<string:uuid>", methods=['GET'])
|
||||
@login_required
|
||||
def preview_page(uuid):
|
||||
|
||||
# More for testing, possible to return the first/only
|
||||
if uuid == 'first':
|
||||
uuid = list(datastore.data['watching'].keys()).pop()
|
||||
|
||||
extra_stylesheets = [url_for('static_content', group='styles', filename='diff.css')]
|
||||
|
||||
try:
|
||||
watch = datastore.data['watching'][uuid]
|
||||
except KeyError:
|
||||
flash("No history found for the specified link, bad link?", "error")
|
||||
return redirect(url_for('index'))
|
||||
|
||||
newest = list(watch['history'].keys())[-1]
|
||||
with open(watch['history'][newest], 'r') as f:
|
||||
content = f.readlines()
|
||||
|
||||
output = render_template("preview.html",
|
||||
content=content,
|
||||
extra_stylesheets=extra_stylesheets,
|
||||
current_diff_url=watch['url'],
|
||||
uuid=uuid)
|
||||
return output
|
||||
|
||||
|
||||
@app.route("/favicon.ico", methods=['GET'])
|
||||
def favicon():
|
||||
return send_from_directory("/app/static/images", filename="favicon.ico")
|
||||
|
||||
# We're good but backups are even better!
|
||||
@app.route("/backup", methods=['GET'])
|
||||
@login_required
|
||||
def get_backup():
|
||||
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
|
||||
# Remove any existing backup file, for now we just keep one file
|
||||
for previous_backup_filename in Path(app.config['datastore_path']).rglob('changedetection-backup-*.zip'):
|
||||
os.unlink(previous_backup_filename)
|
||||
|
||||
# create a ZipFile object
|
||||
backupname = "changedetection-backup-{}.zip".format(int(time.time()))
|
||||
|
||||
# We only care about UUIDS from the current index file
|
||||
uuids = list(datastore.data['watching'].keys())
|
||||
backup_filepath = os.path.join(app.config['datastore_path'], backupname)
|
||||
|
||||
with zipfile.ZipFile(backup_filepath, "w",
|
||||
compression=zipfile.ZIP_DEFLATED,
|
||||
compresslevel=8) as zipObj:
|
||||
|
||||
# Be sure we're written fresh
|
||||
datastore.sync_to_json()
|
||||
|
||||
# Add the index
|
||||
zipObj.write(os.path.join(app.config['datastore_path'], "url-watches.json"), arcname="url-watches.json")
|
||||
|
||||
# Add the flask app secret
|
||||
zipObj.write(os.path.join(app.config['datastore_path'], "secret.txt"), arcname="secret.txt")
|
||||
|
||||
# Add any snapshot data we find, use the full path to access the file, but make the file 'relative' in the Zip.
|
||||
for txt_file_path in Path(app.config['datastore_path']).rglob('*.txt'):
|
||||
parent_p = txt_file_path.parent
|
||||
if parent_p.name in uuids:
|
||||
zipObj.write(txt_file_path,
|
||||
arcname=str(txt_file_path).replace(app.config['datastore_path'], ''),
|
||||
compress_type=zipfile.ZIP_DEFLATED,
|
||||
compresslevel=8)
|
||||
|
||||
# Create a list file with just the URLs, so it's easier to port somewhere else in the future
|
||||
list_file = os.path.join(app.config['datastore_path'], "url-list.txt")
|
||||
with open(list_file, "w") as f:
|
||||
for uuid in datastore.data['watching']:
|
||||
url = datastore.data['watching'][uuid]['url']
|
||||
f.write("{}\r\n".format(url))
|
||||
|
||||
# Add it to the Zip
|
||||
zipObj.write(list_file,
|
||||
arcname="url-list.txt",
|
||||
compress_type=zipfile.ZIP_DEFLATED,
|
||||
compresslevel=8)
|
||||
|
||||
return send_from_directory(app.config['datastore_path'], backupname, as_attachment=True)
|
||||
|
||||
@app.route("/static/<string:group>/<string:filename>", methods=['GET'])
|
||||
def static_content(group, filename):
|
||||
# These files should be in our subdirectory
|
||||
full_path = os.path.realpath(__file__)
|
||||
p = os.path.dirname(full_path)
|
||||
|
||||
try:
|
||||
return send_from_directory("{}/static/{}".format(p, group), filename=filename)
|
||||
except FileNotFoundError:
|
||||
abort(404)
|
||||
|
||||
@app.route("/api/add", methods=['POST'])
|
||||
@login_required
|
||||
def api_watch_add():
|
||||
from backend import forms
|
||||
form = forms.quickWatchForm(request.form)
|
||||
|
||||
if form.validate():
|
||||
|
||||
url = request.form.get('url').strip()
|
||||
if datastore.url_exists(url):
|
||||
flash('The URL {} already exists'.format(url), "error")
|
||||
return redirect(url_for('index'))
|
||||
|
||||
# @todo add_watch should throw a custom Exception for validation etc
|
||||
new_uuid = datastore.add_watch(url=url, tag=request.form.get('tag').strip())
|
||||
# Straight into the queue.
|
||||
update_q.put(new_uuid)
|
||||
|
||||
flash("Watch added.")
|
||||
return redirect(url_for('index'))
|
||||
else:
|
||||
flash("Error")
|
||||
return redirect(url_for('index'))
|
||||
|
||||
@app.route("/api/delete", methods=['GET'])
|
||||
@login_required
|
||||
def api_delete():
|
||||
|
||||
uuid = request.args.get('uuid')
|
||||
datastore.delete(uuid)
|
||||
flash('Deleted.')
|
||||
|
||||
return redirect(url_for('index'))
|
||||
|
||||
@app.route("/api/checknow", methods=['GET'])
|
||||
@login_required
|
||||
def api_watch_checknow():
|
||||
|
||||
tag = request.args.get('tag')
|
||||
uuid = request.args.get('uuid')
|
||||
i = 0
|
||||
|
||||
running_uuids = []
|
||||
for t in running_update_threads:
|
||||
running_uuids.append(t.current_uuid)
|
||||
|
||||
# @todo check thread is running and skip
|
||||
|
||||
if uuid:
|
||||
if uuid not in running_uuids:
|
||||
update_q.put(uuid)
|
||||
i = 1
|
||||
|
||||
elif tag != None:
|
||||
# Items that have this current tag
|
||||
for watch_uuid, watch in datastore.data['watching'].items():
|
||||
if (tag != None and tag in watch['tag']):
|
||||
if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
|
||||
update_q.put(watch_uuid)
|
||||
i += 1
|
||||
|
||||
else:
|
||||
# No tag, no uuid, add everything.
|
||||
for watch_uuid, watch in datastore.data['watching'].items():
|
||||
|
||||
if watch_uuid not in running_uuids and not datastore.data['watching'][watch_uuid]['paused']:
|
||||
update_q.put(watch_uuid)
|
||||
i += 1
|
||||
flash("{} watches are rechecking.".format(i))
|
||||
return redirect(url_for('index', tag=tag))
|
||||
|
||||
# @todo handle ctrl break
|
||||
ticker_thread = threading.Thread(target=ticker_thread_check_time_launch_checks).start()
|
||||
|
||||
threading.Thread(target=notification_runner).start()
|
||||
|
||||
# Check for new release version
|
||||
threading.Thread(target=check_for_new_version).start()
|
||||
return app
|
||||
|
||||
|
||||
# Check for new version and anonymous stats
|
||||
def check_for_new_version():
|
||||
import requests
|
||||
|
||||
import urllib3
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
while not app.config.exit.is_set():
|
||||
try:
|
||||
r = requests.post("https://changedetection.io/check-ver.php",
|
||||
data={'version': datastore.data['version_tag'],
|
||||
'app_guid': datastore.data['app_guid'],
|
||||
'watch_count': len(datastore.data['watching'])
|
||||
},
|
||||
|
||||
verify=False)
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if "new_version" in r.text:
|
||||
app.config['NEW_VERSION_AVAILABLE'] = True
|
||||
except:
|
||||
pass
|
||||
|
||||
# Check daily
|
||||
app.config.exit.wait(86400)
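# A minimal sketch of the shutdown-friendly polling pattern used above: a threading.Event
# doubles as the sleep timer, so a long daily wait can be interrupted the moment the app
# signals exit (do_check below is a hypothetical stand-in for the version check).
import threading

def run_periodically(do_check, exit_event: threading.Event, interval_seconds=86400):
    while not exit_event.is_set():
        try:
            do_check()
        except Exception:
            pass                                # never let the background check kill the thread
        exit_event.wait(interval_seconds)       # returns early as soon as exit_event is set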
|
||||
|
||||
def notification_runner():
|
||||
while not app.config.exit.is_set():
|
||||
try:
|
||||
# At the moment only one thread runs (single runner)
|
||||
n_object = notification_q.get(block=False)
|
||||
except queue.Empty:
|
||||
time.sleep(1)
|
||||
|
||||
else:
|
||||
# Process notifications
|
||||
try:
|
||||
from backend import notification
|
||||
notification.process_notification(n_object, datastore)
|
||||
|
||||
except Exception as e:
|
||||
print("Watch URL: {} Error {}".format(n_object['watch_url'], e))
|
||||
|
||||
|
||||
|
||||
# Thread runner to check every minute, look for new watches to feed into the Queue.
|
||||
def ticker_thread_check_time_launch_checks():
|
||||
from backend import update_worker
|
||||
|
||||
# Spin up Workers.
|
||||
for _ in range(datastore.data['settings']['requests']['workers']):
|
||||
new_worker = update_worker.update_worker(update_q, notification_q, app, datastore)
|
||||
running_update_threads.append(new_worker)
|
||||
new_worker.start()
|
||||
|
||||
while not app.config.exit.is_set():
|
||||
|
||||
# Get a list of watches by UUID that are currently fetching data
|
||||
running_uuids = []
|
||||
for t in running_update_threads:
|
||||
if t.current_uuid:
|
||||
running_uuids.append(t.current_uuid)
|
||||
|
||||
# Check for watches outside of the time threshold to put in the thread queue.
|
||||
for uuid, watch in datastore.data['watching'].items():
|
||||
|
||||
# If they supplied an individual entry minutes to threshold.
|
||||
if 'minutes_between_check' in watch and watch['minutes_between_check'] is not None:
|
||||
max_time = watch['minutes_between_check'] * 60
|
||||
else:
|
||||
# Default system wide.
|
||||
max_time = datastore.data['settings']['requests']['minutes_between_check'] * 60
|
||||
|
||||
threshold = time.time() - max_time
|
||||
|
||||
# Yeah, put it in the queue, it's more than time.
|
||||
if not watch['paused'] and watch['last_checked'] <= threshold:
|
||||
if not uuid in running_uuids and uuid not in update_q.queue:
|
||||
update_q.put(uuid)
|
||||
|
||||
# Wait a few seconds before checking the list again
|
||||
time.sleep(3)
|
||||
|
||||
# Should be low so we can break this out in testing
|
||||
app.config.exit.wait(1)
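# A pure-function sketch of the scheduling rule in the loop above (is_due_for_recheck is a
# hypothetical helper): a watch is queued when it is not paused and its last check is older
# than its own interval, falling back to the system-wide default when it has none.
import time

def is_due_for_recheck(watch, default_minutes, now=None):
    now = now or time.time()
    minutes = watch.get('minutes_between_check') or default_minutes
    return not watch['paused'] and watch['last_checked'] <= now - (minutes * 60)

# is_due_for_recheck({'paused': False, 'last_checked': 0, 'minutes_between_check': None}, 180) -> True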
|
||||
@@ -1,176 +0,0 @@
|
||||
import time
|
||||
import requests
|
||||
import hashlib
|
||||
from inscriptis import get_text
|
||||
import urllib3
|
||||
from . import html_tools
|
||||
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
|
||||
# Some common stuff here that can be moved to a base class
|
||||
class perform_site_check():
|
||||
|
||||
def __init__(self, *args, datastore, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.datastore = datastore
|
||||
|
||||
def strip_ignore_text(self, content, list_ignore_text):
|
||||
import re
|
||||
ignore = []
|
||||
ignore_regex = []
|
||||
for k in list_ignore_text:
|
||||
|
||||
# Is it a regex?
|
||||
if k[0] == '/':
|
||||
ignore_regex.append(k.strip(" /"))
|
||||
else:
|
||||
ignore.append(k)
|
||||
|
||||
output = []
|
||||
for line in content.splitlines():
|
||||
|
||||
# Always ignore blank lines in this mode. (when this function gets called)
|
||||
if len(line.strip()):
|
||||
regex_matches = False
|
||||
|
||||
# if any of these match, skip
|
||||
for regex in ignore_regex:
|
||||
try:
|
||||
if re.search(regex, line, re.IGNORECASE):
|
||||
regex_matches = True
|
||||
except Exception as e:
|
||||
continue
|
||||
|
||||
if not regex_matches and not any(skip_text in line for skip_text in ignore):
|
||||
output.append(line.encode('utf8'))
|
||||
|
||||
return "\n".encode('utf8').join(output)
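# Usage sketch for strip_ignore_text (values are illustrative): plain entries are substring
# matches, entries wrapped in /.../ are treated as case-insensitive regular expressions, and
# the surviving lines come back UTF-8 encoded, ready for the md5 comparison in run().
checker = perform_site_check(datastore=None)    # datastore is not used by strip_ignore_text
content = "Price: 100 EUR\nVisitors today: 42\nLast updated 12:00"
kept = checker.strip_ignore_text(content, ["Visitors today", r"/last updated \d\d:\d\d/"])
# kept == b"Price: 100 EUR"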
|
||||
|
||||
|
||||
|
||||
def run(self, uuid):
|
||||
timestamp = int(time.time()) # used for storage etc too
|
||||
|
||||
stripped_text_from_html = False
|
||||
changed_detected = False
|
||||
|
||||
update_obj = {'previous_md5': self.datastore.data['watching'][uuid]['previous_md5'],
|
||||
'history': {},
|
||||
"last_checked": timestamp
|
||||
}
|
||||
|
||||
extra_headers = self.datastore.get_val(uuid, 'headers')
|
||||
|
||||
# Tweak the base config with the per-watch ones
|
||||
request_headers = self.datastore.data['settings']['headers'].copy()
|
||||
request_headers.update(extra_headers)
|
||||
|
||||
# https://github.com/psf/requests/issues/4525
|
||||
# Requests doesn't yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
|
||||
# do this by accident.
|
||||
if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']:
|
||||
request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '')
|
||||
|
||||
try:
|
||||
timeout = self.datastore.data['settings']['requests']['timeout']
|
||||
except KeyError:
|
||||
# @todo yeah this should go back to the default value in store.py, but this whole object should abstract off it
|
||||
timeout = 15
|
||||
|
||||
try:
|
||||
url = self.datastore.get_val(uuid, 'url')
|
||||
|
||||
r = requests.get(url,
|
||||
headers=request_headers,
|
||||
timeout=timeout,
|
||||
verify=False)
|
||||
|
||||
html = r.text
|
||||
|
||||
is_html = True
|
||||
css_filter_rule = self.datastore.data['watching'][uuid]['css_filter']
|
||||
if css_filter_rule and len(css_filter_rule.strip()):
|
||||
if 'json:' in css_filter_rule:
|
||||
# POC hack, @todo rename vars, see how it fits in with the javascript version
|
||||
import json
|
||||
from jsonpath_ng import jsonpath, parse
|
||||
|
||||
json_data = json.loads(html)
|
||||
jsonpath_expression = parse(css_filter_rule.replace('json:', ''))
|
||||
match = jsonpath_expression.find(json_data)
|
||||
s = []
|
||||
|
||||
# More than one result, we will return it as a JSON list.
|
||||
if len(match) > 1:
|
||||
for i in match:
|
||||
s.append(i.value)
|
||||
|
||||
# Single value, use just the value, as it could be later used in a token in notifications.
|
||||
if len(match) == 1:
|
||||
s = match[0].value
|
||||
|
||||
stripped_text_from_html = json.dumps(s, indent=4)
|
||||
is_html = False
|
||||
|
||||
else:
|
||||
# CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
|
||||
html = html_tools.css_filter(css_filter=css_filter_rule, html_content=r.content)
|
||||
|
||||
if is_html:
|
||||
stripped_text_from_html = get_text(html)
|
||||
|
||||
# Usually from networkIO/requests level
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout) as e:
|
||||
update_obj["last_error"] = str(e)
|
||||
print(str(e))
|
||||
|
||||
except requests.exceptions.MissingSchema:
|
||||
print("Skipping {} due to missing schema/bad url".format(uuid))
|
||||
|
||||
# Usually from html2text level
|
||||
except Exception as e:
|
||||
# except UnicodeDecodeError as e:
|
||||
update_obj["last_error"] = str(e)
|
||||
print(str(e))
|
||||
# figure out how to deal with this cleaner..
|
||||
# 'utf-8' codec can't decode byte 0xe9 in position 480: invalid continuation byte
|
||||
|
||||
|
||||
else:
|
||||
# We rely on the actual text in the html output.. many sites have random script vars etc,
|
||||
# in the future we'll implement other mechanisms.
|
||||
|
||||
update_obj["last_check_status"] = r.status_code
|
||||
update_obj["last_error"] = False
|
||||
|
||||
if not len(r.text):
|
||||
update_obj["last_error"] = "Empty reply"
|
||||
|
||||
# If there's text to skip
|
||||
# @todo we could abstract out the get_text() to handle this cleaner
|
||||
if len(self.datastore.data['watching'][uuid]['ignore_text']):
|
||||
content = self.strip_ignore_text(stripped_text_from_html,
|
||||
self.datastore.data['watching'][uuid]['ignore_text'])
|
||||
else:
|
||||
content = stripped_text_from_html.encode('utf8')
|
||||
|
||||
fetched_md5 = hashlib.md5(content).hexdigest()
|
||||
|
||||
# could be None or False depending on JSON type
|
||||
if self.datastore.data['watching'][uuid]['previous_md5'] != fetched_md5:
|
||||
changed_detected = True
|
||||
|
||||
# Don't confuse people by updating as last-changed, when it actually just changed from None..
|
||||
if self.datastore.get_val(uuid, 'previous_md5'):
|
||||
update_obj["last_changed"] = timestamp
|
||||
|
||||
update_obj["previous_md5"] = fetched_md5
|
||||
|
||||
# Extract title as title
|
||||
if self.datastore.data['settings']['application']['extract_title_as_title']:
|
||||
if not self.datastore.data['watching'][uuid]['title'] or not len(self.datastore.data['watching'][uuid]['title']):
|
||||
update_obj['title'] = html_tools.extract_element(find='title', html_content=html)
|
||||
|
||||
|
||||
return changed_detected, update_obj, stripped_text_from_html
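# A short sketch of the "json:" filter path handled in run() above, using jsonpath_ng with
# illustrative data: several matches come back as a JSON list, a single match is kept as the
# bare value so it can later be used as a token in notifications.
import json
from jsonpath_ng import parse

document = '{"products": [{"name": "A", "price": 10}, {"name": "B", "price": 12}]}'
values = [m.value for m in parse("products[*].price").find(json.loads(document))]
snapshot = json.dumps(values if len(values) > 1 else values[0], indent=4)   # a JSON list of [10, 12]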
|
||||
backend/forms.py (159 lines)
@@ -1,159 +0,0 @@
|
||||
from wtforms import Form, BooleanField, StringField, PasswordField, validators, IntegerField, fields, TextAreaField, \
|
||||
Field
|
||||
from wtforms import widgets
|
||||
from wtforms.validators import ValidationError
|
||||
from wtforms.fields import html5
|
||||
|
||||
|
||||
class StringListField(StringField):
|
||||
widget = widgets.TextArea()
|
||||
|
||||
def _value(self):
|
||||
if self.data:
|
||||
return "\r\n".join(self.data)
|
||||
else:
|
||||
return u''
|
||||
|
||||
# incoming
|
||||
def process_formdata(self, valuelist):
|
||||
if valuelist:
|
||||
# Remove empty strings
|
||||
cleaned = list(filter(None, valuelist[0].split("\n")))
|
||||
self.data = [x.strip() for x in cleaned]
|
||||
|
||||
else:
|
||||
self.data = []
|
||||
|
||||
|
||||
|
||||
class SaltyPasswordField(StringField):
|
||||
widget = widgets.PasswordInput()
|
||||
encrypted_password = ""
|
||||
|
||||
def build_password(self, password):
|
||||
import hashlib
|
||||
import base64
|
||||
import secrets
|
||||
|
||||
# Make a new salt on every new password and store it with the password
|
||||
salt = secrets.token_bytes(32)
|
||||
|
||||
key = hashlib.pbkdf2_hmac('sha256', password.encode('utf-8'), salt, 100000)
|
||||
store = base64.b64encode(salt + key).decode('ascii')
|
||||
|
||||
return store
|
||||
|
||||
# incoming
|
||||
def process_formdata(self, valuelist):
|
||||
if valuelist:
|
||||
# Be really sure it's non-zero in length
|
||||
if len(valuelist[0].strip()) > 0:
|
||||
self.encrypted_password = self.build_password(valuelist[0])
|
||||
self.data = ""
|
||||
else:
|
||||
self.data = False
|
||||
|
||||
|
||||
# Separated by key:value
|
||||
class StringDictKeyValue(StringField):
|
||||
widget = widgets.TextArea()
|
||||
|
||||
def _value(self):
|
||||
if self.data:
|
||||
output = u''
|
||||
for k in self.data.keys():
|
||||
output += "{}: {}\r\n".format(k, self.data[k])
|
||||
|
||||
return output
|
||||
else:
|
||||
return u''
|
||||
|
||||
# incoming
|
||||
def process_formdata(self, valuelist):
|
||||
if valuelist:
|
||||
self.data = {}
|
||||
# Remove empty strings
|
||||
cleaned = list(filter(None, valuelist[0].split("\n")))
|
||||
for s in cleaned:
|
||||
parts = s.strip().split(':')
|
||||
if len(parts) == 2:
|
||||
self.data.update({parts[0].strip(): parts[1].strip()})
|
||||
|
||||
else:
|
||||
self.data = {}
|
||||
|
||||
class ValidateListRegex(object):
|
||||
"""
|
||||
Validates that anything that looks like a regex passes as a regex
|
||||
"""
|
||||
def __init__(self, message=None):
|
||||
self.message = message
|
||||
|
||||
def __call__(self, form, field):
|
||||
import re
|
||||
|
||||
for line in field.data:
|
||||
if line[0] == '/' and line[-1] == '/':
|
||||
# Because internally we don't wrap in /
|
||||
line = line.strip('/')
|
||||
try:
|
||||
re.compile(line)
|
||||
except re.error:
|
||||
message = field.gettext('RegEx \'%s\' is not a valid regular expression.')
|
||||
raise ValidationError(message % (line))
|
||||
|
||||
class ValidateCSSJSONInput(object):
|
||||
"""
|
||||
Filter validation
|
||||
@todo CSS validator ;)
|
||||
"""
|
||||
|
||||
def __init__(self, message=None):
|
||||
self.message = message
|
||||
|
||||
def __call__(self, form, field):
|
||||
if 'json:' in field.data:
|
||||
from jsonpath_ng.exceptions import JsonPathParserError
|
||||
from jsonpath_ng import jsonpath, parse
|
||||
|
||||
input = field.data.replace('json:', '')
|
||||
|
||||
try:
|
||||
parse(input)
|
||||
except JsonPathParserError as e:
|
||||
message = field.gettext('\'%s\' is not a valid JSONPath expression. (%s)')
|
||||
raise ValidationError(message % (input, str(e)))
|
||||
|
||||
class quickWatchForm(Form):
|
||||
# https://wtforms.readthedocs.io/en/2.3.x/fields/#module-wtforms.fields.html5
|
||||
# `require_tld` = False is needed even for the test harness "http://localhost:5005.." to run
|
||||
|
||||
url = html5.URLField('URL', [validators.URL(require_tld=False)])
|
||||
tag = StringField('Tag', [validators.Optional(), validators.Length(max=35)])
|
||||
|
||||
class watchForm(quickWatchForm):
|
||||
|
||||
minutes_between_check = html5.IntegerField('Maximum time in minutes until recheck',
|
||||
[validators.Optional(), validators.NumberRange(min=1)])
|
||||
css_filter = StringField('CSS/JSON Filter', [ValidateCSSJSONInput()])
|
||||
title = StringField('Title')
|
||||
|
||||
ignore_text = StringListField('Ignore Text', [ValidateListRegex()])
|
||||
notification_urls = StringListField('Notification URL List')
|
||||
headers = StringDictKeyValue('Request Headers')
|
||||
trigger_check = BooleanField('Send test notification on save')
|
||||
|
||||
|
||||
class globalSettingsForm(Form):
|
||||
|
||||
password = SaltyPasswordField()
|
||||
|
||||
minutes_between_check = html5.IntegerField('Maximum time in minutes until recheck',
|
||||
[validators.NumberRange(min=1)])
|
||||
|
||||
notification_urls = StringListField('Notification URL List')
|
||||
extract_title_as_title = BooleanField('Extract <title> from document and use as watch title')
|
||||
trigger_check = BooleanField('Send test notification on save')
|
||||
|
||||
notification_title = StringField('Notification Title')
|
||||
notification_body = TextAreaField('Notification Body')
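# Usage sketch: these forms are normally fed request.form directly; outside a request the
# same thing can be exercised with a werkzeug MultiDict (handy in tests).
from werkzeug.datastructures import MultiDict

form = quickWatchForm(MultiDict([('url', 'http://localhost:5005/test'), ('tag', 'example')]))
assert form.validate()    # require_tld=False lets the localhost test-harness URL through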
|
||||
@@ -1,26 +0,0 @@
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
|
||||
# Given a CSS Rule, and a blob of HTML, return the blob of HTML that matches
|
||||
def css_filter(css_filter, html_content):
|
||||
soup = BeautifulSoup(html_content, "html.parser")
|
||||
html_block = ""
|
||||
for item in soup.select(css_filter, separator=""):
|
||||
html_block += str(item)
|
||||
|
||||
return html_block + "\n"
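# Usage sketch of the selector mechanics above (plain BeautifulSoup select, illustrative
# values): only elements matching the CSS rule survive, concatenated back into an HTML
# fragment that fetch_site_status then feeds through inscriptis' get_text().
from bs4 import BeautifulSoup

sample = '<div><span class="price">10.00</span><span class="noise">ads</span></div>'
block = "".join(str(el) for el in BeautifulSoup(sample, "html.parser").select(".price"))
# block == '<span class="price">10.00</span>'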
|
||||
|
||||
|
||||
# Extract/find element
|
||||
def extract_element(find='title', html_content=''):
|
||||
|
||||
# Re #106, be sure to handle when it's not found
|
||||
element_text = None
|
||||
|
||||
soup = BeautifulSoup(html_content, 'html.parser')
|
||||
result = soup.find(find)
|
||||
if result and result.string:
|
||||
element_text = result.string.strip()
|
||||
|
||||
return element_text
|
||||
|
||||
@@ -1,54 +0,0 @@
|
||||
import os
|
||||
import apprise
|
||||
|
||||
def process_notification(n_object, datastore):
|
||||
apobj = apprise.Apprise()
|
||||
for url in n_object['notification_urls']:
|
||||
apobj.add(url.strip())
|
||||
|
||||
# Get the notification body from datastore
|
||||
n_body = datastore.data['settings']['application']['notification_body']
|
||||
# Get the notification title from the datastore
|
||||
n_title = datastore.data['settings']['application']['notification_title']
|
||||
|
||||
# Insert variables into the notification content
|
||||
notification_parameters = create_notification_parameters(n_object)
|
||||
raw_notification_text = [n_body, n_title]
|
||||
|
||||
parameterised_notification_text = dict(
|
||||
[
|
||||
(i, n.replace(n, n.format(**notification_parameters)))
|
||||
for i, n in zip(['body', 'title'], raw_notification_text)
|
||||
]
|
||||
)
|
||||
|
||||
apobj.notify(
|
||||
body=parameterised_notification_text["body"],
|
||||
title=parameterised_notification_text["title"]
|
||||
)
|
||||
|
||||
|
||||
# Notification title + body content parameters get created here.
|
||||
def create_notification_parameters(n_object):
|
||||
|
||||
# in the case we send a test notification from the main settings, there is no UUID.
|
||||
uuid = n_object['uuid'] if 'uuid' in n_object else ''
|
||||
|
||||
# Create URLs to customise the notification with
|
||||
base_url = os.getenv('BASE_URL', '').strip('"')
|
||||
watch_url = n_object['watch_url']
|
||||
|
||||
if base_url != '':
|
||||
diff_url = "{}/diff/{}".format(base_url, uuid)
|
||||
preview_url = "{}/preview/{}".format(base_url, uuid)
|
||||
else:
|
||||
diff_url = preview_url = ''
|
||||
|
||||
return {
|
||||
'base_url': base_url,
|
||||
'watch_url': watch_url,
|
||||
'diff_url': diff_url,
|
||||
'preview_url': preview_url,
|
||||
'current_snapshot': n_object['current_snapshot'] if 'current_snapshot' in n_object else ''
|
||||
}
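# Usage sketch (illustrative values): the notification title/body templates held in settings
# are plain str.format templates over the parameter names returned above.
params = create_notification_parameters({'watch_url': 'https://example.com', 'uuid': '1234'})
title = 'ChangeDetection.io Notification - {watch_url}'.format(**params)
# title == 'ChangeDetection.io Notification - https://example.com'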
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
|
||||
# live_server will throw errors even with live_server_scope=function if I have the live_server setup in different functions
|
||||
# and I like to restart the server for each test (and have the test cleanup after each test)
|
||||
# merge request welcome :)
|
||||
|
||||
|
||||
# exit when any command fails
|
||||
set -e
|
||||
|
||||
# Re #65 - Ability to include a link back to the installation, in the notification.
|
||||
export BASE_URL="https://foobar.com"
|
||||
|
||||
find tests/test_*py -type f|while read test_name
|
||||
do
|
||||
echo "TEST RUNNING $test_name"
|
||||
pytest $test_name
|
||||
done
|
||||
|
(binary image removed; previous size 4.2 KiB)
@@ -1,16 +0,0 @@
|
||||
window.addEventListener("load", (event) => {
|
||||
// just an example for now
|
||||
function toggleVisible(elem) {
|
||||
// there are better ways to do this
|
||||
var x = document.getElementById(elem);
|
||||
if (x.style.display === "block") {
|
||||
x.style.display = "none";
|
||||
} else {
|
||||
x.style.display = "block";
|
||||
}
|
||||
}
|
||||
|
||||
document.getElementById("toggle-customise-notifications").onclick = function () {
|
||||
toggleVisible("notification-customisation");
|
||||
};
|
||||
});
|
||||
backend/static/styles/.gitignore (vendored, 1 line)
@@ -1 +0,0 @@
|
||||
node_modules
|
||||
backend/static/styles/package-lock.json (generated, 3485 lines; diff not shown)
backend/store.py (378 lines)
@@ -1,378 +0,0 @@
|
||||
from os import unlink, path, mkdir
|
||||
import json
|
||||
import uuid as uuid_builder
|
||||
from threading import Lock
|
||||
from copy import deepcopy
|
||||
|
||||
import logging
|
||||
import time
|
||||
import threading
|
||||
|
||||
|
||||
# Is there an existing library to ensure some data store (JSON etc) is in sync with CRUD methods?
|
||||
# Open a github issue if you know something :)
|
||||
# https://stackoverflow.com/questions/6190468/how-to-trigger-function-on-value-change
|
||||
class ChangeDetectionStore:
|
||||
lock = Lock()
|
||||
|
||||
def __init__(self, datastore_path="/datastore", include_default_watches=True):
|
||||
self.needs_write = False
|
||||
self.datastore_path = datastore_path
|
||||
self.json_store_path = "{}/url-watches.json".format(self.datastore_path)
|
||||
self.stop_thread = False
|
||||
|
||||
self.__data = {
|
||||
'note': "Hello! If you change this file manually, please be sure to restart your changedetection.io instance!",
|
||||
'watching': {},
|
||||
'settings': {
|
||||
'headers': {
|
||||
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36',
|
||||
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
|
||||
'Accept-Encoding': 'gzip, deflate', # No support for brotli in python requests yet.
|
||||
'Accept-Language': 'en-GB,en-US;q=0.9,en;'
|
||||
},
|
||||
'requests': {
|
||||
'timeout': 15, # Default 15 seconds
|
||||
'minutes_between_check': 3 * 60, # Default 3 hours
|
||||
'workers': 10 # Number of threads, lower is better for slow connections
|
||||
},
|
||||
'application': {
|
||||
'password': False,
|
||||
'extract_title_as_title': False,
|
||||
'notification_urls': [], # Apprise URL list
|
||||
# Custom notification content
|
||||
'notification_title': 'ChangeDetection.io Notification - {watch_url}',
|
||||
'notification_body': '{base_url}'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Base definition for all watchers
|
||||
self.generic_definition = {
|
||||
'url': None,
|
||||
'tag': None,
|
||||
'last_checked': 0,
|
||||
'last_changed': 0,
|
||||
'paused': False,
|
||||
'last_viewed': 0, # history key value of the last viewed via the [diff] link
|
||||
'newest_history_key': "",
|
||||
'title': None,
|
||||
# Re #110, so then if this is set to None, we know to use the default value instead
|
||||
# Requires setting to None on submit if it's the same as the default
|
||||
'minutes_between_check': None,
|
||||
'previous_md5': "",
|
||||
'uuid': str(uuid_builder.uuid4()),
|
||||
'headers': {}, # Extra headers to send
|
||||
'history': {}, # Dict of timestamp and output stripped filename
|
||||
'ignore_text': [], # List of text to ignore when calculating the comparison checksum
|
||||
'notification_urls': [], # List of URLs to add to the notification Queue (Usually AppRise)
|
||||
'css_filter': "",
|
||||
}
|
||||
|
||||
if path.isfile('backend/source.txt'):
|
||||
with open('backend/source.txt') as f:
|
||||
# Should be set in Dockerfile to look for /source.txt , this will give us the git commit #
|
||||
# So when someone gives us a backup file to examine, we know exactly what code they were running.
|
||||
self.__data['build_sha'] = f.read()
|
||||
|
||||
try:
|
||||
# @todo retest with ", encoding='utf-8'"
|
||||
with open(self.json_store_path) as json_file:
|
||||
from_disk = json.load(json_file)
|
||||
|
||||
# @todo isn't there a way to do this dict.update recursively?
# Problem here is if the one on the disk is missing a sub-struct, it won't be present anymore.
|
||||
if 'watching' in from_disk:
|
||||
self.__data['watching'].update(from_disk['watching'])
|
||||
|
||||
if 'app_guid' in from_disk:
|
||||
self.__data['app_guid'] = from_disk['app_guid']
|
||||
|
||||
if 'settings' in from_disk:
|
||||
if 'headers' in from_disk['settings']:
|
||||
self.__data['settings']['headers'].update(from_disk['settings']['headers'])
|
||||
|
||||
if 'requests' in from_disk['settings']:
|
||||
self.__data['settings']['requests'].update(from_disk['settings']['requests'])
|
||||
|
||||
if 'application' in from_disk['settings']:
|
||||
self.__data['settings']['application'].update(from_disk['settings']['application'])
|
||||
|
||||
# Reinitialise each `watching` with our generic_definition in the case that we add a new var in the future.
|
||||
# @todo pretty sure there's a pythonic way to do this with an abstracted(?) object!
|
||||
for uuid, watch in self.__data['watching'].items():
|
||||
_blank = deepcopy(self.generic_definition)
|
||||
_blank.update(watch)
|
||||
self.__data['watching'].update({uuid: _blank})
|
||||
self.__data['watching'][uuid]['newest_history_key'] = self.get_newest_history_key(uuid)
|
||||
print("Watching:", uuid, self.__data['watching'][uuid]['url'])
|
||||
|
||||
# First time run, doesn't exist.
|
||||
except (FileNotFoundError, json.decoder.JSONDecodeError):
|
||||
if include_default_watches:
|
||||
print("Creating JSON store at", self.datastore_path)
|
||||
|
||||
self.add_watch(url='http://www.quotationspage.com/random.php', tag='test')
|
||||
self.add_watch(url='https://news.ycombinator.com/', tag='Tech news')
|
||||
self.add_watch(url='https://www.gov.uk/coronavirus', tag='Covid')
|
||||
self.add_watch(url='https://changedetection.io', tag='Tech news')
|
||||
|
||||
self.__data['version_tag'] = "0.38.1"
|
||||
|
||||
# Helper to remove password protection
|
||||
password_reset_lockfile = "{}/removepassword.lock".format(self.datastore_path)
|
||||
if path.isfile(password_reset_lockfile):
|
||||
self.__data['settings']['application']['password'] = False
|
||||
unlink(password_reset_lockfile)
|
||||
|
||||
if not 'app_guid' in self.__data:
|
||||
import sys
|
||||
import os
|
||||
if "pytest" in sys.modules or "PYTEST_CURRENT_TEST" in os.environ:
|
||||
self.__data['app_guid'] = "test-" + str(uuid_builder.uuid4())
|
||||
else:
|
||||
self.__data['app_guid'] = str(uuid_builder.uuid4())
|
||||
|
||||
self.needs_write = True
|
||||
|
||||
# Finally start the thread that will manage periodic data saves to JSON
|
||||
save_data_thread = threading.Thread(target=self.save_datastore).start()
|
||||
|
||||
# Returns the newest key, but if there's only 1 record, then it's counted as not being new, so return 0.
|
||||
def get_newest_history_key(self, uuid):
|
||||
if len(self.__data['watching'][uuid]['history']) == 1:
|
||||
return 0
|
||||
|
||||
dates = list(self.__data['watching'][uuid]['history'].keys())
|
||||
# Convert to int, sort and back to str again
|
||||
dates = [int(i) for i in dates]
|
||||
dates.sort(reverse=True)
|
||||
if len(dates):
|
||||
# always keyed as str
|
||||
return str(dates[0])
|
||||
|
||||
return 0
|
||||
|
||||
def set_last_viewed(self, uuid, timestamp):
|
||||
self.data['watching'][uuid].update({'last_viewed': int(timestamp)})
|
||||
self.needs_write = True
|
||||
|
||||
def update_watch(self, uuid, update_obj):
|
||||
|
||||
# Skip if 'paused' state
|
||||
if self.__data['watching'][uuid]['paused']:
|
||||
return
|
||||
|
||||
with self.lock:
|
||||
|
||||
# In python 3.9 we have the |= dict operator, but that still will lose data on nested structures...
|
||||
for dict_key, d in self.generic_definition.items():
|
||||
if isinstance(d, dict):
|
||||
if update_obj is not None and dict_key in update_obj:
|
||||
self.__data['watching'][uuid][dict_key].update(update_obj[dict_key])
|
||||
del (update_obj[dict_key])
|
||||
|
||||
self.__data['watching'][uuid].update(update_obj)
|
||||
self.__data['watching'][uuid]['newest_history_key'] = self.get_newest_history_key(uuid)
|
||||
|
||||
self.needs_write = True
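# A minimal sketch of the merge rule above with illustrative values: nested dicts from the
# generic definition (e.g. 'history', 'headers') are update()d rather than replaced, so
# existing snapshots survive an update that only adds one new history entry.
existing = {'history': {'1640000000': '/datastore/uuid/a.txt'}, 'previous_md5': 'aaa'}
incoming = {'history': {'1640000600': '/datastore/uuid/b.txt'}, 'previous_md5': 'bbb'}

existing['history'].update(incoming.pop('history'))   # merge the nested dict first
existing.update(incoming)                             # then overlay the flat keys
# existing['history'] now holds both snapshot paths, and previous_md5 == 'bbb'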
|
||||
|
||||
@property
|
||||
def data(self):
|
||||
has_unviewed = False
|
||||
for uuid, v in self.__data['watching'].items():
|
||||
self.__data['watching'][uuid]['newest_history_key'] = self.get_newest_history_key(uuid)
|
||||
if int(v['newest_history_key']) <= int(v['last_viewed']):
|
||||
self.__data['watching'][uuid]['viewed'] = True
|
||||
|
||||
else:
|
||||
self.__data['watching'][uuid]['viewed'] = False
|
||||
has_unviewed = True
|
||||
|
||||
# #106 - Be sure this is None on empty string, False, None, etc
|
||||
if not self.__data['watching'][uuid]['title']:
|
||||
self.__data['watching'][uuid]['title'] = None
|
||||
|
||||
self.__data['has_unviewed'] = has_unviewed
|
||||
|
||||
return self.__data
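# A tiny sketch (hypothetical helper) of the "viewed" rule computed in the property above:
# a watch counts as viewed when its newest snapshot key is not newer than the last one the
# user opened via the [diff] link; a lone first snapshot (key 0) is therefore never "unviewed".
def is_viewed(newest_history_key, last_viewed):
    return int(newest_history_key) <= int(last_viewed)

# is_viewed(0, 0)                     -> True
# is_viewed('1640000600', 1640000000) -> False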
|
||||
|
||||
def get_all_tags(self):
|
||||
tags = []
|
||||
for uuid, watch in self.data['watching'].items():
|
||||
|
||||
# Support for comma separated list of tags.
|
||||
for tag in watch['tag'].split(','):
|
||||
tag = tag.strip()
|
||||
if tag not in tags:
|
||||
tags.append(tag)
|
||||
|
||||
tags.sort()
|
||||
return tags
|
||||
|
||||
def unlink_history_file(self, path):
|
||||
try:
|
||||
unlink(path)
|
||||
except (FileNotFoundError, IOError):
|
||||
pass
|
||||
|
||||
# Delete a single watch by UUID
|
||||
def delete(self, uuid):
|
||||
with self.lock:
|
||||
if uuid == 'all':
|
||||
self.__data['watching'] = {}
|
||||
|
||||
# GitHub #30 also delete history records
|
||||
for uuid in self.data['watching']:
|
||||
for path in self.data['watching'][uuid]['history'].values():
|
||||
self.unlink_history_file(path)
|
||||
|
||||
else:
|
||||
for path in self.data['watching'][uuid]['history'].values():
|
||||
self.unlink_history_file(path)
|
||||
|
||||
del self.data['watching'][uuid]
|
||||
|
||||
self.needs_write = True
|
||||
|
||||
def url_exists(self, url):
|
||||
|
||||
# Probably there should be a dict...
|
||||
for watch in self.data['watching'].values():
|
||||
if watch['url'] == url:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def get_val(self, uuid, val):
|
||||
# Probably there should be a dict...
|
||||
return self.data['watching'][uuid].get(val)
|
||||
|
||||
# Remove a watch's data but keep the entry (URL etc)
|
||||
def scrub_watch(self, uuid, limit_timestamp = False):
|
||||
|
||||
import hashlib
|
||||
del_timestamps = []
|
||||
|
||||
changes_removed = 0
|
||||
|
||||
for timestamp, path in self.data['watching'][uuid]['history'].items():
|
||||
if not limit_timestamp or (limit_timestamp is not False and int(timestamp) > limit_timestamp):
|
||||
self.unlink_history_file(path)
|
||||
del_timestamps.append(timestamp)
|
||||
changes_removed += 1
|
||||
|
||||
if not limit_timestamp:
|
||||
self.data['watching'][uuid]['last_checked'] = 0
|
||||
self.data['watching'][uuid]['last_changed'] = 0
|
||||
self.data['watching'][uuid]['previous_md5'] = 0
|
||||
|
||||
|
||||
for timestamp in del_timestamps:
|
||||
del self.data['watching'][uuid]['history'][str(timestamp)]
|
||||
|
||||
# If there was a limit timestamp, we need to reset some metadata about the entry
|
||||
# This has to happen after we remove the others from the list
|
||||
if limit_timestamp:
|
||||
newest_key = self.get_newest_history_key(uuid)
|
||||
if newest_key:
|
||||
self.data['watching'][uuid]['last_checked'] = int(newest_key)
|
||||
# @todo should be the original value if it was less than newest key
|
||||
self.data['watching'][uuid]['last_changed'] = int(newest_key)
|
||||
try:
|
||||
with open(self.data['watching'][uuid]['history'][str(newest_key)], "rb") as fp:
|
||||
content = fp.read()
|
||||
self.data['watching'][uuid]['previous_md5'] = hashlib.md5(content).hexdigest()
|
||||
except (FileNotFoundError, IOError):
|
||||
self.data['watching'][uuid]['previous_md5'] = False
|
||||
pass
|
||||
|
||||
self.needs_write = True
|
||||
return changes_removed
|
||||
|
||||
def add_watch(self, url, tag):
|
||||
with self.lock:
|
||||
# @todo use a common generic version of this
|
||||
new_uuid = str(uuid_builder.uuid4())
|
||||
_blank = deepcopy(self.generic_definition)
|
||||
_blank.update({
|
||||
'url': url,
|
||||
'tag': tag,
|
||||
'uuid': new_uuid
|
||||
})
|
||||
|
||||
self.data['watching'][new_uuid] = _blank
|
||||
|
||||
# Get the directory ready
|
||||
output_path = "{}/{}".format(self.datastore_path, new_uuid)
|
||||
try:
|
||||
mkdir(output_path)
|
||||
except FileExistsError:
|
||||
print(output_path, "already exists.")
|
||||
|
||||
self.sync_to_json()
|
||||
return new_uuid
|
||||
|
||||
# Save some text file to the appropriate path and bump the history
|
||||
# result_obj from fetch_site_status.run()
|
||||
def save_history_text(self, uuid, result_obj, contents):
|
||||
|
||||
output_path = "{}/{}".format(self.datastore_path, uuid)
|
||||
fname = "{}/{}-{}.stripped.txt".format(output_path, result_obj['previous_md5'], str(time.time()))
|
||||
with open(fname, 'w') as f:
|
||||
f.write(contents)
|
||||
f.close()
|
||||
|
||||
# Update history with the stripped text for future reference; this also means the first snapshot gets saved.
|
||||
# Should always be keyed by string(timestamp)
|
||||
self.update_watch(uuid, {"history": {str(result_obj["last_checked"]): fname}})
|
||||
|
||||
return fname
|
||||
|
||||
def sync_to_json(self):
|
||||
print("Saving..")
|
||||
data ={}
|
||||
|
||||
try:
|
||||
data = deepcopy(self.__data)
|
||||
except RuntimeError:
|
||||
time.sleep(0.5)
|
||||
print ("! Data changed when writing to JSON, trying again..")
|
||||
self.sync_to_json()
|
||||
return
|
||||
else:
|
||||
with open(self.json_store_path, 'w') as json_file:
|
||||
json.dump(data, json_file, indent=4)
|
||||
logging.info("Re-saved index")
|
||||
|
||||
self.needs_write = False
|
||||
|
||||
# Thread runner, this helps with thread/write issues when there are many operations that want to update the JSON
|
||||
# by just running periodically in one thread; according to Python, dict updates are thread-safe.
|
||||
def save_datastore(self):
|
||||
|
||||
while True:
|
||||
if self.stop_thread:
|
||||
print("Shutting down datastore thread")
|
||||
return
|
||||
|
||||
if self.needs_write:
|
||||
self.sync_to_json()
|
||||
time.sleep(3)
|
||||
|
||||
# Go through the datastore path and remove any snapshots that are not mentioned in the index
|
||||
# This usually is not used, but can be handy.
|
||||
def remove_unused_snapshots(self):
|
||||
print ("Removing snapshots from datastore that are not in the index..")
|
||||
|
||||
index=[]
|
||||
for uuid in self.data['watching']:
|
||||
for id in self.data['watching'][uuid]['history']:
|
||||
index.append(self.data['watching'][uuid]['history'][str(id)])
|
||||
|
||||
import pathlib
|
||||
# Only in the sub-directories
|
||||
for item in pathlib.Path(self.datastore_path).rglob("*/*txt"):
|
||||
if not str(item) in index:
|
||||
print ("Removing",item)
|
||||
unlink(item)
|
||||
@@ -1,25 +0,0 @@
|
||||
{% macro render_field(field) %}
|
||||
<div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
|
||||
<div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
|
||||
{% if field.errors %}
|
||||
<ul class=errors>
|
||||
{% for error in field.errors %}
|
||||
<li>{{ error }}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
{% macro render_simple_field(field) %}
|
||||
<span class="label {% if field.errors %}error{% endif %}">{{ field.label }}</span>
|
||||
<span {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
|
||||
{% if field.errors %}
|
||||
<ul class=errors>
|
||||
{% for error in field.errors %}
|
||||
<li>{{ error }}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
</span>
|
||||
{% endmacro %}
|
||||
@@ -1,79 +0,0 @@
|
||||
{% extends 'base.html' %}
|
||||
{% block content %}
|
||||
{% from '_helpers.jinja' import render_field %}
|
||||
<div class="edit-form monospaced-textarea">
|
||||
<form class="pure-form pure-form-stacked" action="{{ url_for('edit_page', uuid=uuid, next = request.args.get('next') ) }}" method="POST">
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.url, placeholder="https://...", size=30, required=true) }}
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.title, size=30) }}
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.tag, size=10) }}
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.minutes_between_check, size=5) }}
|
||||
{% if using_default_minutes %}
|
||||
<span class="pure-form-message-inline">Currently using the <a href="{{ url_for('settings_page', uuid=uuid) }}">default global settings</a>, change to another value if you want to be specific.</span>
|
||||
{% else %}
|
||||
<span class="pure-form-message-inline">Set to blank to use the <a href="{{ url_for('settings_page', uuid=uuid) }}">default global settings</a>.</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.css_filter, size=25, placeholder=".class-name or #some-id, or other CSS selector rule.") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>CSS - Limit text to this CSS rule, only text matching this CSS rule is included.</li>
|
||||
<li>JSON - Limit text to this JSON rule, using <a href="https://pypi.org/project/jsonpath-ng/">JSONPath</a>, prefix with <b>"json:"</b>, <a href="https://jsonpath.com/" target="new">test your JSONPath here</a></li>
|
||||
</ul>
|
||||
Please be sure that you thoroughly understand how to write CSS or JSONPath selector rules before filing an issue on GitHub! <a href="https://github.com/dgtlmoon/changedetection.io/wiki/CSS-Selector-help">here for more CSS selector help</a>.<br/>
|
||||
</span>
|
||||
</div>
|
||||
<!-- @todo: move to tabs --->
|
||||
<fieldset class="pure-group">
|
||||
{{ render_field(form.ignore_text, rows=5, placeholder="Some text to ignore in a line
|
||||
/some.regex\d{2}/ for case-INsensitive regex
|
||||
") }}
|
||||
<span class="pure-form-message-inline">
|
||||
Each line processed separately, any line matching will be ignored.<br/>
|
||||
Regular Expression support, wrap the line in forward slash <b>/regex/</b>.
|
||||
</span>
|
||||
|
||||
</fieldset>
|
||||
|
||||
<fieldset class="pure-group">
|
||||
{{ render_field(form.headers, rows=5, placeholder="Example
|
||||
Cookie: foobar
|
||||
User-Agent: wonderbra 1.0") }}
|
||||
</fieldset>
|
||||
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_urls, rows=5, placeholder="Examples:
|
||||
Gitter - gitter://token/room
|
||||
Office365 - o365://TenantID:AccountEmail/ClientID/ClientSecret/TargetEmail
|
||||
AWS SNS - sns://AccessKeyID/AccessSecretKey/RegionName/+PhoneNo
|
||||
SMTPS - mailtos://user:pass@mail.domain.com?to=receivingAddress@example.com
|
||||
") }}
|
||||
<span class="pure-form-message-inline">Use <a target=_new href="https://github.com/caronc/apprise">AppRise URLs</a> for notification to just about any service!</span>
|
||||
</div>
|
||||
|
||||
<div class="pure-controls">
|
||||
{{ render_field(form.trigger_check, rows=5) }}
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
<button type="submit" class="pure-button pure-button-primary">Save</button>
|
||||
</div>
|
||||
<br/>
|
||||
<div class="pure-control-group">
|
||||
<a href="{{url_for('index')}}" class="pure-button button-small button-cancel">Cancel</a>
|
||||
<a href="{{url_for('api_delete', uuid=uuid)}}"
|
||||
class="pure-button button-small button-error ">Delete</a>
|
||||
</div>
|
||||
</fieldset>
|
||||
</form>
|
||||
|
||||
</div>
|
||||
|
||||
{% endblock %}
|
||||
@@ -1,26 +0,0 @@
{% extends 'base.html' %}

{% block content %}
<div class="edit-form">

    <form class="pure-form pure-form-aligned" action="{{url_for('import_page')}}" method="POST">

        <fieldset class="pure-group">
            <legend>One URL per line, URLs that do not pass validation will stay in the textarea.</legend>

            <textarea name="urls" class="pure-input-1-2" placeholder="https://"
                      style="width: 100%;
                             font-family:monospace;
                             white-space: pre;
                             overflow-wrap: normal;
                             overflow-x: scroll;" rows="25">{{ remaining }}</textarea>
        </fieldset>
        <button type="submit" class="pure-button pure-input-1-2 pure-button-primary">Import</button>

    </form>

</div>

{% endblock %}
@@ -1,26 +0,0 @@
{% extends 'base.html' %}

{% block content %}

<div id="settings">
    <h1>Current</h1>
</div>

<div id="diff-ui">

    <table>
        <tbody>
        <tr>
            <!-- just proof of concept copied straight from github.com/kpdecker/jsdiff -->

            <td id="diff-col">
                <span id="result">{% for row in content %}<pre>{{row}}</pre>{% endfor %}</span>
            </td>
        </tr>
        </tbody>
    </table>
</div>

{% endblock %}
@@ -1,107 +0,0 @@
|
||||
{% extends 'base.html' %}
|
||||
|
||||
{% block content %}
|
||||
{% from '_helpers.jinja' import render_field %}
|
||||
<script type="text/javascript" src="static/js/settings.js"></script>
|
||||
<div class="edit-form">
|
||||
<form class="pure-form pure-form-stacked settings" action="{{url_for('settings_page')}}" method="POST">
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.minutes_between_check, size=5) }}
|
||||
<span class="pure-form-message-inline">Default time for all watches, when the watch does not have a specific time setting.</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{% if current_user.is_authenticated %}
|
||||
<a href="{{url_for('settings_page', removepassword='yes')}}" class="pure-button pure-button-primary">Remove password</a>
|
||||
{% else %}
|
||||
{{ render_field(form.password, size=10) }}
|
||||
<span class="pure-form-message-inline">Password protection for your changedetection.io application.</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.extract_title_as_title) }}
|
||||
<span class="pure-form-message-inline">Note: This will automatically apply to all existing watches.</span>
|
||||
</div>
|
||||
|
||||
<div class="field-group">
|
||||
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_urls, rows=5, placeholder="Examples:
|
||||
Gitter - gitter://token/room
|
||||
Office365 - o365://TenantID:AccountEmail/ClientID/ClientSecret/TargetEmail
|
||||
AWS SNS - sns://AccessKeyID/AccessSecretKey/RegionName/+PhoneNo
|
||||
SMTPS - mailtos://user:pass@mail.domain.com?to=receivingAddress@example.com") }}
|
||||
<div class="pure-form-message-inline">Use <a target=_new href="https://github.com/caronc/apprise">AppRise URLs</a> for notification to just about any service!
|
||||
<a id="toggle-customise-notifications">Customise notification body: <i
|
||||
class="arrow down"></i></a>
|
||||
</div>
|
||||
</div>
|
||||
<div id="notification-customisation" style="display:none;">
|
||||
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_title, size=80) }}
|
||||
<span class="pure-form-message-inline">Title for all notifications</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_body , rows=5) }}
|
||||
<span class="pure-form-message-inline">Body for all notifications</span>
|
||||
</div>
|
||||
<div class="pure-controls">
|
||||
<span class="pure-form-message-inline">
|
||||
These tokens can be used in the notification body and title to
|
||||
customise the notification text.
|
||||
</span>
|
||||
<table class="pure-table" id="token-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Token</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td><code>{base_url}</code></td>
|
||||
<td>The URL of the changedetection.io instance you are running.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{watch_url}</code></td>
|
||||
<td>The URL being watched.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{preview_url}</code></td>
|
||||
<td>The URL of the preview page generated by changedetection.io.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{diff_url}</code></td>
|
||||
<td>The URL of the diff page generated by changedetection.io.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{current_snapshot}</code></td>
|
||||
<td>The current snapshot value, useful when combined with JSON or CSS filters</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<span class="pure-form-message-inline">
|
||||
URLs generated by changedetection.io (such as <code>{diff_url}</code>) require the <code>BASE_URL</code> environment variable set.
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.trigger_check) }}
|
||||
</div>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
<button type="submit" class="pure-button pure-button-primary">Save</button>
|
||||
</div>
|
||||
<br/>
|
||||
<div class="pure-control-group">
|
||||
<a href="{{url_for('index')}}" class="pure-button button-small button-cancel">Back</a>
|
||||
<a href="{{url_for('scrub_page')}}" class="pure-button button-small button-cancel">Delete History Snapshot Data</a>
|
||||
</div>
|
||||
</fieldset>
|
||||
</form>
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
{% endblock %}
|
||||
@@ -1,98 +0,0 @@
|
||||
{% extends 'base.html' %}
|
||||
{% block content %}
|
||||
{% from '_helpers.jinja' import render_simple_field %}
|
||||
|
||||
<div class="box">
|
||||
|
||||
<form class="pure-form" action="{{ url_for('api_watch_add') }}" method="POST" id="new-watch-form">
|
||||
<fieldset>
|
||||
<legend>Add a new change detection watch</legend>
|
||||
{{ render_simple_field(form.url, placeholder="https://...", size=30, required=true) }}
|
||||
{{ render_simple_field(form.tag, size=10, value=active_tag if active_tag else '', placeholder="tag") }}
|
||||
<button type="submit" class="pure-button pure-button-primary">Watch</button>
|
||||
</fieldset>
|
||||
<!-- add extra stuff, like do a http POST and send headers -->
|
||||
<!-- user/pass r = requests.get('https://api.github.com/user', auth=('user', 'pass')) -->
|
||||
</form>
|
||||
<div>
|
||||
<a href="{{url_for('index')}}" class="pure-button button-tag {{'active' if not active_tag }}">All</a>
|
||||
{% for tag in tags %}
|
||||
{% if tag != "" %}
|
||||
<a href="{{url_for('index', tag=tag) }}" class="pure-button button-tag {{'active' if active_tag == tag }}">{{ tag }}</a>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</div>
|
||||
|
||||
<div id="watch-table-wrapper">
|
||||
<table class="pure-table pure-table-striped watch-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>#</th>
|
||||
<th></th>
|
||||
<th></th>
|
||||
<th>Last Checked</th>
|
||||
<th>Last Changed</th>
|
||||
<th></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
|
||||
|
||||
{% for watch in watches %}
|
||||
<tr id="{{ watch.uuid }}"
|
||||
class="{{ loop.cycle('pure-table-odd', 'pure-table-even') }}
|
||||
{% if watch.last_error is defined and watch.last_error != False %}error{% endif %}
|
||||
{% if watch.paused is defined and watch.paused != False %}paused{% endif %}
|
||||
{% if watch.newest_history_key| int > watch.last_viewed| int %}unviewed{% endif %}">
|
||||
<td class="inline">{{ loop.index }}</td>
|
||||
<td class="inline paused-state state-{{watch.paused}}"><a href="{{url_for('index', pause=watch.uuid, tag=active_tag)}}"><img src="{{url_for('static_content', group='images', filename='pause.svg')}}" alt="Pause"/></a></td>
|
||||
|
||||
<td class="title-col inline">{{watch.title if watch.title is not none and watch.title|length > 0 else watch.url}}
|
||||
<a class="external" target="_blank" rel="noopener" href="{{ watch.url }}"></a>
|
||||
{% if watch.last_error is defined and watch.last_error != False %}
|
||||
<div class="fetch-error">{{ watch.last_error }}</div>
|
||||
{% endif %}
|
||||
{% if not active_tag %}
|
||||
<span class="watch-tag-list">{{ watch.tag}}</span>
|
||||
{% endif %}
|
||||
</td>
|
||||
<td class="last-checked">{{watch|format_last_checked_time}}</td>
|
||||
<td class="last-changed">{% if watch.history|length >= 2 and watch.last_changed %}
|
||||
{{watch.last_changed|format_timestamp_timeago}}
|
||||
{% else %}
|
||||
Not yet
|
||||
{% endif %}
|
||||
</td>
|
||||
<td>
|
||||
<a href="{{ url_for('api_watch_checknow', uuid=watch.uuid, tag=request.args.get('tag')) }}"
|
||||
class="pure-button button-small pure-button-primary">Recheck</a>
|
||||
<a href="{{ url_for('edit_page', uuid=watch.uuid)}}" class="pure-button button-small pure-button-primary">Edit</a>
|
||||
{% if watch.history|length >= 2 %}
|
||||
<a href="{{ url_for('diff_history_page', uuid=watch.uuid) }}" target="{{watch.uuid}}" class="pure-button button-small pure-button-primary">Diff</a>
|
||||
{% else %}
|
||||
{% if watch.history|length == 1 %}
|
||||
<a href="{{ url_for('preview_page', uuid=watch.uuid)}}" target="{{watch.uuid}}" class="pure-button button-small pure-button-primary">Preview</a>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
<ul id="post-list-buttons">
|
||||
{% if has_unviewed %}
|
||||
<li>
|
||||
<a href="{{url_for('mark_all_viewed', tag=request.args.get('tag')) }}" class="pure-button button-tag ">Mark all viewed</a>
|
||||
</li>
|
||||
{% endif %}
|
||||
<li>
|
||||
<a href="{{ url_for('api_watch_checknow', tag=active_tag) }}" class="pure-button button-tag ">Recheck
|
||||
all {% if active_tag%}in "{{active_tag}}"{%endif%}</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="{{ url_for('index', tag=active_tag , rss=true)}}"><img id="feed-icon" src="{{url_for('static_content', group='images', filename='Generic_Feed-icon.svg')}}" height="15px"></a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
@@ -1,102 +0,0 @@
|
||||
from flask import url_for
|
||||
|
||||
|
||||
def test_check_access_control(app, client):
|
||||
# Still doesnt work, but this is closer.
|
||||
|
||||
with app.test_client() as c:
|
||||
# Check we dont have any password protection enabled yet.
|
||||
res = c.get(url_for("settings_page"))
|
||||
assert b"Remove password" not in res.data
|
||||
|
||||
# Enable password check.
|
||||
res = c.post(
|
||||
url_for("settings_page"),
|
||||
data={"password": "foobar", "minutes_between_check": 180},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Password protection enabled." in res.data
|
||||
assert b"LOG OUT" not in res.data
|
||||
|
||||
# Check we hit the login
|
||||
res = c.get(url_for("index"), follow_redirects=True)
|
||||
|
||||
assert b"Login" in res.data
|
||||
|
||||
# Menu should not be available yet
|
||||
# assert b"SETTINGS" not in res.data
|
||||
# assert b"BACKUP" not in res.data
|
||||
# assert b"IMPORT" not in res.data
|
||||
|
||||
# defaultuser@changedetection.io is actually hardcoded for now, we only use a single password
|
||||
res = c.post(
|
||||
url_for("login"),
|
||||
data={"password": "foobar"},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"LOG OUT" in res.data
|
||||
res = c.get(url_for("settings_page"))
|
||||
|
||||
# Menu should be available now
|
||||
assert b"SETTINGS" in res.data
|
||||
assert b"BACKUP" in res.data
|
||||
assert b"IMPORT" in res.data
|
||||
assert b"LOG OUT" in res.data
|
||||
|
||||
# Now remove the password so other tests function, @todo this should happen before each test automatically
|
||||
res = c.get(url_for("settings_page", removepassword="yes"),
|
||||
follow_redirects=True)
|
||||
assert b"Password protection removed." in res.data
|
||||
|
||||
res = c.get(url_for("index"))
|
||||
assert b"LOG OUT" not in res.data
|
||||
|
||||
|
||||
# There was a bug where saving the settings form would submit a blank password
|
||||
def test_check_access_control_no_blank_password(app, client):
|
||||
# Still doesnt work, but this is closer.
|
||||
|
||||
with app.test_client() as c:
|
||||
# Check we dont have any password protection enabled yet.
|
||||
res = c.get(url_for("settings_page"))
|
||||
assert b"Remove password" not in res.data
|
||||
|
||||
# Enable password check.
|
||||
res = c.post(
|
||||
url_for("settings_page"),
|
||||
data={"password": "", "minutes_between_check": 180},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Password protection enabled." not in res.data
|
||||
assert b"Login" not in res.data
|
||||
|
||||
|
||||
# There was a bug where saving the settings form would submit a blank password
|
||||
def test_check_access_no_remote_access_to_remove_password(app, client):
|
||||
# Still doesnt work, but this is closer.
|
||||
|
||||
with app.test_client() as c:
|
||||
# Check we dont have any password protection enabled yet.
|
||||
res = c.get(url_for("settings_page"))
|
||||
assert b"Remove password" not in res.data
|
||||
|
||||
# Enable password check.
|
||||
res = c.post(
|
||||
url_for("settings_page"),
|
||||
data={"password": "password", "minutes_between_check": 180},
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
assert b"Password protection enabled." in res.data
|
||||
assert b"Login" in res.data
|
||||
|
||||
res = c.get(url_for("settings_page", removepassword="yes"),
|
||||
follow_redirects=True)
|
||||
assert b"Password protection removed." not in res.data
|
||||
|
||||
res = c.get(url_for("index"),
|
||||
follow_redirects=True)
|
||||
assert b"watch-table-wrapper" not in res.data
|
||||
@@ -1,121 +0,0 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import time
|
||||
from flask import url_for
|
||||
from . util import live_server_setup
|
||||
|
||||
def test_setup(live_server):
|
||||
live_server_setup(live_server)
|
||||
|
||||
def set_original_response():
|
||||
test_return_data = """
|
||||
{
|
||||
"employees": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "Pankaj",
|
||||
"salary": "10000"
|
||||
},
|
||||
{
|
||||
"name": "David",
|
||||
"salary": "5000",
|
||||
"id": 2
|
||||
}
|
||||
],
|
||||
"boss": {
|
||||
"name": "Fat guy"
|
||||
}
|
||||
}
|
||||
"""
|
||||
with open("test-datastore/output.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
return None
|
||||
|
||||
def set_modified_response():
|
||||
test_return_data = """
|
||||
{
|
||||
"employees": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "Pankaj",
|
||||
"salary": "10000"
|
||||
},
|
||||
{
|
||||
"name": "David",
|
||||
"salary": "5000",
|
||||
"id": 2
|
||||
}
|
||||
],
|
||||
"boss": {
|
||||
"name": "Foobar"
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
with open("test-datastore/output.txt", "w") as f:
|
||||
f.write(test_return_data)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
|
||||
def test_check_json_filter(client, live_server):
|
||||
|
||||
json_filter = 'json:boss.name'
|
||||
|
||||
set_original_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(1)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"css_filter": json_filter, "url": test_url, "tag": "", "headers": ""},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
|
||||
# Check it saved
|
||||
res = client.get(
|
||||
url_for("edit_page", uuid="first"),
|
||||
)
|
||||
assert bytes(json_filter.encode('utf-8')) in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
# Make a change
|
||||
set_modified_response()
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
|
||||
# It should have 'unviewed' still
|
||||
res = client.get(url_for("index"))
|
||||
assert b'unviewed' in res.data
|
||||
|
||||
# Should not see this, because its not in the JSONPath we entered
|
||||
res = client.get(url_for("diff_history_page", uuid="first"))
|
||||
# But the change should be there, tho its hard to test the change was detected because it will show old and new versions
|
||||
assert b'Foobar' in res.data
|
||||
@@ -1,125 +0,0 @@
|
||||
import os
|
||||
import time
|
||||
from flask import url_for
|
||||
from . util import set_original_response, set_modified_response, live_server_setup
|
||||
|
||||
# Hard to just add more live server URLs when one test is already running (I think)
|
||||
# So we add our test here (was in a different file)
|
||||
def test_check_notification(client, live_server):
|
||||
|
||||
live_server_setup(live_server)
|
||||
set_original_response()
|
||||
|
||||
# Give the endpoint time to spin up
|
||||
time.sleep(3)
|
||||
|
||||
# Add our URL to the import page
|
||||
test_url = url_for('test_endpoint', _external=True)
|
||||
res = client.post(
|
||||
url_for("import_page"),
|
||||
data={"urls": test_url},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"1 Imported" in res.data
|
||||
|
||||
# Give the thread time to pick up the first version
|
||||
time.sleep(3)
|
||||
|
||||
# Goto the edit page, add our ignore text
|
||||
# Add our URL to the import page
|
||||
url = url_for('test_notification_endpoint', _external=True)
|
||||
notification_url = url.replace('http', 'json')
|
||||
|
||||
print (">>>> Notification URL: "+notification_url)
|
||||
res = client.post(
|
||||
url_for("edit_page", uuid="first"),
|
||||
data={"notification_urls": notification_url,
|
||||
"url": test_url,
|
||||
"tag": "",
|
||||
"headers": "",
|
||||
"trigger_check": "y"},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Updated watch." in res.data
|
||||
assert b"Notifications queued" in res.data
|
||||
|
||||
# Hit the edit page, be sure that we saved it
|
||||
res = client.get(
|
||||
url_for("edit_page", uuid="first"))
|
||||
assert bytes(notification_url.encode('utf-8')) in res.data
|
||||
|
||||
|
||||
# Because we hit 'send test notification on save'
|
||||
time.sleep(3)
|
||||
|
||||
# Verify what was sent as a notification, this file should exist
|
||||
with open("test-datastore/notification.txt", "r") as f:
|
||||
notification_submission = f.read()
|
||||
# Did we see the URL that had a change, in the notification?
|
||||
assert test_url in notification_submission
|
||||
|
||||
os.unlink("test-datastore/notification.txt")
|
||||
|
||||
|
||||
set_modified_response()
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
|
||||
# Did the front end see it?
|
||||
res = client.get(
|
||||
url_for("index"))
|
||||
|
||||
assert bytes("just now".encode('utf-8')) in res.data
|
||||
|
||||
# Verify what was sent as a notification
|
||||
with open("test-datastore/notification.txt", "r") as f:
|
||||
notification_submission = f.read()
|
||||
# Did we see the URL that had a change, in the notification?
|
||||
assert test_url in notification_submission
|
||||
|
||||
# Re #65 - did we see our foobar.com BASE_URL ?
|
||||
#assert bytes("https://foobar.com".encode('utf-8')) in notification_submission
|
||||
|
||||
|
||||
## Now configure something clever, we go into custom config (non-default) mode
|
||||
|
||||
with open("test-datastore/output.txt", "w") as f:
|
||||
f.write(";jasdhflkjadshf kjhsdfkjl ahslkjf haslkjd hfaklsj hf\njl;asdhfkasj stuff we will detect\n")
|
||||
|
||||
res = client.post(
|
||||
url_for("settings_page"),
|
||||
data={"notification_title": "New ChangeDetection.io Notification - {watch_url}",
|
||||
"notification_body": "{base_url}\n{watch_url}\n{preview_url}\n{diff_url}\n{current_snapshot}\n:-)",
|
||||
"notification_urls": "json://foobar.com", #Re #143 should not see that it sent without [test checkbox]
|
||||
"minutes_between_check": 180},
|
||||
follow_redirects=True
|
||||
)
|
||||
assert b"Settings updated." in res.data
|
||||
# Re #143 - should not see this if we didnt hit the test box
|
||||
assert b"Notifications queued" not in res.data
|
||||
|
||||
# Trigger a check
|
||||
client.get(url_for("api_watch_checknow"), follow_redirects=True)
|
||||
|
||||
# Give the thread time to pick it up
|
||||
time.sleep(3)
|
||||
|
||||
# Did the front end see it?
|
||||
res = client.get(
|
||||
url_for("index"))
|
||||
|
||||
assert bytes("just now".encode('utf-8')) in res.data
|
||||
|
||||
with open("test-datastore/notification.txt", "r") as f:
|
||||
notification_submission = f.read()
|
||||
|
||||
assert "diff/" in notification_submission
|
||||
assert "preview/" in notification_submission
|
||||
assert ":-)" in notification_submission
|
||||
assert "New ChangeDetection.io Notification - {}".format(test_url) in notification_submission
|
||||
# This should insert the {current_snapshot}
|
||||
assert "stuff we will detect" in notification_submission
|
||||
@@ -1,60 +0,0 @@
#!/usr/bin/python3


def set_original_response():
    test_return_data = """<html>
       <head><title>head title</title></head>
       <body>
       Some initial text</br>
       <p>Which is across multiple lines</p>
       </br>
       So let's see what happens. </br>
       </body>
     </html>
    """

    with open("test-datastore/output.txt", "w") as f:
        f.write(test_return_data)
    return None


def set_modified_response():
    test_return_data = """<html>
       <head><title>modified head title</title></head>
       <body>
       Some initial text</br>
       <p>which has this one new line</p>
       </br>
       So let's see what happens. </br>
       </body>
     </html>
    """

    with open("test-datastore/output.txt", "w") as f:
        f.write(test_return_data)

    return None


def live_server_setup(live_server):

    @live_server.app.route('/test-endpoint')
    def test_endpoint():
        # Tried using a global var here but didn't seem to work, so reading from a file instead.
        with open("test-datastore/output.txt", "r") as f:
            return f.read()

    # Where we POST to as a notification
    @live_server.app.route('/test_notification_endpoint', methods=['POST'])
    def test_notification_endpoint():
        from flask import request

        with open("test-datastore/notification.txt", "wb") as f:
            # Debug method, dump all POST to file also, used to prove #65
            data = request.stream.read()
            if data is not None:
                f.write(data)

        print("\n>> Test notification endpoint was hit.\n")
        return "Text was set"

    live_server.start()
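The app, client, and live_server objects used by these tests presumably come from pytest-flask style fixtures; a minimal conftest sketch under that assumption (import names and paths below are illustrative only, not the project's actual conftest):

import os
import pytest

# Hypothetical conftest.py sketch: pytest-flask derives the `client` and `live_server`
# fixtures from an `app` fixture that returns the Flask application under test.
@pytest.fixture(scope='session')
def app(request):
    from backend import changedetection_app, store  # assumed import path for this sketch

    datastore_path = "./test-datastore"
    os.makedirs(datastore_path, exist_ok=True)

    datastore = store.ChangeDetectionStore(datastore_path=datastore_path)
    application = changedetection_app({'datastore_path': datastore_path}, datastore)
    return application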
@@ -1,78 +0,0 @@
import threading
import queue

# Requests for checking on the site use a pool of thread Workers managed by a Queue.
class update_worker(threading.Thread):
    current_uuid = None

    def __init__(self, q, notification_q, app, datastore, *args, **kwargs):
        self.q = q
        self.app = app
        self.notification_q = notification_q
        self.datastore = datastore
        super().__init__(*args, **kwargs)

    def run(self):
        from backend import fetch_site_status

        update_handler = fetch_site_status.perform_site_check(datastore=self.datastore)

        while not self.app.config.exit.is_set():

            try:
                uuid = self.q.get(block=False)
            except queue.Empty:
                pass

            else:
                self.current_uuid = uuid

                if uuid in list(self.datastore.data['watching'].keys()):
                    try:
                        changed_detected, result, contents = update_handler.run(uuid)

                    except PermissionError as e:
                        self.app.logger.error("File permission error updating", uuid, str(e))
                    except Exception as e:
                        self.app.logger.error("Exception reached", uuid, str(e))
                    else:
                        if result:
                            try:
                                self.datastore.update_watch(uuid=uuid, update_obj=result)
                                if changed_detected:

                                    # A change was detected
                                    newest_version_file_contents = ""
                                    self.datastore.save_history_text(uuid=uuid, contents=contents, result_obj=result)
                                    watch = self.datastore.data['watching'][uuid]

                                    newest_key = self.datastore.get_newest_history_key(uuid)
                                    if newest_key:
                                        with open(watch['history'][newest_key], 'r') as f:
                                            newest_version_file_contents = f.read().strip()

                                    n_object = {
                                        'watch_url': self.datastore.data['watching'][uuid]['url'],
                                        'uuid': uuid,
                                        'current_snapshot': newest_version_file_contents
                                    }

                                    # Did it have any notification alerts to hit?
                                    if len(watch['notification_urls']):
                                        print("Processing notifications for UUID: {}".format(uuid))
                                        n_object['notification_urls'] = watch['notification_urls']
                                        self.notification_q.put(n_object)

                                    # No? maybe theres a global setting, queue them all
                                    elif len(self.datastore.data['settings']['application']['notification_urls']):
                                        print("Processing GLOBAL notifications for UUID: {}".format(uuid))
                                        n_object['notification_urls'] = self.datastore.data['settings']['application']['notification_urls']
                                        self.notification_q.put(n_object)

                            except Exception as e:
                                print("!!!! Exception in update_worker !!!\n", e)

                self.current_uuid = None  # Done
                self.q.task_done()

            self.app.config.exit.wait(1)
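The worker above consumes watch UUIDs from a shared queue; a minimal wiring sketch follows (the thread count, import path, and the pre-existing app and datastore objects are assumptions here, not the application's exact startup code):

import queue
from backend import update_worker  # module shown above; exact import path is an assumption

update_q = queue.Queue()
notification_q = queue.Queue()

# `app` (the Flask app whose config.exit event is set on shutdown) and `datastore`
# are assumed to exist already in the surrounding application.
for _ in range(3):
    worker = update_worker.update_worker(update_q, notification_q, app, datastore)
    worker.daemon = True
    worker.start()

# Queue a watch for re-checking by UUID; this older worker expects the bare UUID on the queue.
# update_q.put(uuid)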
@@ -1,92 +1,41 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
# Launch as a eventlet.wsgi server instance.
|
||||
# Entry-point for running from the CLI when not installed via Pip; Pip will handle the console_scripts entry_points from setup.py
# It's recommended to use `pip3 install changedetection.io` and start with `changedetection.py` instead, it will be linked to your global path.
|
||||
# or Docker.
|
||||
# Read more https://github.com/dgtlmoon/changedetection.io/wiki
|
||||
|
||||
import getopt
|
||||
from changedetectionio import changedetection
|
||||
import multiprocessing
|
||||
import signal
|
||||
import os
|
||||
import sys
|
||||
|
||||
import eventlet
|
||||
import eventlet.wsgi
|
||||
import backend
|
||||
def sigchld_handler(_signo, _stack_frame):
|
||||
import sys
|
||||
print('Shutdown: Got SIGCHLD')
|
||||
# https://stackoverflow.com/questions/40453496/python-multiprocessing-capturing-signals-to-restart-child-processes-or-shut-do
|
||||
pid, status = os.waitpid(-1, os.WNOHANG | os.WUNTRACED | os.WCONTINUED)
|
||||
|
||||
from backend import store
|
||||
|
||||
def main(argv):
|
||||
ssl_mode = False
|
||||
port = 5000
|
||||
do_cleanup = False
|
||||
|
||||
# Must be absolute so that send_from_directory doesnt try to make it relative to backend/
|
||||
datastore_path = os.path.join(os.getcwd(), "datastore")
|
||||
|
||||
try:
|
||||
opts, args = getopt.getopt(argv, "csd:p:", "port")
|
||||
except getopt.GetoptError:
|
||||
print('backend.py -s SSL enable -p [port] -d [datastore path]')
|
||||
sys.exit(2)
|
||||
|
||||
for opt, arg in opts:
|
||||
# if opt == '--purge':
|
||||
# Remove history, the actual files you need to delete manually.
|
||||
# for uuid, watch in datastore.data['watching'].items():
|
||||
# watch.update({'history': {}, 'last_checked': 0, 'last_changed': 0, 'previous_md5': None})
|
||||
|
||||
if opt == '-s':
|
||||
ssl_mode = True
|
||||
|
||||
if opt == '-p':
|
||||
port = int(arg)
|
||||
|
||||
if opt == '-d':
|
||||
datastore_path = arg
|
||||
|
||||
# Cleanup (remove text files that arent in the index)
|
||||
if opt == '-c':
|
||||
do_cleanup = True
|
||||
|
||||
# isnt there some @thingy to attach to each route to tell it, that this route needs a datastore
|
||||
app_config = {'datastore_path': datastore_path}
|
||||
|
||||
datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'])
|
||||
app = backend.changedetection_app(app_config, datastore)
|
||||
|
||||
# Go into cleanup mode
|
||||
if do_cleanup:
|
||||
datastore.remove_unused_snapshots()
|
||||
|
||||
app.config['datastore_path'] = datastore_path
|
||||
|
||||
|
||||
@app.context_processor
|
||||
def inject_version():
|
||||
return dict(right_sticky="v{}".format(datastore.data['version_tag']),
|
||||
new_version_available=app.config['NEW_VERSION_AVAILABLE'],
|
||||
has_password=datastore.data['settings']['application']['password'] != False
|
||||
)
|
||||
|
||||
# Proxy sub-directory support
|
||||
# Set environment var USE_X_SETTINGS=1 on this script
|
||||
# And then in your proxy_pass settings
|
||||
#
|
||||
# proxy_set_header Host "localhost";
|
||||
# proxy_set_header X-Forwarded-Prefix /app;
|
||||
|
||||
if os.getenv('USE_X_SETTINGS'):
|
||||
print ("USE_X_SETTINGS is ENABLED\n")
|
||||
from werkzeug.middleware.proxy_fix import ProxyFix
|
||||
app.wsgi_app = ProxyFix(app.wsgi_app, x_prefix=1, x_host=1)
|
||||
|
||||
if ssl_mode:
|
||||
# @todo finalise SSL config, but this should get you in the right direction if you need it.
|
||||
eventlet.wsgi.server(eventlet.wrap_ssl(eventlet.listen(('', port)),
|
||||
certfile='cert.pem',
|
||||
keyfile='privkey.pem',
|
||||
server_side=True), app)
|
||||
|
||||
else:
|
||||
eventlet.wsgi.server(eventlet.listen(('', port)), app)
|
||||
print('Sub-process: pid %d status %d' % (pid, status))
|
||||
if status != 0:
|
||||
sys.exit(1)
|
||||
|
||||
raise SystemExit
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(sys.argv[1:])
|
||||
|
||||
#signal.signal(signal.SIGCHLD, sigchld_handler)
|
||||
|
||||
# The only way I could find to get Flask to shutdown, is to wrap it and then rely on the subsystem issuing SIGTERM/SIGKILL
|
||||
parse_process = multiprocessing.Process(target=changedetection.main)
|
||||
parse_process.daemon = True
|
||||
parse_process.start()
|
||||
import time
|
||||
|
||||
try:
|
||||
while True:
|
||||
time.sleep(1)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
#parse_process.terminate() not needed, because this process will issue it to the sub-process anyway
|
||||
print ("Exited - CTRL+C")
|
||||
|
||||
changedetectionio/.gitignore (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
test-datastore
package-lock.json

changedetectionio/__init__.py (new file, 1478 lines)
changedetectionio/api/__init__.py (new file, 0 lines)
changedetectionio/api/api_v1.py (new file, 124 lines)
@@ -0,0 +1,124 @@
from flask_restful import abort, Resource
from flask import request, make_response
import validators
from . import auth


# https://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html

class Watch(Resource):
    def __init__(self, **kwargs):
        # datastore is a black box dependency
        self.datastore = kwargs['datastore']
        self.update_q = kwargs['update_q']

    # Get information about a single watch, excluding the history list (can be large)
    # curl http://localhost:4000/api/v1/watch/<string:uuid>
    # ?recheck=true
    @auth.check_token
    def get(self, uuid):
        from copy import deepcopy
        watch = deepcopy(self.datastore.data['watching'].get(uuid))
        if not watch:
            abort(404, message='No watch exists with the UUID of {}'.format(uuid))

        if request.args.get('recheck'):
            self.update_q.put((1, uuid))
            return "OK", 200

        # Return without history, get that via another API call
        watch['history_n'] = watch.history_n
        return watch

    @auth.check_token
    def delete(self, uuid):
        if not self.datastore.data['watching'].get(uuid):
            abort(400, message='No watch exists with the UUID of {}'.format(uuid))

        self.datastore.delete(uuid)
        return 'OK', 204


class WatchHistory(Resource):
    def __init__(self, **kwargs):
        # datastore is a black box dependency
        self.datastore = kwargs['datastore']

    # Get a list of available history for a watch by UUID
    # curl http://localhost:4000/api/v1/watch/<string:uuid>/history
    def get(self, uuid):
        watch = self.datastore.data['watching'].get(uuid)
        if not watch:
            abort(404, message='No watch exists with the UUID of {}'.format(uuid))
        return watch.history, 200


class WatchSingleHistory(Resource):
    def __init__(self, **kwargs):
        # datastore is a black box dependency
        self.datastore = kwargs['datastore']

    # Read a given history snapshot and return its content
    # <string:timestamp> or "latest"
    # curl http://localhost:4000/api/v1/watch/<string:uuid>/history/<int:timestamp>
    @auth.check_token
    def get(self, uuid, timestamp):
        watch = self.datastore.data['watching'].get(uuid)
        if not watch:
            abort(404, message='No watch exists with the UUID of {}'.format(uuid))

        if not len(watch.history):
            abort(404, message='Watch found but no history exists for the UUID {}'.format(uuid))

        if timestamp == 'latest':
            timestamp = list(watch.history.keys())[-1]

        with open(watch.history[timestamp], 'r') as f:
            content = f.read()

        response = make_response(content, 200)
        response.mimetype = "text/plain"
        return response


class CreateWatch(Resource):
    def __init__(self, **kwargs):
        # datastore is a black box dependency
        self.datastore = kwargs['datastore']
        self.update_q = kwargs['update_q']

    @auth.check_token
    def post(self):
        # curl http://localhost:4000/api/v1/watch -H "Content-Type: application/json" -d '{"url": "https://my-nice.com", "tag": "one, two" }'
        json_data = request.get_json()
        tag = json_data['tag'].strip() if json_data.get('tag') else ''

        if not validators.url(json_data['url'].strip()):
            return "Invalid or unsupported URL", 400

        extras = {'title': json_data['title'].strip()} if json_data.get('title') else {}

        new_uuid = self.datastore.add_watch(url=json_data['url'].strip(), tag=tag, extras=extras)
        self.update_q.put((1, new_uuid))
        return {'uuid': new_uuid}, 201

    # Return concise list of available watches and some very basic info
    # curl http://localhost:4000/api/v1/watch|python -mjson.tool
    # ?recheck_all=1 to recheck all
    @auth.check_token
    def get(self):
        list = {}
        for k, v in self.datastore.data['watching'].items():
            list[k] = {'url': v['url'],
                       'title': v['title'],
                       'last_checked': v['last_checked'],
                       'last_changed': v.last_changed,
                       'last_error': v['last_error']}

        if request.args.get('recheck_all'):
            for uuid in self.datastore.data['watching'].keys():
                self.update_q.put((1, uuid))
            return {'status': "OK"}, 200

        return list, 200
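These Resource classes receive datastore and update_q through constructor kwargs; a minimal sketch of how they could be registered with Flask-RESTful (the route strings follow the curl comments above, while the surrounding datastore and update_q objects and this exact wiring are assumptions rather than the application's actual registration code):

from flask import Flask
from flask_restful import Api

app = Flask(__name__)
api = Api(app)

# resource_class_kwargs is how Flask-RESTful passes shared objects into each Resource's __init__;
# `datastore` and `update_q` are assumed to exist in the surrounding application.
api.add_resource(CreateWatch, '/api/v1/watch',
                 resource_class_kwargs={'datastore': datastore, 'update_q': update_q})
api.add_resource(Watch, '/api/v1/watch/<string:uuid>',
                 resource_class_kwargs={'datastore': datastore, 'update_q': update_q})
api.add_resource(WatchHistory, '/api/v1/watch/<string:uuid>/history',
                 resource_class_kwargs={'datastore': datastore})
api.add_resource(WatchSingleHistory, '/api/v1/watch/<string:uuid>/history/<string:timestamp>',
                 resource_class_kwargs={'datastore': datastore})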
changedetectionio/api/auth.py (new file, 33 lines)
@@ -0,0 +1,33 @@
from flask import request, make_response, jsonify
from functools import wraps


# Simple API auth key comparison
# @todo - Maybe short lived token in the future?

def check_token(f):
    @wraps(f)
    def decorated(*args, **kwargs):
        datastore = args[0].datastore

        config_api_token_enabled = datastore.data['settings']['application'].get('api_access_token_enabled')
        if not config_api_token_enabled:
            return

        try:
            api_key_header = request.headers['x-api-key']
        except KeyError:
            return make_response(
                jsonify("No authorization x-api-key header."), 403
            )

        config_api_token = datastore.data['settings']['application'].get('api_access_token')

        if api_key_header != config_api_token:
            return make_response(
                jsonify("Invalid access - API key invalid."), 403
            )

        return f(*args, **kwargs)

    return decorated
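A quick usage sketch for the token check above; the port matches the curl examples earlier, while the token value is a placeholder and requests is used purely for illustration:

import requests

# The key is whatever is stored in settings['application']['api_access_token'];
# it must be sent in the x-api-key header when api_access_token_enabled is on.
api_key = "REPLACE_WITH_YOUR_TOKEN"
r = requests.get("http://localhost:4000/api/v1/watch",
                 headers={"x-api-key": api_key})
print(r.status_code, r.json())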
changedetectionio/apprise_asset.py (new file, 11 lines)
@@ -0,0 +1,11 @@
import apprise

# Create our AppriseAsset and populate it with some of our new values:
# https://github.com/caronc/apprise/wiki/Development_API#the-apprise-asset-object
asset = apprise.AppriseAsset(
    image_url_logo='https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png'
)

asset.app_id = "changedetection.io"
asset.app_desc = "ChangeDetection.io best and simplest website monitoring and change detection"
asset.app_url = "https://changedetection.io"
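A short sketch of how this asset would typically be attached when sending a notification with Apprise (the notification URL and message below are placeholders, not values from the application):

import apprise
from changedetectionio.apprise_asset import asset

# Passing the asset makes outgoing notifications carry the changedetection.io app_id/branding.
apobj = apprise.Apprise(asset=asset)
apobj.add('json://localhost:8080/notify')   # any AppRise-supported URL scheme works here
apobj.notify(title='Change detected', body='Example notification body')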
changedetectionio/changedetection.py (executable, new file, 126 lines)
@@ -0,0 +1,126 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
# Launch as a eventlet.wsgi server instance.
|
||||
|
||||
import getopt
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
|
||||
import eventlet
|
||||
import eventlet.wsgi
|
||||
from . import store, changedetection_app, content_fetcher
|
||||
from . import __version__
|
||||
|
||||
# Only global so we can access it in the signal handler
|
||||
datastore = None
|
||||
app = None
|
||||
|
||||
def sigterm_handler(_signo, _stack_frame):
|
||||
global app
|
||||
global datastore
|
||||
# app.config.exit.set()
|
||||
print('Shutdown: Got SIGTERM, DB saved to disk')
|
||||
datastore.sync_to_json()
|
||||
# raise SystemExit
|
||||
|
||||
def main():
|
||||
global datastore
|
||||
global app
|
||||
ssl_mode = False
|
||||
host = ''
|
||||
port = os.environ.get('PORT') or 5000
|
||||
do_cleanup = False
|
||||
datastore_path = None
|
||||
|
||||
# On Windows, create and use a default path.
|
||||
if os.name == 'nt':
|
||||
datastore_path = os.path.expandvars(r'%APPDATA%\changedetection.io')
|
||||
os.makedirs(datastore_path, exist_ok=True)
|
||||
else:
|
||||
# Must be absolute so that send_from_directory doesnt try to make it relative to backend/
|
||||
datastore_path = os.path.join(os.getcwd(), "../datastore")
|
||||
|
||||
try:
|
||||
opts, args = getopt.getopt(sys.argv[1:], "Ccsd:h:p:", "port")
|
||||
except getopt.GetoptError:
|
||||
print('backend.py -s SSL enable -h [host] -p [port] -d [datastore path]')
|
||||
sys.exit(2)
|
||||
|
||||
create_datastore_dir = False
|
||||
|
||||
for opt, arg in opts:
|
||||
if opt == '-s':
|
||||
ssl_mode = True
|
||||
|
||||
if opt == '-h':
|
||||
host = arg
|
||||
|
||||
if opt == '-p':
|
||||
port = int(arg)
|
||||
|
||||
if opt == '-d':
|
||||
datastore_path = arg
|
||||
|
||||
# Cleanup (remove text files that arent in the index)
|
||||
if opt == '-c':
|
||||
do_cleanup = True
|
||||
|
||||
# Create the datadir if it doesnt exist
|
||||
if opt == '-C':
|
||||
create_datastore_dir = True
|
||||
|
||||
# isnt there some @thingy to attach to each route to tell it, that this route needs a datastore
|
||||
app_config = {'datastore_path': datastore_path}
|
||||
|
||||
if not os.path.isdir(app_config['datastore_path']):
|
||||
if create_datastore_dir:
|
||||
os.mkdir(app_config['datastore_path'])
|
||||
else:
|
||||
print(
|
||||
"ERROR: Directory path for the datastore '{}' does not exist, cannot start, please make sure the directory exists or specify a directory with the -d option.\n"
|
||||
"Or use the -C parameter to create the directory.".format(app_config['datastore_path']), file=sys.stderr)
|
||||
sys.exit(2)
|
||||
|
||||
|
||||
datastore = store.ChangeDetectionStore(datastore_path=app_config['datastore_path'], version_tag=__version__)
|
||||
app = changedetection_app(app_config, datastore)
|
||||
|
||||
signal.signal(signal.SIGTERM, sigterm_handler)
|
||||
|
||||
# Go into cleanup mode
|
||||
if do_cleanup:
|
||||
datastore.remove_unused_snapshots()
|
||||
|
||||
app.config['datastore_path'] = datastore_path
|
||||
|
||||
|
||||
@app.context_processor
|
||||
def inject_version():
|
||||
return dict(right_sticky="v{}".format(datastore.data['version_tag']),
|
||||
new_version_available=app.config['NEW_VERSION_AVAILABLE'],
|
||||
has_password=datastore.data['settings']['application']['password'] != False
|
||||
)
|
||||
|
||||
# Proxy sub-directory support
|
||||
# Set environment var USE_X_SETTINGS=1 on this script
|
||||
# And then in your proxy_pass settings
|
||||
#
|
||||
# proxy_set_header Host "localhost";
|
||||
# proxy_set_header X-Forwarded-Prefix /app;
|
||||
|
||||
if os.getenv('USE_X_SETTINGS'):
|
||||
print ("USE_X_SETTINGS is ENABLED\n")
|
||||
from werkzeug.middleware.proxy_fix import ProxyFix
|
||||
app.wsgi_app = ProxyFix(app.wsgi_app, x_prefix=1, x_host=1)
|
||||
|
||||
if ssl_mode:
|
||||
# @todo finalise SSL config, but this should get you in the right direction if you need it.
|
||||
eventlet.wsgi.server(eventlet.wrap_ssl(eventlet.listen((host, port)),
|
||||
certfile='cert.pem',
|
||||
keyfile='privkey.pem',
|
||||
server_side=True), app)
|
||||
|
||||
else:
|
||||
eventlet.wsgi.server(eventlet.listen((host, int(port))), app)
|
||||
|
||||
changedetectionio/content_fetcher.py (new file, 615 lines)
@@ -0,0 +1,615 @@
|
||||
from abc import ABC, abstractmethod
|
||||
import chardet
|
||||
import json
|
||||
import os
|
||||
import requests
|
||||
import time
|
||||
import sys
|
||||
|
||||
|
||||
class Non200ErrorCodeReceived(Exception):
|
||||
def __init__(self, status_code, url, screenshot=None, xpath_data=None, page_html=None):
|
||||
# Set this so we can use it in other parts of the app
|
||||
self.status_code = status_code
|
||||
self.url = url
|
||||
self.screenshot = screenshot
|
||||
self.xpath_data = xpath_data
|
||||
self.page_text = None
|
||||
|
||||
if page_html:
|
||||
from changedetectionio import html_tools
|
||||
self.page_text = html_tools.html_to_text(page_html)
|
||||
return
|
||||
|
||||
|
||||
class JSActionExceptions(Exception):
|
||||
def __init__(self, status_code, url, screenshot, message=''):
|
||||
self.status_code = status_code
|
||||
self.url = url
|
||||
self.screenshot = screenshot
|
||||
self.message = message
|
||||
return
|
||||
|
||||
class PageUnloadable(Exception):
|
||||
def __init__(self, status_code, url, screenshot=False, message=False):
|
||||
# Set this so we can use it in other parts of the app
|
||||
self.status_code = status_code
|
||||
self.url = url
|
||||
self.screenshot = screenshot
|
||||
self.message = message
|
||||
return
|
||||
|
||||
class EmptyReply(Exception):
|
||||
def __init__(self, status_code, url, screenshot=None):
|
||||
# Set this so we can use it in other parts of the app
|
||||
self.status_code = status_code
|
||||
self.url = url
|
||||
self.screenshot = screenshot
|
||||
return
|
||||
|
||||
class ScreenshotUnavailable(Exception):
|
||||
def __init__(self, status_code, url, page_html=None):
|
||||
# Set this so we can use it in other parts of the app
|
||||
self.status_code = status_code
|
||||
self.url = url
|
||||
if page_html:
|
||||
from html_tools import html_to_text
|
||||
self.page_text = html_to_text(page_html)
|
||||
return
|
||||
|
||||
class ReplyWithContentButNoText(Exception):
|
||||
def __init__(self, status_code, url, screenshot=None):
|
||||
# Set this so we can use it in other parts of the app
|
||||
self.status_code = status_code
|
||||
self.url = url
|
||||
self.screenshot = screenshot
|
||||
return
|
||||
|
||||
class Fetcher():
|
||||
error = None
|
||||
status_code = None
|
||||
content = None
|
||||
headers = None
|
||||
|
||||
fetcher_description = "No description"
|
||||
webdriver_js_execute_code = None
|
||||
xpath_element_js = """
|
||||
// Include the getXpath script directly, easier than fetching
|
||||
!function(e,n){"object"==typeof exports&&"undefined"!=typeof module?module.exports=n():"function"==typeof define&&define.amd?define(n):(e=e||self).getXPath=n()}(this,function(){return function(e){var n=e;if(n&&n.id)return'//*[@id="'+n.id+'"]';for(var o=[];n&&Node.ELEMENT_NODE===n.nodeType;){for(var i=0,r=!1,d=n.previousSibling;d;)d.nodeType!==Node.DOCUMENT_TYPE_NODE&&d.nodeName===n.nodeName&&i++,d=d.previousSibling;for(d=n.nextSibling;d;){if(d.nodeName===n.nodeName){r=!0;break}d=d.nextSibling}o.push((n.prefix?n.prefix+":":"")+n.localName+(i||r?"["+(i+1)+"]":"")),n=n.parentNode}return o.length?"/"+o.reverse().join("/"):""}});
|
||||
|
||||
|
||||
const findUpTag = (el) => {
|
||||
let r = el
|
||||
chained_css = [];
|
||||
depth=0;
|
||||
|
||||
// Strategy 1: Keep going up until we hit an ID tag, imagine it's like #list-widget div h4
|
||||
while (r.parentNode) {
|
||||
if(depth==5) {
|
||||
break;
|
||||
}
|
||||
if('' !==r.id) {
|
||||
chained_css.unshift("#"+CSS.escape(r.id));
|
||||
final_selector= chained_css.join(' > ');
|
||||
// Be sure theres only one, some sites have multiples of the same ID tag :-(
|
||||
if (window.document.querySelectorAll(final_selector).length ==1 ) {
|
||||
return final_selector;
|
||||
}
|
||||
return null;
|
||||
} else {
|
||||
chained_css.unshift(r.tagName.toLowerCase());
|
||||
}
|
||||
r=r.parentNode;
|
||||
depth+=1;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
// @todo - if it's SVG or IMG, go into image diff mode
|
||||
var elements = window.document.querySelectorAll("div,span,form,table,tbody,tr,td,a,p,ul,li,h1,h2,h3,h4, header, footer, section, article, aside, details, main, nav, section, summary");
|
||||
var size_pos=[];
|
||||
// after page fetch, inject this JS
|
||||
// build a map of all elements and their positions (maybe that only include text?)
|
||||
var bbox;
|
||||
for (var i = 0; i < elements.length; i++) {
|
||||
bbox = elements[i].getBoundingClientRect();
|
||||
|
||||
// forget really small ones
|
||||
if (bbox['width'] <20 && bbox['height'] < 20 ) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// @todo the getXpath kind of sucks, it doesnt know when there is for example just one ID sometimes
|
||||
// it should not traverse when we know we can anchor off just an ID one level up etc..
|
||||
// maybe, get current class or id, keep traversing up looking for only class or id until there is just one match
|
||||
|
||||
// 1st primitive - if it has class, try joining it all and select, if theres only one.. well thats us.
|
||||
xpath_result=false;
|
||||
|
||||
try {
|
||||
var d= findUpTag(elements[i]);
|
||||
if (d) {
|
||||
xpath_result =d;
|
||||
}
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
}
|
||||
|
||||
// You could swap it and default to getXpath and then try the smarter one
|
||||
// default back to the less intelligent one
|
||||
if (!xpath_result) {
|
||||
try {
|
||||
// I've seen on FB and eBay that this doesnt work
|
||||
// ReferenceError: getXPath is not defined at eval (eval at evaluate (:152:29), <anonymous>:67:20) at UtilityScript.evaluate (<anonymous>:159:18) at UtilityScript.<anonymous> (<anonymous>:1:44)
|
||||
xpath_result = getXPath(elements[i]);
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if(window.getComputedStyle(elements[i]).visibility === "hidden") {
|
||||
continue;
|
||||
}
|
||||
|
||||
size_pos.push({
|
||||
xpath: xpath_result,
|
||||
width: Math.round(bbox['width']),
|
||||
height: Math.round(bbox['height']),
|
||||
left: Math.floor(bbox['left']),
|
||||
top: Math.floor(bbox['top']),
|
||||
childCount: elements[i].childElementCount
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// inject the current one set in the css_filter, which may be a CSS rule
|
||||
// used for displaying the current one in VisualSelector, where its not one we generated.
|
||||
if (css_filter.length) {
|
||||
q=false;
|
||||
try {
|
||||
// is it xpath?
|
||||
if (css_filter.startsWith('/') || css_filter.startsWith('xpath:')) {
|
||||
q=document.evaluate(css_filter.replace('xpath:',''), document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue;
|
||||
} else {
|
||||
q=document.querySelector(css_filter);
|
||||
}
|
||||
} catch (e) {
|
||||
// Maybe catch DOMException and alert?
|
||||
console.log(e);
|
||||
}
|
||||
bbox=false;
|
||||
if(q) {
|
||||
bbox = q.getBoundingClientRect();
|
||||
}
|
||||
|
||||
if (bbox && bbox['width'] >0 && bbox['height']>0) {
|
||||
size_pos.push({
|
||||
xpath: css_filter,
|
||||
width: bbox['width'],
|
||||
height: bbox['height'],
|
||||
left: bbox['left'],
|
||||
top: bbox['top'],
|
||||
childCount: q.childElementCount
|
||||
});
|
||||
}
|
||||
}
|
||||
// Window.width required for proper scaling in the frontend
|
||||
return {'size_pos':size_pos, 'browser_width': window.innerWidth};
|
||||
"""
|
||||
xpath_data = None
|
||||
|
||||
# Will be needed in the future by the VisualSelector, always get this where possible.
|
||||
screenshot = False
|
||||
system_http_proxy = os.getenv('HTTP_PROXY')
|
||||
system_https_proxy = os.getenv('HTTPS_PROXY')
|
||||
|
||||
# Time ONTOP of the system defined env minimum time
|
||||
render_extract_delay = 0
|
||||
|
||||
@abstractmethod
|
||||
def get_error(self):
|
||||
return self.error
|
||||
|
||||
@abstractmethod
|
||||
def run(self,
|
||||
url,
|
||||
timeout,
|
||||
request_headers,
|
||||
request_body,
|
||||
request_method,
|
||||
ignore_status_codes=False,
|
||||
current_css_filter=None):
|
||||
# Should set self.error, self.status_code and self.content
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def quit(self):
|
||||
return
|
||||
|
||||
@abstractmethod
|
||||
def get_last_status_code(self):
|
||||
return self.status_code
|
||||
|
||||
@abstractmethod
|
||||
# Return true/false if this checker is ready to run, in the case it needs todo some special config check etc
|
||||
def is_ready(self):
|
||||
return True
|
||||
|
||||
|
||||
# Maybe for the future, each fetcher provides its own diff output, could be used for text, image
|
||||
# the current one would return javascript output (as we use JS to generate the diff)
|
||||
#
|
||||
def available_fetchers():
|
||||
# See the if statement at the bottom of this file for how we switch between playwright and webdriver
|
||||
import inspect
|
||||
p = []
|
||||
for name, obj in inspect.getmembers(sys.modules[__name__], inspect.isclass):
|
||||
if inspect.isclass(obj):
|
||||
# @todo html_ is maybe better as fetcher_ or something
|
||||
# In this case, make sure to edit the default one in store.py and fetch_site_status.py
|
||||
if name.startswith('html_'):
|
||||
t = tuple([name, obj.fetcher_description])
|
||||
p.append(t)
|
||||
|
||||
return p
|
||||
|
||||
|
||||
class base_html_playwright(Fetcher):
|
||||
fetcher_description = "Playwright {}/Javascript".format(
|
||||
os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').capitalize()
|
||||
)
|
||||
if os.getenv("PLAYWRIGHT_DRIVER_URL"):
|
||||
fetcher_description += " via '{}'".format(os.getenv("PLAYWRIGHT_DRIVER_URL"))
|
||||
|
||||
browser_type = ''
|
||||
command_executor = ''
|
||||
|
||||
# Configs for Proxy setup
|
||||
# In the ENV vars, is prefixed with "playwright_proxy_", so it is for example "playwright_proxy_server"
|
||||
playwright_proxy_settings_mappings = ['bypass', 'server', 'username', 'password']
|
||||
|
||||
proxy = None
|
||||
|
||||
def __init__(self, proxy_override=None):
|
||||
|
||||
# .strip('"') is going to save someone a lot of time when they accidently wrap the env value
|
||||
self.browser_type = os.getenv("PLAYWRIGHT_BROWSER_TYPE", 'chromium').strip('"')
|
||||
self.command_executor = os.getenv(
|
||||
"PLAYWRIGHT_DRIVER_URL",
|
||||
'ws://playwright-chrome:3000'
|
||||
).strip('"')
|
||||
|
||||
# If any proxy settings are enabled, then we should setup the proxy object
|
||||
proxy_args = {}
|
||||
for k in self.playwright_proxy_settings_mappings:
|
||||
v = os.getenv('playwright_proxy_' + k, False)
|
||||
if v:
|
||||
proxy_args[k] = v.strip('"')
|
||||
|
||||
if proxy_args:
|
||||
self.proxy = proxy_args
|
||||
|
||||
# allow per-watch proxy selection override
|
||||
if proxy_override:
|
||||
# https://playwright.dev/docs/network#http-proxy
|
||||
from urllib.parse import urlparse
|
||||
parsed = urlparse(proxy_override)
|
||||
proxy_url = "{}://{}:{}".format(parsed.scheme, parsed.hostname, parsed.port)
|
||||
self.proxy = {'server': proxy_url}
|
||||
if parsed.username:
|
||||
self.proxy['username'] = parsed.username
|
||||
if parsed.password:
|
||||
self.proxy['password'] = parsed.password
|
||||
|
||||
def run(self,
|
||||
url,
|
||||
timeout,
|
||||
request_headers,
|
||||
request_body,
|
||||
request_method,
|
||||
ignore_status_codes=False,
|
||||
current_css_filter=None):
|
||||
|
||||
from playwright.sync_api import sync_playwright
|
||||
import playwright._impl._api_types
|
||||
from playwright._impl._api_types import Error, TimeoutError
|
||||
response = None
|
||||
with sync_playwright() as p:
|
||||
browser_type = getattr(p, self.browser_type)
|
||||
|
||||
# Seemed to cause a connection Exception even tho I can see it connect
|
||||
# self.browser = browser_type.connect(self.command_executor, timeout=timeout*1000)
|
||||
# 60,000 connection timeout only
|
||||
browser = browser_type.connect_over_cdp(self.command_executor, timeout=60000)
|
||||
|
||||
# Set user agent to prevent Cloudflare from blocking the browser
|
||||
# Use the default one configured in the App.py model that's passed from fetch_site_status.py
|
||||
context = browser.new_context(
|
||||
user_agent=request_headers['User-Agent'] if request_headers.get('User-Agent') else 'Mozilla/5.0',
|
||||
proxy=self.proxy,
|
||||
# This is needed to enable JavaScript execution on GitHub and others
|
||||
bypass_csp=True,
|
||||
# Should never be needed
|
||||
accept_downloads=False
|
||||
)
|
||||
|
||||
if len(request_headers):
|
||||
context.set_extra_http_headers(request_headers)
|
||||
|
||||
page = context.new_page()
|
||||
try:
|
||||
page.set_default_navigation_timeout(90000)
|
||||
page.set_default_timeout(90000)
|
||||
|
||||
# Listen for all console events and handle errors
|
||||
page.on("console", lambda msg: print(f"Playwright console: Watch URL: {url} {msg.type}: {msg.text} {msg.args}"))
|
||||
|
||||
# Bug - never set viewport size BEFORE page.goto
|
||||
|
||||
# Waits for the next navigation. Using Python context manager
|
||||
# prevents a race condition between clicking and waiting for a navigation.
|
||||
with page.expect_navigation():
|
||||
response = page.goto(url, wait_until='load')
|
||||
|
||||
|
||||
except playwright._impl._api_types.TimeoutError as e:
|
||||
context.close()
|
||||
browser.close()
|
||||
# This can be ok, we will try to grab what we could retrieve
|
||||
pass
|
||||
|
||||
except Exception as e:
|
||||
print("other exception when page.goto")
|
||||
print(str(e))
|
||||
context.close()
|
||||
browser.close()
|
||||
raise PageUnloadable(url=url, status_code=None, message=str(e))
|
||||
|
||||
if response is None:
|
||||
context.close()
|
||||
browser.close()
|
||||
print("response object was none")
|
||||
raise EmptyReply(url=url, status_code=None)
|
||||
|
||||
# Bug 2(?) Set the viewport size AFTER loading the page
|
||||
page.set_viewport_size({"width": 1280, "height": 1024})
|
||||
extra_wait = int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay
|
||||
time.sleep(extra_wait)
|
||||
|
||||
if self.webdriver_js_execute_code is not None:
|
||||
try:
|
||||
page.evaluate(self.webdriver_js_execute_code)
|
||||
except Exception as e:
|
||||
# Is it possible to get a screenshot?
|
||||
error_screenshot = False
|
||||
try:
|
||||
page.screenshot(type='jpeg',
|
||||
clip={'x': 1.0, 'y': 1.0, 'width': 1280, 'height': 1024},
|
||||
quality=1)
|
||||
|
||||
# The actual screenshot
|
||||
error_screenshot = page.screenshot(type='jpeg',
|
||||
full_page=True,
|
||||
quality=int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72)))
|
||||
except Exception as s:
|
||||
pass
|
||||
|
||||
raise JSActionExceptions(status_code=response.status, screenshot=error_screenshot, message=str(e), url=url)
|
||||
|
||||
self.content = page.content()
|
||||
self.status_code = response.status
|
||||
self.headers = response.all_headers()
|
||||
|
||||
if current_css_filter is not None:
|
||||
page.evaluate("var css_filter={}".format(json.dumps(current_css_filter)))
|
||||
else:
|
||||
page.evaluate("var css_filter=''")
|
||||
|
||||
self.xpath_data = page.evaluate("async () => {" + self.xpath_element_js + "}")
|
||||
|
||||
# Bug 3 in Playwright screenshot handling
|
||||
# Some bug where it gives the wrong screenshot size, but making a request with the clip set first seems to solve it
|
||||
# JPEG is better here because the screenshots can be very very large
|
||||
|
||||
# Screenshots also travel via the ws:// (websocket) meaning that the binary data is base64 encoded
|
||||
# which will significantly increase the IO size between the server and client, it's recommended to use the lowest
|
||||
# acceptable screenshot quality here
|
||||
try:
|
||||
# Quality set to 1 because it's not used, just used as a work-around for a bug, no need to change this.
|
||||
page.screenshot(type='jpeg', clip={'x': 1.0, 'y': 1.0, 'width': 1280, 'height': 1024}, quality=1)
|
||||
# The actual screenshot
|
||||
self.screenshot = page.screenshot(type='jpeg', full_page=True, quality=int(os.getenv("PLAYWRIGHT_SCREENSHOT_QUALITY", 72)))
|
||||
except Exception as e:
|
||||
context.close()
|
||||
browser.close()
|
||||
raise ScreenshotUnavailable(url=url, status_code=None)
|
||||
|
||||
if len(self.content.strip()) == 0:
|
||||
context.close()
|
||||
browser.close()
|
||||
print("Content was empty")
|
||||
raise EmptyReply(url=url, status_code=None, screenshot=self.screenshot)
|
||||
|
||||
context.close()
|
||||
browser.close()
|
||||
|
||||
if not ignore_status_codes and self.status_code!=200:
|
||||
raise Non200ErrorCodeReceived(url=url, status_code=self.status_code, page_html=self.content, screenshot=self.screenshot)
|
||||
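# Illustrative sketch (not part of the upstream file): why the screenshot quality matters
# when connecting over PLAYWRIGHT_DRIVER_URL - the JPEG travels base64-encoded over the
# websocket, which inflates the transfer by roughly a third.
def _example_screenshot_transfer_size(jpeg_bytes):
    import base64
    # Approximate number of bytes that actually cross the ws:// connection
    return len(base64.b64encode(jpeg_bytes))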
|
||||
class base_html_webdriver(Fetcher):
|
||||
if os.getenv("WEBDRIVER_URL"):
|
||||
fetcher_description = "WebDriver Chrome/Javascript via '{}'".format(os.getenv("WEBDRIVER_URL"))
|
||||
else:
|
||||
fetcher_description = "WebDriver Chrome/Javascript"
|
||||
|
||||
command_executor = ''
|
||||
|
||||
# Configs for Proxy setup
|
||||
# In the ENV vars, each setting is prefixed with "webdriver_", so it is for example "webdriver_sslProxy"
|
||||
selenium_proxy_settings_mappings = ['proxyType', 'ftpProxy', 'httpProxy', 'noProxy',
|
||||
'proxyAutoconfigUrl', 'sslProxy', 'autodetect',
|
||||
'socksProxy', 'socksVersion', 'socksUsername', 'socksPassword']
|
||||
proxy = None
|
||||
|
||||
def __init__(self, proxy_override=None):
|
||||
from selenium.webdriver.common.proxy import Proxy as SeleniumProxy
|
||||
|
||||
# .strip('"') is going to save someone a lot of time when they accidently wrap the env value
|
||||
self.command_executor = os.getenv("WEBDRIVER_URL", 'http://browser-chrome:4444/wd/hub').strip('"')
|
||||
|
||||
# If any proxy settings are enabled, then we should setup the proxy object
|
||||
proxy_args = {}
|
||||
for k in self.selenium_proxy_settings_mappings:
|
||||
v = os.getenv('webdriver_' + k, False)
|
||||
if v:
|
||||
proxy_args[k] = v.strip('"')
|
||||
|
||||
# Map back standard HTTP_ and HTTPS_PROXY to webDriver httpProxy/sslProxy
|
||||
if not proxy_args.get('httpProxy') and self.system_http_proxy:
|
||||
proxy_args['httpProxy'] = self.system_http_proxy
|
||||
if not proxy_args.get('sslProxy') and self.system_https_proxy:
|
||||
proxy_args['sslProxy'] = self.system_https_proxy
|
||||
|
||||
# Allows overriding the proxy on a per-request basis
|
||||
if proxy_override is not None:
|
||||
proxy_args['httpProxy'] = proxy_override
|
||||
|
||||
if proxy_args:
|
||||
self.proxy = SeleniumProxy(raw=proxy_args)
|
||||
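# Illustrative sketch (hypothetical env values, not part of the upstream file): how the
# "webdriver_" prefixed environment variables above end up inside the SeleniumProxy object.
def _example_selenium_proxy_from_env():
    import os
    os.environ['webdriver_httpProxy'] = 'http://proxy.example.com:3128'
    os.environ['webdriver_sslProxy'] = 'http://proxy.example.com:3128'
    fetcher = base_html_webdriver()
    # fetcher.proxy is a selenium Proxy() built from
    #   {'httpProxy': 'http://proxy.example.com:3128', 'sslProxy': 'http://proxy.example.com:3128'}
    return fetcher.proxy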
|
||||
def run(self,
|
||||
url,
|
||||
timeout,
|
||||
request_headers,
|
||||
request_body,
|
||||
request_method,
|
||||
ignore_status_codes=False,
|
||||
current_css_filter=None):
|
||||
|
||||
from selenium import webdriver
|
||||
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
|
||||
from selenium.common.exceptions import WebDriverException
|
||||
# request_body, request_method unused for now, until some magic in the future happens.
|
||||
|
||||
# check env for WEBDRIVER_URL
|
||||
self.driver = webdriver.Remote(
|
||||
command_executor=self.command_executor,
|
||||
desired_capabilities=DesiredCapabilities.CHROME,
|
||||
proxy=self.proxy)
|
||||
|
||||
try:
|
||||
self.driver.get(url)
|
||||
except WebDriverException as e:
|
||||
# Be sure we close the session window
|
||||
self.quit()
|
||||
raise
|
||||
|
||||
self.driver.set_window_size(1280, 1024)
|
||||
self.driver.implicitly_wait(int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)))
|
||||
|
||||
if self.webdriver_js_execute_code is not None:
|
||||
self.driver.execute_script(self.webdriver_js_execute_code)
|
||||
# Selenium doesn't automatically wait for actions as well as Playwright does, so wait again
|
||||
self.driver.implicitly_wait(int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)))
|
||||
|
||||
self.screenshot = self.driver.get_screenshot_as_png()
|
||||
|
||||
# @todo - how to check this? is it possible?
|
||||
self.status_code = 200
|
||||
# @todo somehow we should try to get this working for WebDriver
|
||||
# raise EmptyReply(url=url, status_code=r.status_code)
|
||||
|
||||
# @todo - dom wait loaded?
|
||||
time.sleep(int(os.getenv("WEBDRIVER_DELAY_BEFORE_CONTENT_READY", 5)) + self.render_extract_delay)
|
||||
self.content = self.driver.page_source
|
||||
self.headers = {}
|
||||
|
||||
# Does the connection to the webdriver work? run a test connection.
|
||||
def is_ready(self):
|
||||
from selenium import webdriver
|
||||
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
|
||||
from selenium.common.exceptions import WebDriverException
|
||||
|
||||
self.driver = webdriver.Remote(
|
||||
command_executor=self.command_executor,
|
||||
desired_capabilities=DesiredCapabilities.CHROME)
|
||||
|
||||
# driver.quit() seems to cause better exceptions
|
||||
self.quit()
|
||||
return True
|
||||
|
||||
def quit(self):
|
||||
if self.driver:
|
||||
try:
|
||||
self.driver.quit()
|
||||
except Exception as e:
|
||||
print("Exception in chrome shutdown/quit" + str(e))
|
||||
|
||||
|
||||
# "html_requests" is listed as the default fetcher in store.py!
|
||||
class html_requests(Fetcher):
|
||||
fetcher_description = "Basic fast Plaintext/HTTP Client"
|
||||
|
||||
def __init__(self, proxy_override=None):
|
||||
self.proxy_override = proxy_override
|
||||
|
||||
def run(self,
|
||||
url,
|
||||
timeout,
|
||||
request_headers,
|
||||
request_body,
|
||||
request_method,
|
||||
ignore_status_codes=False,
|
||||
current_css_filter=None):
|
||||
|
||||
proxies = {}
|
||||
|
||||
# Allows overriding the proxy on a per-request basis
|
||||
if self.proxy_override:
|
||||
proxies = {'http': self.proxy_override, 'https': self.proxy_override, 'ftp': self.proxy_override}
|
||||
else:
|
||||
if self.system_http_proxy:
|
||||
proxies['http'] = self.system_http_proxy
|
||||
if self.system_https_proxy:
|
||||
proxies['https'] = self.system_https_proxy
|
||||
|
||||
r = requests.request(method=request_method,
|
||||
data=request_body,
|
||||
url=url,
|
||||
headers=request_headers,
|
||||
timeout=timeout,
|
||||
proxies=proxies,
|
||||
verify=False)
|
||||
|
||||
# If the response did not tell us what encoding format to expect, then use chardet to override what `requests` thinks.
|
||||
# For example - some sites don't tell us it's utf-8, but return utf-8 content
|
||||
# This seems to not occur when using webdriver/selenium, it seems to detect the text encoding more reliably.
|
||||
# https://github.com/psf/requests/issues/1604 good info about requests encoding detection
|
||||
if not r.headers.get('content-type') or 'charset=' not in r.headers.get('content-type'):
|
||||
encoding = chardet.detect(r.content)['encoding']
|
||||
if encoding:
|
||||
r.encoding = encoding
|
||||
|
||||
if not r.content or not len(r.content):
|
||||
raise EmptyReply(url=url, status_code=r.status_code)
|
||||
|
||||
# @todo test this
|
||||
# @todo maybe you really want to test zero-byte return pages?
|
||||
if r.status_code != 200 and not ignore_status_codes:
|
||||
# maybe check with content works?
|
||||
raise Non200ErrorCodeReceived(url=url, status_code=r.status_code, page_html=r.text)
|
||||
|
||||
self.status_code = r.status_code
|
||||
self.content = r.text
|
||||
self.headers = r.headers
|
||||
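# Illustrative sketch (not part of the upstream file): the charset fallback used above,
# shown standalone - when the server omits "charset=" we let chardet pick the encoding.
def _example_charset_fallback(response):
    content_type = response.headers.get('content-type') or ''
    if 'charset=' not in content_type:
        detected = chardet.detect(response.content)['encoding']
        if detected:
            response.encoding = detected
    return response.text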
|
||||
|
||||
# Decide which is the 'real' HTML webdriver, this is more a system wide config
|
||||
# rather than site-specific.
|
||||
use_playwright_as_chrome_fetcher = os.getenv('PLAYWRIGHT_DRIVER_URL', False)
|
||||
if use_playwright_as_chrome_fetcher:
|
||||
html_webdriver = base_html_playwright
|
||||
else:
|
||||
html_webdriver = base_html_webdriver
|
||||
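# Illustrative sketch (hypothetical usage, not part of the upstream file): callers such as
# fetch_site_status resolve a backend by name, so asking for 'html_webdriver' transparently
# returns whichever class was selected by the if/else above.
def _example_resolve_backend(prefer_backend='html_webdriver'):
    import sys
    module = sys.modules[__name__]
    return getattr(module, prefer_backend, html_requests)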
52
changedetectionio/diff.py
Normal file
@@ -0,0 +1,52 @@
|
||||
# Used for the notifications; the front-end uses a JS library to render the diff
|
||||
|
||||
import difflib
|
||||
|
||||
|
||||
def same_slicer(l, a, b):
|
||||
if a == b:
|
||||
return [l[a]]
|
||||
else:
|
||||
return l[a:b]
|
||||
|
||||
# like .compare but a little different output
|
||||
def customSequenceMatcher(before, after, include_equal=False):
|
||||
cruncher = difflib.SequenceMatcher(isjunk=lambda x: x in " \\t", a=before, b=after)
|
||||
|
||||
# @todo Line-by-line mode instead of bunched, including `after` that is not in `before` (maybe unset?)
|
||||
for tag, alo, ahi, blo, bhi in cruncher.get_opcodes():
|
||||
if include_equal and tag == 'equal':
|
||||
g = before[alo:ahi]
|
||||
yield g
|
||||
elif tag == 'delete':
|
||||
g = ["(removed) " + i for i in same_slicer(before, alo, ahi)]
|
||||
yield g
|
||||
elif tag == 'replace':
|
||||
g = ["(changed) " + i for i in same_slicer(before, alo, ahi)]
|
||||
g += ["(into ) " + i for i in same_slicer(after, blo, bhi)]
|
||||
yield g
|
||||
elif tag == 'insert':
|
||||
g = ["(added ) " + i for i in same_slicer(after, blo, bhi)]
|
||||
yield g
|
||||
|
||||
# only_differences - only return info about the differences, no context
|
||||
# line_feed_sep could be "<br/>" or "<li>" or "\n" etc
|
||||
def render_diff(previous_file, newest_file, include_equal=False, line_feed_sep="\n"):
|
||||
with open(newest_file, 'r') as f:
|
||||
newest_version_file_contents = f.read()
|
||||
newest_version_file_contents = [line.rstrip() for line in newest_version_file_contents.splitlines()]
|
||||
|
||||
if previous_file:
|
||||
with open(previous_file, 'r') as f:
|
||||
previous_version_file_contents = f.read()
|
||||
previous_version_file_contents = [line.rstrip() for line in previous_version_file_contents.splitlines()]
|
||||
else:
|
||||
previous_version_file_contents = ""
|
||||
|
||||
rendered_diff = customSequenceMatcher(previous_version_file_contents,
|
||||
newest_version_file_contents,
|
||||
include_equal)
|
||||
|
||||
# Recursively join lists
|
||||
f = lambda L: line_feed_sep.join([f(x) if type(x) is list else x for x in L])
|
||||
return f(rendered_diff)
|
||||
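# Illustrative sketch (not part of the upstream file): customSequenceMatcher() output for a
# small change, flattened with the same "recursively join lists" trick as render_diff().
def _example_render_small_diff():
    before = ["price: 10.00", "in stock"]
    after = ["price: 12.50", "in stock"]
    joiner = lambda L: "\n".join([joiner(x) if type(x) is list else x for x in L])
    return joiner(customSequenceMatcher(before, after, include_equal=False))
    # -> "(changed) price: 10.00\n(into ) price: 12.50"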
323
changedetectionio/fetch_site_status.py
Normal file
@@ -0,0 +1,323 @@
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import urllib3
|
||||
|
||||
from changedetectionio import content_fetcher, html_tools
|
||||
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
|
||||
# Some common stuff here that can be moved to a base class
|
||||
# (set_proxy_from_list)
|
||||
class perform_site_check():
|
||||
screenshot = None
|
||||
xpath_data = None
|
||||
|
||||
def __init__(self, *args, datastore, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.datastore = datastore
|
||||
|
||||
# If there was a proxy list enabled, figure out what proxy_args/which proxy to use
|
||||
# Returns the proxy as a URL
|
||||
# if watch.proxy use that
|
||||
# fetcher.proxy_override = watch.proxy or main config proxy
|
||||
# Allows overriding the proxy on a per-request basis
|
||||
# ALWAYS use the first one if nothing is selected
|
||||
|
||||
def set_proxy_from_list(self, watch):
|
||||
proxy_args = None
|
||||
if self.datastore.proxy_list is None:
|
||||
return None
|
||||
|
||||
# If it's a valid one
|
||||
if any([watch['proxy'] in p for p in self.datastore.proxy_list]):
|
||||
proxy_args = self.datastore.proxy_list.get(watch['proxy']).get('url')
|
||||
|
||||
# not valid (including None), try the system one
|
||||
else:
|
||||
system_proxy = self.datastore.data['settings']['requests']['proxy']
|
||||
# Is not None and exists
|
||||
if system_proxy and self.datastore.proxy_list.get(system_proxy):
|
||||
proxy_args = self.datastore.proxy_list.get(system_proxy).get('url')
|
||||
|
||||
# Fallback - Did not resolve anything, use the first available
|
||||
if proxy_args is None:
|
||||
first_default = list(self.datastore.proxy_list)[0]
|
||||
proxy_args = self.datastore.proxy_list.get(first_default).get('url')
|
||||
|
||||
return proxy_args
|
||||
|
||||
# Doesn't look like python supports forward slash auto enclosure in re.findall
|
||||
# So convert it to inline flag "foobar(?i)" type configuration
|
||||
def forward_slash_enclosed_regex_to_options(self, regex):
|
||||
res = re.search(r'^/(.*?)/(\w+)$', regex, re.IGNORECASE)
|
||||
|
||||
if res:
|
||||
regex = res.group(1)
|
||||
regex += '(?{})'.format(res.group(2))
|
||||
else:
|
||||
regex += '(?{})'.format('i')
|
||||
|
||||
return regex
|
||||
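# Illustrative examples (not part of the upstream file) of the conversion above:
#   forward_slash_enclosed_regex_to_options('/price.*/i')  ->  'price.*(?i)'
#   forward_slash_enclosed_regex_to_options('/stock/xi')   ->  'stock(?xi)'
#   forward_slash_enclosed_regex_to_options('price.*')     ->  'price.*(?i)'  (defaults to case-insensitive)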
|
||||
|
||||
def run(self, uuid):
|
||||
changed_detected = False
|
||||
screenshot = False # as bytes
|
||||
stripped_text_from_html = ""
|
||||
|
||||
watch = self.datastore.data['watching'].get(uuid)
|
||||
if not watch:
|
||||
return
|
||||
|
||||
# Protect against file:// access
|
||||
if re.search(r'^file', watch['url'], re.IGNORECASE) and not os.getenv('ALLOW_FILE_URI', False):
|
||||
raise Exception(
|
||||
"file:// type access is denied for security reasons."
|
||||
)
|
||||
|
||||
# Unset any existing notification error
|
||||
update_obj = {'last_notification_error': False, 'last_error': False}
|
||||
|
||||
extra_headers = self.datastore.data['watching'][uuid].get('headers')
|
||||
|
||||
# Tweak the base config with the per-watch ones
|
||||
request_headers = self.datastore.data['settings']['headers'].copy()
|
||||
request_headers.update(extra_headers)
|
||||
|
||||
# https://github.com/psf/requests/issues/4525
|
||||
# Requests doesn't yet support brotli encoding, so don't put 'br' here, be totally sure that the user cannot
|
||||
# do this by accident.
|
||||
if 'Accept-Encoding' in request_headers and "br" in request_headers['Accept-Encoding']:
|
||||
request_headers['Accept-Encoding'] = request_headers['Accept-Encoding'].replace(', br', '')
|
||||
|
||||
timeout = self.datastore.data['settings']['requests'].get('timeout')
|
||||
url = watch.get('url')
|
||||
request_body = self.datastore.data['watching'][uuid].get('body')
|
||||
request_method = self.datastore.data['watching'][uuid].get('method')
|
||||
ignore_status_codes = self.datastore.data['watching'][uuid].get('ignore_status_codes', False)
|
||||
|
||||
# source: support
|
||||
is_source = False
|
||||
if url.startswith('source:'):
|
||||
url = url.replace('source:', '')
|
||||
is_source = True
|
||||
|
||||
# Pluggable content fetcher
|
||||
prefer_backend = watch['fetch_backend']
|
||||
if hasattr(content_fetcher, prefer_backend):
|
||||
klass = getattr(content_fetcher, prefer_backend)
|
||||
else:
|
||||
# If the klass doesn't exist, just use a default
|
||||
klass = getattr(content_fetcher, "html_requests")
|
||||
|
||||
proxy_url = self.set_proxy_from_list(watch)
|
||||
if proxy_url:
|
||||
print ("UUID {} Using proxy {}".format(uuid, proxy_url))
|
||||
fetcher = klass(proxy_override=proxy_url)
|
||||
|
||||
# Configurable per-watch or global extra delay before extracting text (for webDriver types)
|
||||
system_webdriver_delay = self.datastore.data['settings']['application'].get('webdriver_delay', None)
|
||||
if watch['webdriver_delay'] is not None:
|
||||
fetcher.render_extract_delay = watch['webdriver_delay']
|
||||
elif system_webdriver_delay is not None:
|
||||
fetcher.render_extract_delay = system_webdriver_delay
|
||||
|
||||
if watch['webdriver_js_execute_code'] is not None and watch['webdriver_js_execute_code'].strip():
|
||||
fetcher.webdriver_js_execute_code = watch['webdriver_js_execute_code']
|
||||
|
||||
fetcher.run(url, timeout, request_headers, request_body, request_method, ignore_status_codes, watch['css_filter'])
|
||||
fetcher.quit()
|
||||
|
||||
self.screenshot = fetcher.screenshot
|
||||
self.xpath_data = fetcher.xpath_data
|
||||
|
||||
# Fetching complete, now filters
|
||||
# @todo move to class / maybe inside of fetcher abstract base?
|
||||
|
||||
# @note: I feel like the following should be in a more obvious chain system
|
||||
# - Check filter text
|
||||
# - Is the checksum different?
|
||||
# - Do we convert to JSON?
|
||||
# https://stackoverflow.com/questions/41817578/basic-method-chaining ?
|
||||
# return content().textfilter().jsonextract().checksumcompare() ?
|
||||
|
||||
is_json = 'application/json' in fetcher.headers.get('Content-Type', '')
|
||||
is_html = not is_json
|
||||
|
||||
# source: support, basically treat it as plaintext
|
||||
if is_source:
|
||||
is_html = False
|
||||
is_json = False
|
||||
|
||||
css_filter_rule = watch['css_filter']
|
||||
subtractive_selectors = watch.get(
|
||||
"subtractive_selectors", []
|
||||
) + self.datastore.data["settings"]["application"].get(
|
||||
"global_subtractive_selectors", []
|
||||
)
|
||||
|
||||
has_filter_rule = css_filter_rule and len(css_filter_rule.strip())
|
||||
has_subtractive_selectors = subtractive_selectors and len(subtractive_selectors[0].strip())
|
||||
|
||||
if is_json and not has_filter_rule:
|
||||
css_filter_rule = "json:$"
|
||||
has_filter_rule = True
|
||||
|
||||
if has_filter_rule:
|
||||
if 'json:' in css_filter_rule:
|
||||
stripped_text_from_html = html_tools.extract_json_as_string(content=fetcher.content, jsonpath_filter=css_filter_rule)
|
||||
is_html = False
|
||||
|
||||
if is_html or is_source:
|
||||
|
||||
# First, apply workarounds for HTML obfuscations before any filtering or text extraction
|
||||
fetcher.content = html_tools.workarounds_for_obfuscations(fetcher.content)
|
||||
html_content = fetcher.content
|
||||
|
||||
# Not JSON - if the content-type is text/plain, don't treat it as HTML below
|
||||
if 'text/plain' in fetcher.headers.get('Content-Type', '').lower():
|
||||
# Don't run get_text or xpath/css filters on plaintext
|
||||
stripped_text_from_html = html_content
|
||||
else:
|
||||
# Then we assume HTML
|
||||
if has_filter_rule:
|
||||
# For HTML/XML we offer xpath as an option, just start a regular xPath "/.."
|
||||
if css_filter_rule[0] == '/' or css_filter_rule.startswith('xpath:'):
|
||||
html_content = html_tools.xpath_filter(xpath_filter=css_filter_rule.replace('xpath:', ''),
|
||||
html_content=fetcher.content)
|
||||
else:
|
||||
# CSS Filter, extract the HTML that matches and feed that into the existing inscriptis::get_text
|
||||
html_content = html_tools.css_filter(css_filter=css_filter_rule, html_content=fetcher.content)
|
||||
|
||||
if has_subtractive_selectors:
|
||||
html_content = html_tools.element_removal(subtractive_selectors, html_content)
|
||||
|
||||
if not is_source:
|
||||
# extract text
|
||||
stripped_text_from_html = \
|
||||
html_tools.html_to_text(
|
||||
html_content,
|
||||
render_anchor_tag_content=self.datastore.data["settings"][
|
||||
"application"].get(
|
||||
"render_anchor_tag_content", False)
|
||||
)
|
||||
|
||||
elif is_source:
|
||||
stripped_text_from_html = html_content
|
||||
|
||||
# Re #340 - return the content before the 'ignore text' was applied
|
||||
text_content_before_ignored_filter = stripped_text_from_html.encode('utf-8')
|
||||
|
||||
|
||||
# Treat pages with no renderable text content as a change? No by default
|
||||
empty_pages_are_a_change = self.datastore.data['settings']['application'].get('empty_pages_are_a_change', False)
|
||||
if not is_json and not empty_pages_are_a_change and len(stripped_text_from_html.strip()) == 0:
|
||||
raise content_fetcher.ReplyWithContentButNoText(url=url, status_code=fetcher.get_last_status_code(), screenshot=screenshot)
|
||||
|
||||
# We rely on the actual text in the html output.. many sites have random script vars etc,
|
||||
# in the future we'll implement other mechanisms.
|
||||
|
||||
update_obj["last_check_status"] = fetcher.get_last_status_code()
|
||||
|
||||
# If there's text to skip
|
||||
# @todo we could abstract out the get_text() to handle this cleaner
|
||||
text_to_ignore = watch.get('ignore_text', []) + self.datastore.data['settings']['application'].get('global_ignore_text', [])
|
||||
if len(text_to_ignore):
|
||||
stripped_text_from_html = html_tools.strip_ignore_text(stripped_text_from_html, text_to_ignore)
|
||||
else:
|
||||
stripped_text_from_html = stripped_text_from_html.encode('utf8')
|
||||
|
||||
# Re #615 - Extract text by regex
|
||||
extract_text = watch.get('extract_text', [])
|
||||
if len(extract_text) > 0:
|
||||
regex_matched_output = []
|
||||
for s_re in extract_text:
|
||||
# in case they specified something in '/.../x'
|
||||
regex = self.forward_slash_enclosed_regex_to_options(s_re)
|
||||
result = re.findall(regex.encode('utf-8'), stripped_text_from_html)
|
||||
|
||||
for l in result:
|
||||
if type(l) is tuple:
|
||||
#@todo - some formatter option default (between groups)
|
||||
regex_matched_output += list(l) + [b'\n']
|
||||
else:
|
||||
# @todo - some formatter option default (between each ungrouped result)
|
||||
regex_matched_output += [l] + [b'\n']
|
||||
|
||||
# Now we will only show what the regex matched
|
||||
stripped_text_from_html = b''
|
||||
text_content_before_ignored_filter = b''
|
||||
if regex_matched_output:
|
||||
# @todo some formatter for presentation?
|
||||
stripped_text_from_html = b''.join(regex_matched_output)
|
||||
text_content_before_ignored_filter = stripped_text_from_html
|
||||
|
||||
|
||||
# Re #133 - if we should strip whitespaces from triggering the change detected comparison
|
||||
if self.datastore.data['settings']['application'].get('ignore_whitespace', False):
|
||||
fetched_md5 = hashlib.md5(stripped_text_from_html.translate(None, b'\r\n\t ')).hexdigest()
|
||||
else:
|
||||
fetched_md5 = hashlib.md5(stripped_text_from_html).hexdigest()
|
||||
|
||||
############ Blocking rules, after checksum #################
|
||||
blocked = False
|
||||
|
||||
if len(watch['trigger_text']):
|
||||
# Assume blocked
|
||||
blocked = True
|
||||
# Filter and trigger works the same, so reuse it
|
||||
# It should return the line numbers that match
|
||||
result = html_tools.strip_ignore_text(content=str(stripped_text_from_html),
|
||||
wordlist=watch['trigger_text'],
|
||||
mode="line numbers")
|
||||
# Unblock if the trigger was found
|
||||
if result:
|
||||
blocked = False
|
||||
|
||||
|
||||
if len(watch['text_should_not_be_present']):
|
||||
# If anything matched, then we should block a change from happening
|
||||
result = html_tools.strip_ignore_text(content=str(stripped_text_from_html),
|
||||
wordlist=watch['text_should_not_be_present'],
|
||||
mode="line numbers")
|
||||
if result:
|
||||
blocked = True
|
||||
|
||||
# The main thing that all this at the moment comes down to :)
|
||||
if watch['previous_md5'] != fetched_md5:
|
||||
changed_detected = True
|
||||
|
||||
# Looks like something changed, but did it match all the rules?
|
||||
if blocked:
|
||||
changed_detected = False
|
||||
|
||||
# Extract title as title
|
||||
if is_html:
|
||||
if self.datastore.data['settings']['application']['extract_title_as_title'] or watch['extract_title_as_title']:
|
||||
if not watch['title'] or not len(watch['title']):
|
||||
update_obj['title'] = html_tools.extract_element(find='title', html_content=fetcher.content)
|
||||
|
||||
if changed_detected:
|
||||
if watch.get('check_unique_lines', False):
|
||||
has_unique_lines = watch.lines_contain_something_unique_compared_to_history(lines=stripped_text_from_html.splitlines())
|
||||
# One or more lines? unsure?
|
||||
if not has_unique_lines:
|
||||
logging.debug("check_unique_lines: UUID {} didnt have anything new setting change_detected=False".format(uuid))
|
||||
changed_detected = False
|
||||
else:
|
||||
logging.debug("check_unique_lines: UUID {} had unique content".format(uuid))
|
||||
|
||||
# Always record the new checksum
|
||||
update_obj["previous_md5"] = fetched_md5
|
||||
|
||||
# On the first run of a site, watch['previous_md5'] will be None, set it to the current one.
|
||||
if not watch.get('previous_md5'):
|
||||
watch['previous_md5'] = fetched_md5
|
||||
|
||||
return changed_detected, update_obj, text_content_before_ignored_filter
|
||||
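# Illustrative sketch (hypothetical caller, not part of the upstream file): how a worker
# might drive perform_site_check - `datastore` and `uuid` are assumed to already exist.
def _example_worker_step(datastore, uuid):
    update_handler = perform_site_check(datastore=datastore)
    changed_detected, update_obj, contents = update_handler.run(uuid)
    # update_obj carries keys such as 'previous_md5' and 'last_check_status' which the caller
    # is expected to merge back into the watch; `contents` is the text snapshot (taken before
    # the 'ignore text' rules were applied) that would be written into the watch history.
    return changed_detected, update_obj, contents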
409
changedetectionio/forms.py
Normal file
@@ -0,0 +1,409 @@
|
||||
import re
|
||||
|
||||
from wtforms import (
|
||||
BooleanField,
|
||||
Field,
|
||||
Form,
|
||||
IntegerField,
|
||||
PasswordField,
|
||||
RadioField,
|
||||
SelectField,
|
||||
StringField,
|
||||
SubmitField,
|
||||
TextAreaField,
|
||||
fields,
|
||||
validators,
|
||||
widgets,
|
||||
)
|
||||
from wtforms.validators import ValidationError
|
||||
|
||||
from changedetectionio import content_fetcher
|
||||
from changedetectionio.notification import (
|
||||
default_notification_body,
|
||||
default_notification_format,
|
||||
default_notification_title,
|
||||
valid_notification_formats,
|
||||
)
|
||||
|
||||
from wtforms.fields import FormField
|
||||
|
||||
valid_method = {
|
||||
'GET',
|
||||
'POST',
|
||||
'PUT',
|
||||
'PATCH',
|
||||
'DELETE',
|
||||
}
|
||||
|
||||
default_method = 'GET'
|
||||
|
||||
|
||||
class StringListField(StringField):
|
||||
widget = widgets.TextArea()
|
||||
|
||||
def _value(self):
|
||||
if self.data:
|
||||
# ignore empty lines in the storage
|
||||
data = list(filter(lambda x: len(x.strip()), self.data))
|
||||
# Apply strip to each line
|
||||
data = list(map(lambda x: x.strip(), data))
|
||||
return "\r\n".join(data)
|
||||
else:
|
||||
return u''
|
||||
|
||||
# incoming
|
||||
def process_formdata(self, valuelist):
|
||||
if valuelist and len(valuelist[0].strip()):
|
||||
# Remove empty strings, stripping and splitting \r\n, only \n etc.
|
||||
self.data = valuelist[0].splitlines()
|
||||
# Remove empty lines from the final data
|
||||
self.data = list(filter(lambda x: len(x.strip()), self.data))
|
||||
else:
|
||||
self.data = []
|
||||
|
||||
|
||||
class SaltyPasswordField(StringField):
|
||||
widget = widgets.PasswordInput()
|
||||
encrypted_password = ""
|
||||
|
||||
def build_password(self, password):
|
||||
import base64
|
||||
import hashlib
|
||||
import secrets
|
||||
|
||||
# Make a new salt on every new password and store it with the password
|
||||
salt = secrets.token_bytes(32)
|
||||
|
||||
key = hashlib.pbkdf2_hmac('sha256', password.encode('utf-8'), salt, 100000)
|
||||
store = base64.b64encode(salt + key).decode('ascii')
|
||||
|
||||
return store
|
||||
|
||||
# incoming
|
||||
def process_formdata(self, valuelist):
|
||||
if valuelist:
|
||||
# Be really sure it's non-zero in length
|
||||
if len(valuelist[0].strip()) > 0:
|
||||
self.encrypted_password = self.build_password(valuelist[0])
|
||||
self.data = ""
|
||||
else:
|
||||
self.data = False
|
||||
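# Illustrative sketch (not part of the upstream form code): the verification counterpart of
# build_password() above - the salt is the first 32 bytes of the base64-decoded blob.
def _example_check_password(stored_b64, password_attempt):
    import base64
    import hashlib
    raw = base64.b64decode(stored_b64.encode('ascii'))
    salt, key = raw[:32], raw[32:]
    attempt_key = hashlib.pbkdf2_hmac('sha256', password_attempt.encode('utf-8'), salt, 100000)
    return attempt_key == key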
|
||||
class TimeBetweenCheckForm(Form):
|
||||
weeks = IntegerField('Weeks', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
|
||||
days = IntegerField('Days', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
|
||||
hours = IntegerField('Hours', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
|
||||
minutes = IntegerField('Minutes', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
|
||||
seconds = IntegerField('Seconds', validators=[validators.Optional(), validators.NumberRange(min=0, message="Should contain zero or more seconds")])
|
||||
# @todo add total seconds minimum validator = minimum_seconds_recheck_time
|
||||
|
||||
# Separated by key:value
|
||||
class StringDictKeyValue(StringField):
|
||||
widget = widgets.TextArea()
|
||||
|
||||
def _value(self):
|
||||
if self.data:
|
||||
output = u''
|
||||
for k in self.data.keys():
|
||||
output += "{}: {}\r\n".format(k, self.data[k])
|
||||
|
||||
return output
|
||||
else:
|
||||
return u''
|
||||
|
||||
# incoming
|
||||
def process_formdata(self, valuelist):
|
||||
if valuelist:
|
||||
self.data = {}
|
||||
# Remove empty strings
|
||||
cleaned = list(filter(None, valuelist[0].split("\n")))
|
||||
for s in cleaned:
|
||||
parts = s.strip().split(':', 1)
|
||||
if len(parts) == 2:
|
||||
self.data.update({parts[0].strip(): parts[1].strip()})
|
||||
|
||||
else:
|
||||
self.data = {}
|
||||
|
||||
class ValidateContentFetcherIsReady(object):
|
||||
"""
|
||||
Validates that the selected content fetcher is ready to use (e.g. the WebDriver/Playwright endpoint responds)
|
||||
"""
|
||||
def __init__(self, message=None):
|
||||
self.message = message
|
||||
|
||||
def __call__(self, form, field):
|
||||
import urllib3.exceptions
|
||||
from changedetectionio import content_fetcher
|
||||
|
||||
# Better would be a radiohandler that keeps a reference to each class
|
||||
if field.data is not None:
|
||||
klass = getattr(content_fetcher, field.data)
|
||||
some_object = klass()
|
||||
try:
|
||||
ready = some_object.is_ready()
|
||||
|
||||
except urllib3.exceptions.MaxRetryError as e:
|
||||
driver_url = some_object.command_executor
|
||||
message = field.gettext('Content fetcher \'%s\' did not respond.' % (field.data))
|
||||
message += '<br/>' + field.gettext(
|
||||
'Be sure that the selenium/webdriver runner is running and accessible via network from this container/host.')
|
||||
message += '<br/>' + field.gettext('Did you follow the instructions in the wiki?')
|
||||
message += '<br/><br/>' + field.gettext('WebDriver Host: %s' % (driver_url))
|
||||
message += '<br/><a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">Go here for more information</a>'
|
||||
message += '<br/>'+field.gettext('Content fetcher did not respond properly, unable to use it.\n %s' % (str(e)))
|
||||
|
||||
raise ValidationError(message)
|
||||
|
||||
except Exception as e:
|
||||
message = field.gettext('Content fetcher \'%s\' did not respond properly, unable to use it.\n %s')
|
||||
raise ValidationError(message % (field.data, e))
|
||||
|
||||
|
||||
class ValidateNotificationBodyAndTitleWhenURLisSet(object):
|
||||
"""
|
||||
Validates that they entered something in both notification title+body when the URL is set
|
||||
Due to https://github.com/dgtlmoon/changedetection.io/issues/360
|
||||
"""
|
||||
|
||||
def __init__(self, message=None):
|
||||
self.message = message
|
||||
|
||||
def __call__(self, form, field):
|
||||
if len(field.data):
|
||||
if not len(form.notification_title.data) or not len(form.notification_body.data):
|
||||
message = field.gettext('Notification Body and Title is required when a Notification URL is used')
|
||||
raise ValidationError(message)
|
||||
|
||||
class ValidateAppRiseServers(object):
|
||||
"""
|
||||
Validates that each URL given is compatible with AppRise
|
||||
"""
|
||||
|
||||
def __init__(self, message=None):
|
||||
self.message = message
|
||||
|
||||
def __call__(self, form, field):
|
||||
import apprise
|
||||
apobj = apprise.Apprise()
|
||||
|
||||
for server_url in field.data:
|
||||
if not apobj.add(server_url):
|
||||
message = field.gettext('\'%s\' is not a valid AppRise URL.' % (server_url))
|
||||
raise ValidationError(message)
|
||||
|
||||
class ValidateTokensList(object):
|
||||
"""
|
||||
Validates that a {token} is from a valid set
|
||||
"""
|
||||
def __init__(self, message=None):
|
||||
self.message = message
|
||||
|
||||
def __call__(self, form, field):
|
||||
from changedetectionio import notification
|
||||
regex = re.compile('{.*?}')
|
||||
for p in re.findall(regex, field.data):
|
||||
if not p.strip('{}') in notification.valid_tokens:
|
||||
message = field.gettext('Token \'%s\' is not a valid token.')
|
||||
raise ValidationError(message % (p))
|
||||
|
||||
class validateURL(object):
|
||||
|
||||
"""
|
||||
Flask wtform validators won't work with basic auth
|
||||
"""
|
||||
|
||||
def __init__(self, message=None):
|
||||
self.message = message
|
||||
|
||||
def __call__(self, form, field):
|
||||
import validators
|
||||
try:
|
||||
validators.url(field.data.strip())
|
||||
except validators.ValidationFailure:
|
||||
message = field.gettext('\'%s\' is not a valid URL.' % (field.data.strip()))
|
||||
raise ValidationError(message)
|
||||
|
||||
class ValidateListRegex(object):
|
||||
"""
|
||||
Validates that anything that looks like a regex passes as a regex
|
||||
"""
|
||||
def __init__(self, message=None):
|
||||
self.message = message
|
||||
|
||||
def __call__(self, form, field):
|
||||
|
||||
for line in field.data:
|
||||
if line[0] == '/' and line[-1] == '/':
|
||||
# Because internally we don't wrap in /
|
||||
line = line.strip('/')
|
||||
try:
|
||||
re.compile(line)
|
||||
except re.error:
|
||||
message = field.gettext('RegEx \'%s\' is not a valid regular expression.')
|
||||
raise ValidationError(message % (line))
|
||||
|
||||
class ValidateCSSJSONXPATHInput(object):
|
||||
"""
|
||||
Filter validation
|
||||
@todo CSS validator ;)
|
||||
"""
|
||||
|
||||
def __init__(self, message=None, allow_xpath=True, allow_json=True):
|
||||
self.message = message
|
||||
self.allow_xpath = allow_xpath
|
||||
self.allow_json = allow_json
|
||||
|
||||
def __call__(self, form, field):
|
||||
|
||||
if isinstance(field.data, str):
|
||||
data = [field.data]
|
||||
else:
|
||||
data = field.data
|
||||
|
||||
for line in data:
|
||||
# Nothing to see here
|
||||
if not len(line.strip()):
|
||||
return
|
||||
|
||||
# Does it look like XPath?
|
||||
if line.strip()[0] == '/':
|
||||
if not self.allow_xpath:
|
||||
raise ValidationError("XPath not permitted in this field!")
|
||||
from lxml import etree, html
|
||||
tree = html.fromstring("<html></html>")
|
||||
|
||||
try:
|
||||
tree.xpath(line.strip())
|
||||
except etree.XPathEvalError as e:
|
||||
message = field.gettext('\'%s\' is not a valid XPath expression. (%s)')
|
||||
raise ValidationError(message % (line, str(e)))
|
||||
except:
|
||||
raise ValidationError("A system-error occurred when validating your XPath expression")
|
||||
|
||||
if 'json:' in line:
|
||||
if not self.allow_json:
|
||||
raise ValidationError("JSONPath not permitted in this field!")
|
||||
|
||||
from jsonpath_ng.exceptions import (
|
||||
JsonPathLexerError,
|
||||
JsonPathParserError,
|
||||
)
|
||||
from jsonpath_ng.ext import parse
|
||||
|
||||
input = line.replace('json:', '')
|
||||
|
||||
try:
|
||||
parse(input)
|
||||
except (JsonPathParserError, JsonPathLexerError) as e:
|
||||
message = field.gettext('\'%s\' is not a valid JSONPath expression. (%s)')
|
||||
raise ValidationError(message % (input, str(e)))
|
||||
except:
|
||||
raise ValidationError("A system-error occurred when validating your JSONPath expression")
|
||||
|
||||
# Re #265 - maybe in the future fetch the page and offer a
|
||||
# warning/notice that it's possible the rule doesn't yet match anything?
|
||||
|
||||
|
||||
class quickWatchForm(Form):
|
||||
url = fields.URLField('URL', validators=[validateURL()])
|
||||
tag = StringField('Group tag', [validators.Optional()])
|
||||
watch_submit_button = SubmitField('Watch', render_kw={"class": "pure-button pure-button-primary"})
|
||||
edit_and_watch_submit_button = SubmitField('Edit > Watch', render_kw={"class": "pure-button pure-button-primary"})
|
||||
|
||||
|
||||
# Common to a single watch and the global settings
|
||||
class commonSettingsForm(Form):
|
||||
notification_urls = StringListField('Notification URL list', validators=[validators.Optional(), ValidateAppRiseServers()])
|
||||
notification_title = StringField('Notification title', validators=[validators.Optional(), ValidateTokensList()])
|
||||
notification_body = TextAreaField('Notification body', validators=[validators.Optional(), ValidateTokensList()])
|
||||
notification_format = SelectField('Notification format', choices=valid_notification_formats.keys())
|
||||
fetch_backend = RadioField(u'Fetch method', choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
|
||||
extract_title_as_title = BooleanField('Extract <title> from document and use as watch title', default=False)
|
||||
webdriver_delay = IntegerField('Wait seconds before extracting text', validators=[validators.Optional(), validators.NumberRange(min=1,
|
||||
message="Should contain one or more seconds")])
|
||||
|
||||
class watchForm(commonSettingsForm):
|
||||
|
||||
url = fields.URLField('URL', validators=[validateURL()])
|
||||
tag = StringField('Group tag', [validators.Optional()], default='')
|
||||
|
||||
time_between_check = FormField(TimeBetweenCheckForm)
|
||||
|
||||
css_filter = StringField('CSS/JSON/XPATH Filter', [ValidateCSSJSONXPATHInput()], default='')
|
||||
|
||||
subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_xpath=False, allow_json=False)])
|
||||
|
||||
extract_text = StringListField('Extract text', [ValidateListRegex()])
|
||||
|
||||
title = StringField('Title', default='')
|
||||
|
||||
ignore_text = StringListField('Ignore text', [ValidateListRegex()])
|
||||
headers = StringDictKeyValue('Request headers')
|
||||
body = TextAreaField('Request body', [validators.Optional()])
|
||||
method = SelectField('Request method', choices=valid_method, default=default_method)
|
||||
ignore_status_codes = BooleanField('Ignore status codes (process non-2xx status codes as normal)', default=False)
|
||||
check_unique_lines = BooleanField('Only trigger when new lines appear', default=False)
|
||||
trigger_text = StringListField('Trigger/wait for text', [validators.Optional(), ValidateListRegex()])
|
||||
text_should_not_be_present = StringListField('Block change-detection if text matches', [validators.Optional(), ValidateListRegex()])
|
||||
|
||||
webdriver_js_execute_code = TextAreaField('Execute JavaScript before change detection', render_kw={"rows": "5"}, validators=[validators.Optional()])
|
||||
|
||||
save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
|
||||
|
||||
proxy = RadioField('Proxy')
|
||||
filter_failure_notification_send = BooleanField(
|
||||
'Send a notification when the filter can no longer be found on the page', default=False)
|
||||
|
||||
notification_muted = BooleanField('Notifications Muted / Off', default=False)
|
||||
|
||||
def validate(self, **kwargs):
|
||||
if not super().validate():
|
||||
return False
|
||||
|
||||
result = True
|
||||
|
||||
# Fail form validation when a body is set for a GET
|
||||
if self.method.data == 'GET' and self.body.data:
|
||||
self.body.errors.append('Body must be empty when Request Method is set to GET')
|
||||
result = False
|
||||
|
||||
return result
|
||||
|
||||
|
||||
# datastore.data['settings']['requests']..
|
||||
class globalSettingsRequestForm(Form):
|
||||
time_between_check = FormField(TimeBetweenCheckForm)
|
||||
proxy = RadioField('Proxy')
|
||||
jitter_seconds = IntegerField('Random jitter seconds ± check',
|
||||
render_kw={"style": "width: 5em;"},
|
||||
validators=[validators.NumberRange(min=0, message="Should contain zero or more seconds")])
|
||||
|
||||
# datastore.data['settings']['application']..
|
||||
class globalSettingsApplicationForm(commonSettingsForm):
|
||||
|
||||
base_url = StringField('Base URL', validators=[validators.Optional()])
|
||||
global_subtractive_selectors = StringListField('Remove elements', [ValidateCSSJSONXPATHInput(allow_xpath=False, allow_json=False)])
|
||||
global_ignore_text = StringListField('Ignore Text', [ValidateListRegex()])
|
||||
ignore_whitespace = BooleanField('Ignore whitespace')
|
||||
removepassword_button = SubmitField('Remove password', render_kw={"class": "pure-button pure-button-primary"})
|
||||
empty_pages_are_a_change = BooleanField('Treat empty pages as a change?', default=False)
|
||||
render_anchor_tag_content = BooleanField('Render anchor tag content', default=False)
|
||||
fetch_backend = RadioField('Fetch Method', default="html_requests", choices=content_fetcher.available_fetchers(), validators=[ValidateContentFetcherIsReady()])
|
||||
api_access_token_enabled = BooleanField('API access token security check enabled', default=True, validators=[validators.Optional()])
|
||||
password = SaltyPasswordField()
|
||||
|
||||
filter_failure_notification_threshold_attempts = IntegerField('Number of times the filter can be missing before sending a notification',
|
||||
render_kw={"style": "width: 5em;"},
|
||||
validators=[validators.NumberRange(min=0,
|
||||
message="Should contain zero or more attempts")])
|
||||
|
||||
|
||||
class globalSettingsForm(Form):
|
||||
# Define these as FormFields/"sub forms", this way it matches the JSON storage
|
||||
# datastore.data['settings']['application']..
|
||||
# datastore.data['settings']['requests']..
|
||||
|
||||
requests = FormField(globalSettingsRequestForm)
|
||||
application = FormField(globalSettingsApplicationForm)
|
||||
save_button = SubmitField('Save', render_kw={"class": "pure-button pure-button-primary"})
|
||||
235
changedetectionio/html_tools.py
Normal file
@@ -0,0 +1,235 @@
|
||||
import json
|
||||
from typing import List
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from jsonpath_ng.ext import parse
|
||||
import re
|
||||
from inscriptis import get_text
|
||||
from inscriptis.model.config import ParserConfig
|
||||
|
||||
class FilterNotFoundInResponse(ValueError):
|
||||
def __init__(self, msg):
|
||||
ValueError.__init__(self, msg)
|
||||
|
||||
class JSONNotFound(ValueError):
|
||||
def __init__(self, msg):
|
||||
ValueError.__init__(self, msg)
|
||||
|
||||
|
||||
# Given a CSS Rule, and a blob of HTML, return the blob of HTML that matches
|
||||
def css_filter(css_filter, html_content):
|
||||
soup = BeautifulSoup(html_content, "html.parser")
|
||||
html_block = ""
|
||||
r = soup.select(css_filter, separator="")
|
||||
if len(html_content) > 0 and len(r) == 0:
|
||||
raise FilterNotFoundInResponse(css_filter)
|
||||
for item in r:
|
||||
html_block += str(item)
|
||||
|
||||
return html_block + "\n"
|
||||
|
||||
def subtractive_css_selector(css_selector, html_content):
|
||||
soup = BeautifulSoup(html_content, "html.parser")
|
||||
for item in soup.select(css_selector):
|
||||
item.decompose()
|
||||
return str(soup)
|
||||
|
||||
|
||||
def element_removal(selectors: List[str], html_content):
|
||||
"""Joins individual filters into one css filter."""
|
||||
selector = ",".join(selectors)
|
||||
return subtractive_css_selector(selector, html_content)
|
||||
|
||||
|
||||
# Return str Utf-8 of matched rules
|
||||
def xpath_filter(xpath_filter, html_content):
|
||||
from lxml import etree, html
|
||||
|
||||
tree = html.fromstring(bytes(html_content, encoding='utf-8'))
|
||||
html_block = ""
|
||||
|
||||
r = tree.xpath(xpath_filter.strip(), namespaces={'re': 'http://exslt.org/regular-expressions'})
|
||||
if len(html_content) > 0 and len(r) == 0:
|
||||
raise FilterNotFoundInResponse(xpath_filter)
|
||||
|
||||
# @note: //title/text() won't work where <title>CDATA..
|
||||
|
||||
for element in r:
|
||||
if type(element) == etree._ElementStringResult:
|
||||
html_block += str(element) + "<br/>"
|
||||
elif type(element) == etree._ElementUnicodeResult:
|
||||
html_block += str(element) + "<br/>"
|
||||
else:
|
||||
html_block += etree.tostring(element, pretty_print=True).decode('utf-8') + "<br/>"
|
||||
|
||||
return html_block
|
||||
|
||||
|
||||
# Extract/find element
|
||||
def extract_element(find='title', html_content=''):
|
||||
|
||||
# Re #106 - be sure to handle when it's not found
|
||||
element_text = None
|
||||
|
||||
soup = BeautifulSoup(html_content, 'html.parser')
|
||||
result = soup.find(find)
|
||||
if result and result.string:
|
||||
element_text = result.string.strip()
|
||||
|
||||
return element_text
|
||||
|
||||
#
|
||||
def _parse_json(json_data, jsonpath_filter):
|
||||
s=[]
|
||||
jsonpath_expression = parse(jsonpath_filter.replace('json:', ''))
|
||||
match = jsonpath_expression.find(json_data)
|
||||
|
||||
# More than one result, we will return it as a JSON list.
|
||||
if len(match) > 1:
|
||||
for i in match:
|
||||
s.append(i.value)
|
||||
|
||||
# Single value, use just the value, as it could be later used in a token in notifications.
|
||||
if len(match) == 1:
|
||||
s = match[0].value
|
||||
|
||||
# Re #257 - Better handling where it does not exist, in the case the original 's' value was False..
|
||||
if not match:
|
||||
# Re 265 - Just return an empty string when filter not found
|
||||
return ''
|
||||
|
||||
# Ticket #462 - allow the original encoding through, usually it's UTF-8 or similar
|
||||
stripped_text_from_html = json.dumps(s, indent=4, ensure_ascii=False)
|
||||
|
||||
return stripped_text_from_html
|
||||
|
||||
def extract_json_as_string(content, jsonpath_filter):
|
||||
|
||||
stripped_text_from_html = False
|
||||
|
||||
# Try to parse/filter out the JSON, if we get some parser error, then maybe it's embedded <script type=ldjson>
|
||||
try:
|
||||
stripped_text_from_html = _parse_json(json.loads(content), jsonpath_filter)
|
||||
except json.JSONDecodeError:
|
||||
|
||||
# For each <script json></script> blob.. just return the first that matches jsonpath_filter
|
||||
s = []
|
||||
soup = BeautifulSoup(content, 'html.parser')
|
||||
bs_result = soup.findAll('script')
|
||||
|
||||
if not bs_result:
|
||||
raise JSONNotFound("No parsable JSON found in this document")
|
||||
|
||||
for result in bs_result:
|
||||
# Skip empty tags, and things that don't even look like JSON
|
||||
if not result.string or not '{' in result.string:
|
||||
continue
|
||||
|
||||
try:
|
||||
json_data = json.loads(result.string)
|
||||
except json.JSONDecodeError:
|
||||
# Just skip it
|
||||
continue
|
||||
else:
|
||||
stripped_text_from_html = _parse_json(json_data, jsonpath_filter)
|
||||
if stripped_text_from_html:
|
||||
break
|
||||
|
||||
if not stripped_text_from_html:
|
||||
# Re 265 - Just return an empty string when filter not found
|
||||
return ''
|
||||
|
||||
return stripped_text_from_html
|
||||
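# Illustrative sketch (not part of the upstream file): typical use of the helper above with
# a "json:" prefixed JSONPath filter, and the empty-string behaviour when nothing matches.
def _example_extract_json():
    content = '{"offers": {"price": 23.5, "currency": "USD"}}'
    assert extract_json_as_string(content, 'json:$.offers.price') == '23.5'
    # A filter that matches nothing returns an empty string rather than raising (Re #265)
    assert extract_json_as_string(content, 'json:$.doesnotexist') == ''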
|
||||
# Mode - "content" return the content without the matches (default)
|
||||
# - "line numbers" return a list of line numbers that match (int list)
|
||||
#
|
||||
# wordlist - list of regex's (str) or words (str)
|
||||
def strip_ignore_text(content, wordlist, mode="content"):
|
||||
ignore = []
|
||||
ignore_regex = []
|
||||
|
||||
# @todo check this runs case insensitive
|
||||
for k in wordlist:
|
||||
|
||||
# Is it a regex?
|
||||
if k[0] == '/':
|
||||
ignore_regex.append(k.strip(" /"))
|
||||
else:
|
||||
ignore.append(k)
|
||||
|
||||
i = 0
|
||||
output = []
|
||||
ignored_line_numbers = []
|
||||
for line in content.splitlines():
|
||||
i += 1
|
||||
# Always ignore blank lines in this mode. (when this function gets called)
|
||||
if len(line.strip()):
|
||||
regex_matches = False
|
||||
|
||||
# if any of these match, skip
|
||||
for regex in ignore_regex:
|
||||
try:
|
||||
if re.search(regex, line, re.IGNORECASE):
|
||||
regex_matches = True
|
||||
except Exception as e:
|
||||
continue
|
||||
|
||||
if not regex_matches and not any(skip_text.lower() in line.lower() for skip_text in ignore):
|
||||
output.append(line.encode('utf8'))
|
||||
else:
|
||||
ignored_line_numbers.append(i)
|
||||
|
||||
|
||||
|
||||
# Used for finding out what to highlight
|
||||
if mode == "line numbers":
|
||||
return ignored_line_numbers
|
||||
|
||||
return "\n".encode('utf8').join(output)
|
||||
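# Illustrative sketch (not part of the upstream file): the two modes of strip_ignore_text().
# Plain entries are case-insensitive substring matches, /.../ entries are treated as regexes.
def _example_strip_ignore_text():
    content = "Price: 10.00\nLast updated: 2022-05-01\nIn stock"
    wordlist = ["last updated", r"/\d{4}-\d{2}-\d{2}/"]
    # Default "content" mode returns the remaining lines as utf-8 bytes
    assert strip_ignore_text(content, wordlist) == b"Price: 10.00\nIn stock"
    # "line numbers" mode returns which (1-based) lines were ignored, used for highlighting
    assert strip_ignore_text(content, wordlist, mode="line numbers") == [2]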
|
||||
|
||||
def html_to_text(html_content: str, render_anchor_tag_content=False) -> str:
|
||||
"""Converts html string to a string with just the text. If ignoring
|
||||
rendering anchor tag content is enable, anchor tag content are also
|
||||
included in the text
|
||||
|
||||
:param html_content: string with html content
|
||||
:param render_anchor_tag_content: boolean flag indicating whether to extract
|
||||
hyperlinks (the anchor tag content) together with text. This refers to the
|
||||
'href' inside 'a' tags.
|
||||
Anchor tag content is rendered in the following manner:
|
||||
'[ text ](anchor tag content)'
|
||||
:return: extracted text from the HTML
|
||||
"""
|
||||
# if anchor tag content flag is set to True define a config for
|
||||
# extracting this content
|
||||
if render_anchor_tag_content:
|
||||
|
||||
parser_config = ParserConfig(
|
||||
annotation_rules={"a": ["hyperlink"]}, display_links=True
|
||||
)
|
||||
|
||||
# otherwise set config to None
|
||||
else:
|
||||
parser_config = None
|
||||
|
||||
# get text and annotations via inscriptis
|
||||
text_content = get_text(html_content, config=parser_config)
|
||||
|
||||
return text_content
|
||||
|
||||
def workarounds_for_obfuscations(content):
|
||||
"""
|
||||
Some sites are using sneaky tactics to make prices and other information un-renderable by Inscriptis
|
||||
This could go into its own Pip package in the future, for faster updates
|
||||
"""
|
||||
|
||||
# HomeDepot.com style <span>$<!-- -->90<!-- -->.<!-- -->74</span>
|
||||
# https://github.com/weblyzard/inscriptis/issues/45
|
||||
if not content:
|
||||
return content
|
||||
|
||||
content = re.sub(r'<!--\s+-->', '', content)
|
||||
|
||||
return content
|
||||
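# Illustrative sketch (not part of the upstream file): the HomeDepot-style obfuscation
# described above being normalised, then rendered to plain text with html_to_text().
def _example_deobfuscated_price():
    html = "<span>$<!-- -->90<!-- -->.<!-- -->74</span>"
    cleaned = workarounds_for_obfuscations(html)   # -> "<span>$90.74</span>"
    return html_to_text(cleaned).strip()           # -> "$90.74"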
130
changedetectionio/importer.py
Normal file
@@ -0,0 +1,130 @@
|
||||
from abc import ABC, abstractmethod
|
||||
import time
|
||||
import validators
|
||||
|
||||
|
||||
class Importer():
|
||||
remaining_data = []
|
||||
new_uuids = []
|
||||
good = 0
|
||||
|
||||
def __init__(self):
|
||||
self.new_uuids = []
|
||||
self.good = 0
|
||||
self.remaining_data = []
|
||||
|
||||
@abstractmethod
|
||||
def run(self,
|
||||
data,
|
||||
flash,
|
||||
datastore):
|
||||
pass
|
||||
|
||||
|
||||
class import_url_list(Importer):
|
||||
"""
|
||||
Imports a list, can be in <code>https://example.com tag1, tag2, last tag</code> format
|
||||
"""
|
||||
def run(self,
|
||||
data,
|
||||
flash,
|
||||
datastore,
|
||||
):
|
||||
|
||||
urls = data.split("\n")
|
||||
good = 0
|
||||
now = time.time()
|
||||
|
||||
if (len(urls) > 5000):
|
||||
flash("Importing 5,000 of the first URLs from your list, the rest can be imported again.")
|
||||
|
||||
for url in urls:
|
||||
url = url.strip()
|
||||
if not len(url):
|
||||
continue
|
||||
|
||||
tags = ""
|
||||
|
||||
# 'tags' should be a csv list after the URL
|
||||
if ' ' in url:
|
||||
url, tags = url.split(" ", 1)
|
||||
|
||||
# Flask wtform validators won't work with basic auth, use validators package
|
||||
# Up to 5000 per batch so we don't flood the server
|
||||
if len(url) and validators.url(url.replace('source:', '')) and good < 5000:
|
||||
new_uuid = datastore.add_watch(url=url.strip(), tag=tags, write_to_disk_now=False)
|
||||
if new_uuid:
|
||||
# Straight into the queue.
|
||||
self.new_uuids.append(new_uuid)
|
||||
good += 1
|
||||
continue
|
||||
|
||||
# Worked past the 'continue' above, append it to the bad list
|
||||
if self.remaining_data is None:
|
||||
self.remaining_data = []
|
||||
self.remaining_data.append(url)
|
||||
|
||||
flash("{} Imported from list in {:.2f}s, {} Skipped.".format(good, time.time() - now, len(self.remaining_data)))
|
||||
|
||||
|
||||
class import_distill_io_json(Importer):
|
||||
def run(self,
|
||||
data,
|
||||
flash,
|
||||
datastore,
|
||||
):
|
||||
|
||||
import json
|
||||
good = 0
|
||||
now = time.time()
|
||||
self.new_uuids=[]
|
||||
|
||||
|
||||
try:
|
||||
data = json.loads(data.strip())
|
||||
except json.decoder.JSONDecodeError:
|
||||
flash("Unable to read JSON file, was it broken?", 'error')
|
||||
return
|
||||
|
||||
if not data.get('data'):
|
||||
flash("JSON structure looks invalid, was it broken?", 'error')
|
||||
return
|
||||
|
||||
for d in data.get('data'):
|
||||
d_config = json.loads(d['config'])
|
||||
extras = {'title': d.get('name', None)}
|
||||
|
||||
if len(d['uri']) and good < 5000:
|
||||
try:
|
||||
# @todo we only support CSS ones at the moment
|
||||
if d_config['selections'][0]['frames'][0]['excludes'][0]['type'] == 'css':
|
||||
extras['subtractive_selectors'] = d_config['selections'][0]['frames'][0]['excludes'][0]['expr']
|
||||
except KeyError:
|
||||
pass
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
try:
|
||||
extras['css_filter'] = d_config['selections'][0]['frames'][0]['includes'][0]['expr']
|
||||
if d_config['selections'][0]['frames'][0]['includes'][0]['type'] == 'xpath':
|
||||
extras['css_filter'] = 'xpath:' + extras['css_filter']
|
||||
|
||||
except KeyError:
|
||||
pass
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
|
||||
if d.get('tags', False):
|
||||
extras['tag'] = ", ".join(d['tags'])
|
||||
|
||||
new_uuid = datastore.add_watch(url=d['uri'].strip(),
|
||||
extras=extras,
|
||||
write_to_disk_now=False)
|
||||
|
||||
if new_uuid:
|
||||
# Straight into the queue.
|
||||
self.new_uuids.append(new_uuid)
|
||||
good += 1
|
||||
|
||||
flash("{} Imported from Distill.io in {:.2f}s, {} Skipped.".format(len(self.new_uuids), time.time() - now, len(self.remaining_data)))
|
||||
53
changedetectionio/model/App.py
Normal file
@@ -0,0 +1,53 @@
from os import getenv
from changedetectionio.notification import (
    default_notification_body,
    default_notification_format,
    default_notification_title,
)

_FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT = 6

class model(dict):
    base_config = {
        'note': "Hello! If you change this file manually, please be sure to restart your changedetection.io instance!",
        'watching': {},
        'settings': {
            'headers': {
                'User-Agent': getenv("DEFAULT_SETTINGS_HEADERS_USERAGENT", 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.66 Safari/537.36'),
                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
                'Accept-Encoding': 'gzip, deflate',  # No support for brotli in python requests yet.
                'Accept-Language': 'en-GB,en-US;q=0.9,en;'
            },
            'requests': {
                'timeout': int(getenv("DEFAULT_SETTINGS_REQUESTS_TIMEOUT", "45")),  # Default 45 seconds
                'time_between_check': {'weeks': None, 'days': None, 'hours': 3, 'minutes': None, 'seconds': None},
                'jitter_seconds': 0,
                'workers': int(getenv("DEFAULT_SETTINGS_REQUESTS_WORKERS", "10")),  # Number of threads, lower is better for slow connections
                'proxy': None  # Preferred proxy connection
            },
            'application': {
                'api_access_token_enabled': True,
                'password': False,
                'base_url': None,
                'extract_title_as_title': False,
                'empty_pages_are_a_change': False,
                'fetch_backend': getenv("DEFAULT_FETCH_BACKEND", "html_requests"),
                'filter_failure_notification_threshold_attempts': _FILTER_FAILURE_THRESHOLD_ATTEMPTS_DEFAULT,
                'global_ignore_text': [],  # List of text to ignore when calculating the comparison checksum
                'global_subtractive_selectors': [],
                'ignore_whitespace': True,
                'render_anchor_tag_content': False,
                'notification_urls': [],  # Apprise URL list
                # Custom notification content
                'notification_title': default_notification_title,
                'notification_body': default_notification_body,
                'notification_format': default_notification_format,
                'schema_version': 0,
                'webdriver_delay': None  # Extra delay in seconds before extracting text
            }
        }
    }

    def __init__(self, *arg, **kw):
        super(model, self).__init__(*arg, **kw)
        self.update(self.base_config)
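Most of the defaults in base_config above can be overridden at startup through the environment variables read by the getenv() calls. A hedged example (variable names are taken from the code above, the values and the 'html_webdriver' backend name are illustrative; they must be set before this module is imported, since the class body is evaluated at import time):

import os

# Give slow sites more time and reduce the number of worker threads (illustrative values)
os.environ['DEFAULT_SETTINGS_REQUESTS_TIMEOUT'] = '90'
os.environ['DEFAULT_SETTINGS_REQUESTS_WORKERS'] = '4'
# Switch the default fetcher away from plain requests
os.environ['DEFAULT_FETCH_BACKEND'] = 'html_webdriver'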
253
changedetectionio/model/Watch.py
Normal file
@@ -0,0 +1,253 @@
import os
import uuid as uuid_builder
from distutils.util import strtobool

minimum_seconds_recheck_time = int(os.getenv('MINIMUM_SECONDS_RECHECK_TIME', 60))
mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}

from changedetectionio.notification import (
    default_notification_format_for_watch
)


class model(dict):
    __newest_history_key = None
    __history_n = 0
    __base_config = {
        'url': None,
        'tag': None,
        'last_checked': 0,
        'paused': False,
        'last_viewed': 0,  # history key value of the last viewed via the [diff] link
        #'newest_history_key': 0,
        'title': None,
        'previous_md5': False,
        'uuid': str(uuid_builder.uuid4()),
        'headers': {},  # Extra headers to send
        'body': None,
        'method': 'GET',
        #'history': {},  # Dict of timestamp and output stripped filename
        'ignore_text': [],  # List of text to ignore when calculating the comparison checksum
        # Custom notification content
        'notification_urls': [],  # List of URLs to add to the notification Queue (usually Apprise)
        'notification_title': None,
        'notification_body': None,
        'notification_format': default_notification_format_for_watch,
        'notification_muted': False,
        'css_filter': '',
        'last_error': False,
        'extract_text': [],  # Extract text by regex after filters
        'subtractive_selectors': [],
        'trigger_text': [],  # List of text or regex to wait for until a change is detected
        'text_should_not_be_present': [],  # Text that should not be present
        'fetch_backend': None,
        'filter_failure_notification_send': strtobool(os.getenv('FILTER_FAILURE_NOTIFICATION_SEND_DEFAULT', 'True')),
        'consecutive_filter_failures': 0,  # Every time the CSS/xPath filter cannot be located, reset when all is fine.
        'extract_title_as_title': False,
        'check_unique_lines': False,  # On change-detected, compare against all history to see if it's something new
        'proxy': None,  # Preferred proxy connection
        # Re #110, so then if this is set to None, we know to use the default value instead
        # Requires setting to None on submit if it's the same as the default
        # Should be all None by default, so we use the system default in this case.
        'time_between_check': {'weeks': None, 'days': None, 'hours': None, 'minutes': None, 'seconds': None},
        'webdriver_delay': None,
        'webdriver_js_execute_code': None,  # Run before change-detection
    }
    jitter_seconds = 0

    def __init__(self, *arg, **kw):

        self.update(self.__base_config)
        self.__datastore_path = kw['datastore_path']

        self['uuid'] = str(uuid_builder.uuid4())

        del kw['datastore_path']

        if kw.get('default'):
            self.update(kw['default'])
            del kw['default']

        # Be sure the cached timestamp is ready
        bump = self.history

        # Goes at the end so we update the default object with the initialiser
        super(model, self).__init__(*arg, **kw)

    @property
    def viewed(self):
        if int(self['last_viewed']) >= int(self.newest_history_key):
            return True

        return False

    def ensure_data_dir_exists(self):
        target_path = os.path.join(self.__datastore_path, self['uuid'])
        if not os.path.isdir(target_path):
            print("> Creating data dir {}".format(target_path))
            os.mkdir(target_path)

    @property
    def label(self):
        # Used for sorting
        if self['title']:
            return self['title']
        return self['url']

    @property
    def last_changed(self):
        # last_changed will be the newest snapshot, but when we have just one snapshot, it should be 0
        if self.__history_n <= 1:
            return 0
        if self.__newest_history_key:
            return int(self.__newest_history_key)
        return 0

    @property
    def history_n(self):
        return self.__history_n

    @property
    def history(self):
        tmp_history = {}
        import logging
        import time

        # Read the history file as a dict
        fname = os.path.join(self.__datastore_path, self.get('uuid'), "history.txt")
        if os.path.isfile(fname):
            logging.debug("Reading history index " + str(time.time()))
            with open(fname, "r") as f:
                tmp_history = dict(i.strip().split(',', 2) for i in f.readlines())

        if len(tmp_history):
            self.__newest_history_key = list(tmp_history.keys())[-1]

        self.__history_n = len(tmp_history)

        return tmp_history

    @property
    def has_history(self):
        fname = os.path.join(self.__datastore_path, self.get('uuid'), "history.txt")
        return os.path.isfile(fname)

    # Returns the newest key, but if there's only 1 record, then it's counted as not being new, so return 0.
    @property
    def newest_history_key(self):
        if self.__newest_history_key is not None:
            return self.__newest_history_key

        if len(self.history) <= 1:
            return 0

        bump = self.history
        return self.__newest_history_key

    # Save some text file to the appropriate path and bump the history
    # result_obj from fetch_site_status.run()
    def save_history_text(self, contents, timestamp):
        import uuid
        import logging

        output_path = "{}/{}".format(self.__datastore_path, self['uuid'])

        self.ensure_data_dir_exists()

        snapshot_fname = "{}/{}.stripped.txt".format(output_path, uuid.uuid4())
        logging.debug("Saving history text {}".format(snapshot_fname))

        with open(snapshot_fname, 'wb') as f:
            f.write(contents)
            f.close()

        # Append to index
        # @todo check last char was \n
        index_fname = "{}/history.txt".format(output_path)
        with open(index_fname, 'a') as f:
            f.write("{},{}\n".format(timestamp, snapshot_fname))
            f.close()

        self.__newest_history_key = timestamp
        self.__history_n += 1

        # @todo bump static cache of the last timestamp so we don't need to examine the file to set a proper ''viewed'' status
        return snapshot_fname

    @property
    def has_empty_checktime(self):
        # Check if all values are None/False/0 in the time_between_check dictionary
        res = all(x == None or x == False or x == 0 for x in self.get('time_between_check', {}).values())
        return res

    def threshold_seconds(self):
        seconds = 0
        for m, n in mtable.items():
            x = self.get('time_between_check', {}).get(m, None)
            if x:
                seconds += x * n
        return seconds

    # Iterate over all history texts and see if something new exists
    def lines_contain_something_unique_compared_to_history(self, lines: list):
        local_lines = set([l.decode('utf-8').strip().lower() for l in lines])

        # Compare the new lines (set) against each history text file (set) looking for something new..
        existing_history = set({})
        for k, v in self.history.items():
            alist = set([line.decode('utf-8').strip().lower() for line in open(v, 'rb')])
            existing_history = existing_history.union(alist)

        # Check that everything in local_lines (new stuff) already exists in existing_history - it should
        # if not, something new happened
        return not local_lines.issubset(existing_history)

    def get_screenshot(self):
        fname = os.path.join(self.__datastore_path, self['uuid'], "last-screenshot.png")
        if os.path.isfile(fname):
            return fname

        return False

    def __get_file_ctime(self, filename):
        fname = os.path.join(self.__datastore_path, self['uuid'], filename)
        if os.path.isfile(fname):
            return int(os.path.getmtime(fname))
        return False

    @property
    def error_text_ctime(self):
        return self.__get_file_ctime('last-error.txt')

    @property
    def snapshot_text_ctime(self):
        if self.history_n == 0:
            return False

        timestamp = list(self.history.keys())[-1]
        return int(timestamp)

    @property
    def snapshot_screenshot_ctime(self):
        return self.__get_file_ctime('last-screenshot.png')

    @property
    def snapshot_error_screenshot_ctime(self):
        return self.__get_file_ctime('last-error-screenshot.png')

    def get_error_text(self):
        """Return the text saved from a previous request that resulted in a non-200 error"""
        fname = os.path.join(self.__datastore_path, self['uuid'], "last-error.txt")
        if os.path.isfile(fname):
            with open(fname, 'r') as f:
                return f.read()
        return False

    def get_error_snapshot(self):
        """Return path to the screenshot that resulted in a non-200 error"""
        fname = os.path.join(self.__datastore_path, self['uuid'], "last-error-screenshot.png")
        if os.path.isfile(fname):
            return fname
        return False
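The history bookkeeping above rests on a very small on-disk format: each watch gets a history.txt index where every line is "<timestamp>,<path to snapshot>", appended by save_history_text() and read back into a dict by the history property. A sketch of reading such an index outside the class (the file path and timestamp are illustrative):

# history.txt lines look like: 1659000000,/datastore/<uuid>/<snapshot-uuid>.stripped.txt
with open("history.txt") as f:
    history = dict(line.strip().split(',', 2) for line in f)

newest_timestamp = list(history.keys())[-1]   # what newest_history_key returns
snapshot_path = history[newest_timestamp]     # file written earlier by save_history_text()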
0
changedetectionio/model/__init__.py
Normal file
170
changedetectionio/notification.py
Normal file
@@ -0,0 +1,170 @@
import apprise
from apprise import NotifyFormat

valid_tokens = {
    'base_url': '',
    'watch_url': '',
    'watch_uuid': '',
    'watch_title': '',
    'watch_tag': '',
    'diff': '',
    'diff_full': '',
    'diff_url': '',
    'preview_url': '',
    'current_snapshot': ''
}

default_notification_format_for_watch = 'System default'
default_notification_format = 'Text'
default_notification_body = '{watch_url} had a change.\n---\n{diff}\n---\n'
default_notification_title = 'ChangeDetection.io Notification - {watch_url}'

valid_notification_formats = {
    'Text': NotifyFormat.TEXT,
    'Markdown': NotifyFormat.MARKDOWN,
    'HTML': NotifyFormat.HTML,
    # Used only for editing a watch (not for global)
    default_notification_format_for_watch: default_notification_format_for_watch
}

def process_notification(n_object, datastore):

    # Get the notification body from datastore
    n_body = n_object.get('notification_body', default_notification_body)
    n_title = n_object.get('notification_title', default_notification_title)
    n_format = valid_notification_formats.get(
        n_object['notification_format'],
        valid_notification_formats[default_notification_format],
    )

    # Insert variables into the notification content
    notification_parameters = create_notification_parameters(n_object, datastore)

    for n_k in notification_parameters:
        token = '{' + n_k + '}'
        val = notification_parameters[n_k]
        n_title = n_title.replace(token, val)
        n_body = n_body.replace(token, val)

    # https://github.com/caronc/apprise/wiki/Development_LogCapture
    # Anything higher than or equal to WARNING (which covers things like Connection errors)
    # raise it as an exception
    apobjs = []
    sent_objs = []
    from .apprise_asset import asset
    for url in n_object['notification_urls']:
        apobj = apprise.Apprise(debug=True, asset=asset)
        url = url.strip()
        if len(url):
            print(">> Process Notification: AppRise notifying {}".format(url))
            with apprise.LogCapture(level=apprise.logging.DEBUG) as logs:
                # Re 323 - Limit discord length to their 2000 char limit total or it won't send.
                # Because different notifications may require different pre-processing, run each sequentially :(
                # 2000 bytes minus -
                #   200 bytes for the overhead of the _entire_ json payload, 200 bytes for {tts, wait, content} etc headers
                #   Length of URL - in case they specify a longer custom avatar_url

                # So if no avatar_url is specified, add one so it can be correctly calculated into the total payload
                k = '?' if not '?' in url else '&'
                if not 'avatar_url' in url and not url.startswith('mail'):
                    url += k + 'avatar_url=https://raw.githubusercontent.com/dgtlmoon/changedetection.io/master/changedetectionio/static/images/avatar-256x256.png'

                if url.startswith('tgram://'):
                    # Telegram only supports a limited subset of HTML, remove the '<br/>' we place in.
                    # re https://github.com/dgtlmoon/changedetection.io/issues/555
                    # @todo re-use an existing library we have already imported to strip all non-allowed tags
                    n_body = n_body.replace('<br/>', '\n')
                    n_body = n_body.replace('</br>', '\n')
                    # real limit is 4096, but minus some for extra metadata
                    payload_max_size = 3600
                    body_limit = max(0, payload_max_size - len(n_title))
                    n_title = n_title[0:payload_max_size]
                    n_body = n_body[0:body_limit]

                elif url.startswith('discord://') or url.startswith('https://discordapp.com/api/webhooks') or url.startswith('https://discord.com/api'):
                    # real limit is 2000, but minus some for extra metadata
                    payload_max_size = 1700
                    body_limit = max(0, payload_max_size - len(n_title))
                    n_title = n_title[0:payload_max_size]
                    n_body = n_body[0:body_limit]

                elif url.startswith('mailto'):
                    # Apprise will default to HTML, so we need to override it
                    # So that what's generated in n_body is in line with what is going to be sent.
                    # https://github.com/caronc/apprise/issues/633#issuecomment-1191449321
                    if not 'format=' in url and (n_format == 'text' or n_format == 'markdown'):
                        prefix = '?' if not '?' in url else '&'
                        url = "{}{}format={}".format(url, prefix, n_format)

                apobj.add(url)

                apobj.notify(
                    title=n_title,
                    body=n_body,
                    body_format=n_format)

                apobj.clear()

                # In case it needs to exist in memory for a while after to process(?)
                apobjs.append(apobj)

                # Returns empty string if nothing found, multi-line string otherwise
                log_value = logs.getvalue()
                if log_value and 'WARNING' in log_value or 'ERROR' in log_value:
                    raise Exception(log_value)

            sent_objs.append({'title': n_title,
                              'body': n_body,
                              'url': url,
                              'body_format': n_format})

    # Return what was sent for better logging - after the for loop
    return sent_objs


# Notification title + body content parameters get created here.
def create_notification_parameters(n_object, datastore):
    from copy import deepcopy

    # in the case we send a test notification from the main settings, there is no UUID.
    uuid = n_object['uuid'] if 'uuid' in n_object else ''

    if uuid != '':
        watch_title = datastore.data['watching'][uuid]['title']
        watch_tag = datastore.data['watching'][uuid]['tag']
    else:
        watch_title = 'Change Detection'
        watch_tag = ''

    # Create URLs to customise the notification with
    base_url = datastore.data['settings']['application']['base_url']

    watch_url = n_object['watch_url']

    # Re #148 - Some people have just {base_url} in the body or title, but this may break some notification services
    # like 'Join', so it's always best to at least set something obvious so that they are not broken.
    if base_url == '':
        base_url = "<base-url-env-var-not-set>"

    diff_url = "{}/diff/{}".format(base_url, uuid)
    preview_url = "{}/preview/{}".format(base_url, uuid)

    # Not sure deepcopy is needed here, but why not
    tokens = deepcopy(valid_tokens)

    # valid_tokens is also used as a field validator
    tokens.update(
        {
            'base_url': base_url if base_url is not None else '',
            'watch_url': watch_url,
            'watch_uuid': uuid,
            'watch_title': watch_title if watch_title is not None else '',
            'watch_tag': watch_tag if watch_tag is not None else '',
            'diff_url': diff_url,
            'diff': n_object.get('diff', ''),  # Null default in the case we use a test
            'diff_full': n_object.get('diff_full', ''),  # Null default in the case we use a test
            'preview_url': preview_url,
            'current_snapshot': n_object['current_snapshot'] if 'current_snapshot' in n_object else ''
        })

    return tokens
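The notification pipeline above is essentially token replacement over the Apprise URLs: create_notification_parameters() builds the {token} map and process_notification() substitutes it into the title and body before handing off to Apprise. A reduced sketch of that substitution step (the token values are invented for illustration):

tokens = {
    'watch_url': 'https://example.com/pricing',      # illustrative
    'diff': '+ Price: $10.99',                        # illustrative
    'preview_url': 'https://my-instance/preview/1234' # illustrative
}
n_body = '{watch_url} had a change.\n---\n{diff}\n---\n'  # default_notification_body above
for name, value in tokens.items():
    n_body = n_body.replace('{' + name + '}', value)
print(n_body)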
51
changedetectionio/run_all_tests.sh
Executable file
@@ -0,0 +1,51 @@
#!/bin/bash


# live_server will throw errors even with live_server_scope=function if I have the live_server setup in different functions
# and I like to restart the server for each test (and have the test cleanup after each test)
# merge request welcome :)


# exit when any command fails
set -e

find tests/test_*py -type f|while read test_name
do
  echo "TEST RUNNING $test_name"
  pytest $test_name
done

echo "RUNNING WITH BASE_URL SET"

# Now re-run some tests with BASE_URL enabled
# Re #65 - Ability to include a link back to the installation, in the notification.
export BASE_URL="https://really-unique-domain.io"
pytest tests/test_notification.py


# Now for the selenium and playwright/browserless fetchers
# Note - this is not UI functional tests - just checking that each one can fetch the content

echo "TESTING WEBDRIVER FETCH > SELENIUM/WEBDRIVER..."
docker run -d --name $$-test_selenium -p 4444:4444 --rm --shm-size="2g" selenium/standalone-chrome-debug:3.141.59
# takes a while to spin up
sleep 5
export WEBDRIVER_URL=http://localhost:4444/wd/hub
pytest tests/fetchers/test_content.py
pytest tests/test_errorhandling.py
unset WEBDRIVER_URL
docker kill $$-test_selenium

echo "TESTING WEBDRIVER FETCH > PLAYWRIGHT/BROWSERLESS..."
# Not all platforms support playwright (not ARM/rPI), so it's not packaged in requirements.txt
pip3 install playwright~=1.24
docker run -d --name $$-test_browserless -e "DEFAULT_LAUNCH_ARGS=[\"--window-size=1920,1080\"]" --rm -p 3000:3000 --shm-size="2g" browserless/chrome:1.53-chrome-stable
# takes a while to spin up
sleep 5
export PLAYWRIGHT_DRIVER_URL=ws://127.0.0.1:3000
pytest tests/fetchers/test_content.py
pytest tests/test_errorhandling.py
pytest tests/visualselector/test_fetch_data.py

unset PLAYWRIGHT_DRIVER_URL
docker kill $$-test_browserless
Before Width: | Height: | Size: 569 B After Width: | Height: | Size: 569 B |
BIN
changedetectionio/static/images/Google-Chrome-icon.png
Normal file
|
After Width: | Height: | Size: 14 KiB |
BIN
changedetectionio/static/images/Playwright-icon.png
Normal file
|
After Width: | Height: | Size: 6.2 KiB |
BIN
changedetectionio/static/images/avatar-256x256.png
Normal file
|
After Width: | Height: | Size: 38 KiB |
42
changedetectionio/static/images/bell-off.svg
Normal file
@@ -0,0 +1,42 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
width="15"
|
||||
height="16.363636"
|
||||
viewBox="0 0 15 16.363636"
|
||||
version="1.1"
|
||||
id="svg4"
|
||||
sodipodi:docname="bell-off.svg"
|
||||
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<sodipodi:namedview
|
||||
id="namedview5"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
showgrid="false"
|
||||
fit-margin-top="0"
|
||||
fit-margin-left="0"
|
||||
fit-margin-right="0"
|
||||
fit-margin-bottom="0"
|
||||
inkscape:zoom="28.416667"
|
||||
inkscape:cx="-0.59824046"
|
||||
inkscape:cy="12"
|
||||
inkscape:window-width="1554"
|
||||
inkscape:window-height="896"
|
||||
inkscape:window-x="2095"
|
||||
inkscape:window-y="107"
|
||||
inkscape:window-maximized="0"
|
||||
inkscape:current-layer="svg4" />
|
||||
<defs
|
||||
id="defs8" />
|
||||
<path
|
||||
d="m 14.318182,11.762045 v 1.1925 H 5.4102273 L 11.849318,7.1140909 C 12.234545,9.1561364 12.54,11.181818 14.318182,11.762045 Z m -6.7984093,4.601591 c 1.0759091,0 2.0256823,-0.955909 2.0256823,-2.045454 H 5.4545455 c 0,1.089545 0.9879545,2.045454 2.0652272,2.045454 z M 15,2.8622727 0.9177273,15.636136 0,14.627045 l 1.8443182,-1.6725 h -1.1625 v -1.1925 C 4.0070455,10.677273 2.1784091,4.5388636 5.3611364,2.6897727 5.8009091,2.4347727 6.0709091,1.9609091 6.0702273,1.4488636 v -0.00205 C 6.0702273,0.64772727 6.7104545,0 7.5,0 8.2895455,0 8.9297727,0.64772727 8.9297727,1.4468182 v 0.00205 C 8.9290909,1.9602319 9.199773,2.4354591 9.638864,2.6897773 10.364318,3.111141 10.827273,3.7568228 11.1525,4.5129591 L 14.085682,1.8531818 Z M 6.8181818,1.3636364 C 6.8181818,1.74 7.1236364,2.0454545 7.5,2.0454545 7.8763636,2.0454545 8.1818182,1.74 8.1818182,1.3636364 8.1818182,0.98795455 7.8763636,0.68181818 7.5,0.68181818 c -0.3763636,0 -0.6818182,0.30613637 -0.6818182,0.68181822 z"
|
||||
id="path2"
|
||||
style="fill:#f8321b;stroke-width:0.681818;fill-opacity:1" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.1 KiB |
BIN
changedetectionio/static/images/beta-logo.png
Normal file
|
After Width: | Height: | Size: 12 KiB |
40
changedetectionio/static/images/copy.svg
Normal file
@@ -0,0 +1,40 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
version="1.1"
|
||||
id="Layer_1"
|
||||
x="0px"
|
||||
y="0px"
|
||||
viewBox="0 0 115.77 122.88"
|
||||
style="enable-background:new 0 0 115.77 122.88"
|
||||
xml:space="preserve"
|
||||
sodipodi:docname="copy.svg"
|
||||
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"><defs
|
||||
id="defs11" /><sodipodi:namedview
|
||||
id="namedview9"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
showgrid="false"
|
||||
inkscape:zoom="5.5501303"
|
||||
inkscape:cx="57.83648"
|
||||
inkscape:cy="61.439999"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1056"
|
||||
inkscape:window-x="1920"
|
||||
inkscape:window-y="0"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="g6" /><style
|
||||
type="text/css"
|
||||
id="style2">.st0{fill-rule:evenodd;clip-rule:evenodd;}</style><g
|
||||
id="g6"><path
|
||||
class="st0"
|
||||
d="M89.62,13.96v7.73h12.19h0.01v0.02c3.85,0.01,7.34,1.57,9.86,4.1c2.5,2.51,4.06,5.98,4.07,9.82h0.02v0.02 v73.27v0.01h-0.02c-0.01,3.84-1.57,7.33-4.1,9.86c-2.51,2.5-5.98,4.06-9.82,4.07v0.02h-0.02h-61.7H40.1v-0.02 c-3.84-0.01-7.34-1.57-9.86-4.1c-2.5-2.51-4.06-5.98-4.07-9.82h-0.02v-0.02V92.51H13.96h-0.01v-0.02c-3.84-0.01-7.34-1.57-9.86-4.1 c-2.5-2.51-4.06-5.98-4.07-9.82H0v-0.02V13.96v-0.01h0.02c0.01-3.85,1.58-7.34,4.1-9.86c2.51-2.5,5.98-4.06,9.82-4.07V0h0.02h61.7 h0.01v0.02c3.85,0.01,7.34,1.57,9.86,4.1c2.5,2.51,4.06,5.98,4.07,9.82h0.02V13.96L89.62,13.96z M79.04,21.69v-7.73v-0.02h0.02 c0-0.91-0.39-1.75-1.01-2.37c-0.61-0.61-1.46-1-2.37-1v0.02h-0.01h-61.7h-0.02v-0.02c-0.91,0-1.75,0.39-2.37,1.01 c-0.61,0.61-1,1.46-1,2.37h0.02v0.01v64.59v0.02h-0.02c0,0.91,0.39,1.75,1.01,2.37c0.61,0.61,1.46,1,2.37,1v-0.02h0.01h12.19V35.65 v-0.01h0.02c0.01-3.85,1.58-7.34,4.1-9.86c2.51-2.5,5.98-4.06,9.82-4.07v-0.02h0.02H79.04L79.04,21.69z M105.18,108.92V35.65v-0.02 h0.02c0-0.91-0.39-1.75-1.01-2.37c-0.61-0.61-1.46-1-2.37-1v0.02h-0.01h-61.7h-0.02v-0.02c-0.91,0-1.75,0.39-2.37,1.01 c-0.61,0.61-1,1.46-1,2.37h0.02v0.01v73.27v0.02h-0.02c0,0.91,0.39,1.75,1.01,2.37c0.61,0.61,1.46,1,2.37,1v-0.02h0.01h61.7h0.02 v0.02c0.91,0,1.75-0.39,2.37-1.01c0.61-0.61,1-1.46,1-2.37h-0.02V108.92L105.18,108.92z"
|
||||
id="path4"
|
||||
style="fill:#ffffff;fill-opacity:1" /></g></svg>
|
||||
|
After Width: | Height: | Size: 2.5 KiB |
BIN
changedetectionio/static/images/favicon.ico
Normal file
|
After Width: | Height: | Size: 31 KiB |
|
Before Width: | Height: | Size: 43 KiB After Width: | Height: | Size: 43 KiB |
51
changedetectionio/static/images/notice.svg
Normal file
@@ -0,0 +1,51 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||
|
||||
<svg
|
||||
width="20.108334mm"
|
||||
height="21.43125mm"
|
||||
viewBox="0 0 20.108334 21.43125"
|
||||
version="1.1"
|
||||
id="svg5"
|
||||
xmlns:xlink="http://www.w3.org/1999/xlink"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<defs
|
||||
id="defs2" />
|
||||
<g
|
||||
id="layer1"
|
||||
transform="translate(-141.05873,-76.816635)">
|
||||
<image
|
||||
width="20.108334"
|
||||
height="21.43125"
|
||||
preserveAspectRatio="none"
|
||||
style="image-rendering:optimizeQuality"
|
||||
xlink:href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEwAAABRCAYAAAB430BuAAAABHNCSVQICAgIfAhkiAAABLxJREFU
|
||||
eJztnN2Z2jgUhl8Z7petIGwF0WMXsFBBoIKwFWS2gmQryKSCJRXsTAUDBTDRVBCmgkAB9tkLexh+
|
||||
bIONLGwP7xU2RjafpaOjoyNBCxHNQAJEfG5sl+3ZLrAWeAyST5/sF91mFH3bRbZbsAq4ClaQq2B7
|
||||
iKYnmg9Z318F20ICRnj8pMOd6E3HscNVsATxmQD/oeghPCnDLO26q2AkYin+TQ7XREyyrn3zgu2J
|
||||
BSEjZTBZ179pwQ7EEv7KaoovvFnBUsV6ZHrsd+0WTHhKPV1SLGivYEsA1KEtEs2grFitRjQ65VxP
|
||||
fH5JgEjAKsvXupKwFfYxaYJeSeHcWqVSCuwD7/HQQD8lRHLWDStBWG3slbAElkTc5/lTZdkIJhpN
|
||||
h6/UUZDyzAgZK8PKVoEKErE8HlD0bBVcI2ZqwdBWYbFgAT+g1UZwrBbcvRyIpofHJ1Sh1rQCZt1k
|
||||
lN5msQAm8CoYoFF8KVHOsFtQ5aayExBUhpnopJl6J/3/FREGWCrxmaH40/4z1oyQ320Yf5dDozXC
|
||||
P4QMCRkCY4S5w/tbMTtd4L2Ngo6wJmSQ4hfdScAU+OjgGazgOXEl8oJyof3Z6Spx0iTzgnLKsMoK
|
||||
w9SRuoR3rHniVVMXwRpDXQR7d+kHOJV6CFZB0khVOBGsTcE6VzWsNVGQizfJptU+N4LlD3AbVfsu
|
||||
XsOahhvB8nrB08IrtcGNYNIct+EYl2+S6mr0D8kLUMrV6BfFRTzOGs4Ey8p1aNrUnssaliaMO/vV
|
||||
sfNi3AmW5j54DgUTO/dyJ1hab9iwHhLcNskP23ZMND0kewFBXek6vZvHg/hMiUPSN00z+OBasFig
|
||||
y8wSRfnZ0adSBz+sUVwFK4jbJhnPP06To1ETczpcCnavHhltHd82LU0AXDbJMGXBU8PSBAA8Jxk0
|
||||
wnNaqlGSJuAyg+dsXIV38iZqXU3iWsmodhetSNlDQgJGriZxbWVSe1hS/gQ+S/C6j4QEfES21vxU
|
||||
icXsoC4vC5mqJvbybyXgduucG/YWaYmmj+IdHvpoxFdt8ltRP5h3iZjRqfBh60C4t1rNY7rxAU95
|
||||
aYnhEp+/u8pgxGfeRCfyJIR5SkLfFOHYXMMzu63PEDF9WQnSo8MUmhduyUWYEzGyvnRmU3683ugG
|
||||
GAG/2bqJU4RnFDNCpsfWb5chswUnwb5Xg+hxiyo9w7MGJoSVpmYulam+A8scS+5nPYtf+s9mpZw7
|
||||
J1nayDnCVuu4Ck+E6DqIBYDHHR1+is/n8kVUhfBExMBFMzm4taafkXcWL9BSfBG/nNN8sutYcE3S
|
||||
d7XI3o6lSpIe/xcAIX/svzDxMVu22BAyLNKL2q9hwrdLiZWwXbP6B99GDLaGSpoOD6JPn4yxK1i8
|
||||
B0StY1zKsCJiQNxzQ0HRbAm2BsZN2TBDGVaE5USzIVjsNix2VrzWHmUwB6J5fD32uyKCzQ7OxG5D
|
||||
vzZuQ0E2osXjRlBMjvWe5WtYPE4b2BynXQJlMEToTUegmEiwM1mzQ1nBvqvH5ov1wlZHcA+AZHdc
|
||||
xQW7vNuQS9kBtzKs1IIRMM7b0q/YvGTzto4qbFutdV5FnLtLk2x3JVWUfXKTbIu9Opc2J6Osj19S
|
||||
HLfJKO64r6rg/wFBX3+2ZapW8wAAAABJRU5ErkJggg==
|
||||
"
|
||||
id="image832"
|
||||
x="141.05873"
|
||||
y="76.816635" />
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.4 KiB |
|
Before Width: | Height: | Size: 2.9 KiB After Width: | Height: | Size: 2.9 KiB |
122
changedetectionio/static/images/play.svg
Normal file
@@ -0,0 +1,122 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
version="1.1"
|
||||
id="Capa_1"
|
||||
x="0px"
|
||||
y="0px"
|
||||
viewBox="0 0 15 14.998326"
|
||||
xml:space="preserve"
|
||||
width="15"
|
||||
height="14.998326"
|
||||
sodipodi:docname="play.svg"
|
||||
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"><sodipodi:namedview
|
||||
id="namedview21"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
showgrid="false"
|
||||
inkscape:zoom="45.47174"
|
||||
inkscape:cx="7.4991632"
|
||||
inkscape:cy="7.4991632"
|
||||
inkscape:window-width="1554"
|
||||
inkscape:window-height="896"
|
||||
inkscape:window-x="3048"
|
||||
inkscape:window-y="227"
|
||||
inkscape:window-maximized="0"
|
||||
inkscape:current-layer="Capa_1" /><metadata
|
||||
id="metadata39"><rdf:RDF><cc:Work
|
||||
rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" /></cc:Work></rdf:RDF></metadata><defs
|
||||
id="defs37" />
|
||||
<path
|
||||
id="path2"
|
||||
style="fill:#1b98f8;fill-opacity:1;stroke-width:0.0292893"
|
||||
d="M 7.4980469,0 C 4.5496028,-0.04093755 1.7047721,1.8547661 0.58789062,4.5800781 -0.57819305,7.2574082 0.02636631,10.583252 2.0703125,12.671875 4.0368718,14.788335 7.2754393,15.560096 9.9882812,14.572266 12.800219,13.617028 14.874915,10.855516 14.986328,7.8847656 15.172991,4.9968456 13.497714,2.109448 10.910156,0.8203125 9.858961,0.28011352 8.6796569,-0.00179908 7.4980469,0 Z"
|
||||
sodipodi:nodetypes="ccccccc" />
|
||||
<g
|
||||
id="g4"
|
||||
transform="translate(-0.01903604,0.02221043)">
|
||||
</g>
|
||||
<g
|
||||
id="g6"
|
||||
transform="translate(-0.01903604,0.02221043)">
|
||||
</g>
|
||||
<g
|
||||
id="g8"
|
||||
transform="translate(-0.01903604,0.02221043)">
|
||||
</g>
|
||||
<g
|
||||
id="g10"
|
||||
transform="translate(-0.01903604,0.02221043)">
|
||||
</g>
|
||||
<g
|
||||
id="g12"
|
||||
transform="translate(-0.01903604,0.02221043)">
|
||||
</g>
|
||||
<g
|
||||
id="g14"
|
||||
transform="translate(-0.01903604,0.02221043)">
|
||||
</g>
|
||||
<g
|
||||
id="g16"
|
||||
transform="translate(-0.01903604,0.02221043)">
|
||||
</g>
|
||||
<g
|
||||
id="g18"
|
||||
transform="translate(-0.01903604,0.02221043)">
|
||||
</g>
|
||||
<g
|
||||
id="g20"
|
||||
transform="translate(-0.01903604,0.02221043)">
|
||||
</g>
|
||||
<g
|
||||
id="g22"
|
||||
transform="translate(-0.01903604,0.02221043)">
|
||||
</g>
|
||||
<g
|
||||
id="g24"
|
||||
transform="translate(-0.01903604,0.02221043)">
|
||||
</g>
|
||||
<g
|
||||
id="g26"
|
||||
transform="translate(-0.01903604,0.02221043)">
|
||||
</g>
|
||||
<g
|
||||
id="g28"
|
||||
transform="translate(-0.01903604,0.02221043)">
|
||||
</g>
|
||||
<g
|
||||
id="g30"
|
||||
transform="translate(-0.01903604,0.02221043)">
|
||||
</g>
|
||||
<g
|
||||
id="g32"
|
||||
transform="translate(-0.01903604,0.02221043)">
|
||||
</g>
|
||||
<path
|
||||
sodipodi:type="star"
|
||||
style="fill:#ffffff;fill-opacity:1;stroke-width:37.7953;paint-order:stroke fill markers"
|
||||
id="path1203"
|
||||
inkscape:flatsided="false"
|
||||
sodipodi:sides="3"
|
||||
sodipodi:cx="7.2964563"
|
||||
sodipodi:cy="7.3240671"
|
||||
sodipodi:r1="3.805218"
|
||||
sodipodi:r2="1.9026089"
|
||||
sodipodi:arg1="-0.0017436774"
|
||||
sodipodi:arg2="1.0454539"
|
||||
inkscape:rounded="0"
|
||||
inkscape:randomized="0"
|
||||
d="M 11.101669,7.317432 8.2506324,8.9701135 5.3995964,10.622795 5.3938504,7.3273846 5.3881041,4.0319742 8.2448863,5.6747033 Z"
|
||||
inkscape:transform-center-x="-0.94843001"
|
||||
inkscape:transform-center-y="0.0033175346" /></svg>
|
||||
|
After Width: | Height: | Size: 3.5 KiB |
20
changedetectionio/static/images/spread-white.svg
Normal file
@@ -0,0 +1,20 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
width="18"
|
||||
height="19.92"
|
||||
viewBox="0 0 18 19.92"
|
||||
version="1.1"
|
||||
id="svg6"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<defs
|
||||
id="defs10" />
|
||||
<path
|
||||
d="M -3,-2 H 21 V 22 H -3 Z"
|
||||
fill="none"
|
||||
id="path2" />
|
||||
<path
|
||||
d="m 15,14.08 c -0.76,0 -1.44,0.3 -1.96,0.77 L 5.91,10.7 C 5.96,10.47 6,10.24 6,10 6,9.76 5.96,9.53 5.91,9.3 L 12.96,5.19 C 13.5,5.69 14.21,6 15,6 16.66,6 18,4.66 18,3 18,1.34 16.66,0 15,0 c -1.66,0 -3,1.34 -3,3 0,0.24 0.04,0.47 0.09,0.7 L 5.04,7.81 C 4.5,7.31 3.79,7 3,7 1.34,7 0,8.34 0,10 c 0,1.66 1.34,3 3,3 0.79,0 1.5,-0.31 2.04,-0.81 l 7.12,4.16 c -0.05,0.21 -0.08,0.43 -0.08,0.65 0,1.61 1.31,2.92 2.92,2.92 1.61,0 2.92,-1.31 2.92,-2.92 0,-1.61 -1.31,-2.92 -2.92,-2.92 z"
|
||||
id="path4"
|
||||
style="fill:#ffffff;fill-opacity:1" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 892 B |
46
changedetectionio/static/images/spread.svg
Normal file
@@ -0,0 +1,46 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
width="18"
|
||||
height="19.92"
|
||||
viewBox="0 0 18 19.92"
|
||||
version="1.1"
|
||||
id="svg6"
|
||||
sodipodi:docname="spread.svg"
|
||||
inkscape:version="1.1.1 (1:1.1+202109281949+c3084ef5ed)"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<defs
|
||||
id="defs10" />
|
||||
<sodipodi:namedview
|
||||
id="namedview8"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
showgrid="false"
|
||||
fit-margin-top="0"
|
||||
fit-margin-left="0"
|
||||
fit-margin-right="0"
|
||||
fit-margin-bottom="0"
|
||||
inkscape:zoom="28.416667"
|
||||
inkscape:cx="9.0087975"
|
||||
inkscape:cy="9.9941348"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1056"
|
||||
inkscape:window-x="1920"
|
||||
inkscape:window-y="0"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="svg6" />
|
||||
<path
|
||||
d="M -3,-2 H 21 V 22 H -3 Z"
|
||||
fill="none"
|
||||
id="path2" />
|
||||
<path
|
||||
d="m 15,14.08 c -0.76,0 -1.44,0.3 -1.96,0.77 L 5.91,10.7 C 5.96,10.47 6,10.24 6,10 6,9.76 5.96,9.53 5.91,9.3 L 12.96,5.19 C 13.5,5.69 14.21,6 15,6 16.66,6 18,4.66 18,3 18,1.34 16.66,0 15,0 c -1.66,0 -3,1.34 -3,3 0,0.24 0.04,0.47 0.09,0.7 L 5.04,7.81 C 4.5,7.31 3.79,7 3,7 1.34,7 0,8.34 0,10 c 0,1.66 1.34,3 3,3 0.79,0 1.5,-0.31 2.04,-0.81 l 7.12,4.16 c -0.05,0.21 -0.08,0.43 -0.08,0.65 0,1.61 1.31,2.92 2.92,2.92 1.61,0 2.92,-1.31 2.92,-2.92 0,-1.61 -1.31,-2.92 -2.92,-2.92 z"
|
||||
id="path4"
|
||||
style="fill:#0078e7;fill-opacity:1" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.7 KiB |
23
changedetectionio/static/js/diff-overview.js
Normal file
@@ -0,0 +1,23 @@
$(document).ready(function () {
    // Load it when the #screenshot tab is in use, so we don't give a slow experience when waiting for the text diff to load
    window.addEventListener('hashchange', function (e) {
        toggle(location.hash);
    }, false);

    toggle(location.hash);

    function toggle(hash_name) {
        if (hash_name === '#screenshot') {
            $("img#screenshot-img").attr('src', screenshot_url);
            $("#settings").hide();
        } else if (hash_name === '#error-screenshot') {
            $("img#error-screenshot-img").attr('src', error_screenshot_url);
            $("#settings").hide();
        } else {
            $("#settings").show();
        }
    }
});
36
changedetectionio/static/js/global-settings.js
Normal file
@@ -0,0 +1,36 @@
$(document).ready(function () {
    function toggle() {
        if ($('input[name="application-fetch_backend"]:checked').val() != 'html_requests') {
            $('#requests-override-options').hide();
            $('#webdriver-override-options').show();
        } else {
            $('#requests-override-options').show();
            $('#webdriver-override-options').hide();
        }
    }

    $('input[name="application-fetch_backend"]').click(function (e) {
        toggle();
    });
    toggle();

    $("#api-key").hover(
        function () {
            $("#api-key-copy").html('copy').fadeIn();
        },
        function () {
            $("#api-key-copy").hide();
        }
    ).click(function (e) {
        $("#api-key-copy").html('copied');
        var range = document.createRange();
        var n = $("#api-key")[0];
        range.selectNode(n);
        window.getSelection().removeAllRanges();
        window.getSelection().addRange(range);
        document.execCommand("copy");
        window.getSelection().removeAllRanges();
    });
});
2
changedetectionio/static/js/jquery-3.6.0.min.js
vendored
Normal file
56
changedetectionio/static/js/limit.js
Normal file
@@ -0,0 +1,56 @@
|
||||
/**
|
||||
* debounce
|
||||
* @param {integer} milliseconds This param indicates the number of milliseconds
|
||||
* to wait after the last call before calling the original function.
|
||||
* @param {object} What "this" refers to in the returned function.
|
||||
* @return {function} This returns a function that when called will wait the
|
||||
* indicated number of milliseconds after the last call before
|
||||
* calling the original function.
|
||||
*/
|
||||
Function.prototype.debounce = function (milliseconds, context) {
|
||||
var baseFunction = this,
|
||||
timer = null,
|
||||
wait = milliseconds;
|
||||
|
||||
return function () {
|
||||
var self = context || this,
|
||||
args = arguments;
|
||||
|
||||
function complete() {
|
||||
baseFunction.apply(self, args);
|
||||
timer = null;
|
||||
}
|
||||
|
||||
if (timer) {
|
||||
clearTimeout(timer);
|
||||
}
|
||||
|
||||
timer = setTimeout(complete, wait);
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* throttle
|
||||
* @param {integer} milliseconds This param indicates the number of milliseconds
|
||||
* to wait between calls before calling the original function.
|
||||
* @param {object} What "this" refers to in the returned function.
|
||||
* @return {function} This returns a function that when called will wait the
|
||||
* indicated number of milliseconds between calls before
|
||||
* calling the original function.
|
||||
*/
|
||||
Function.prototype.throttle = function (milliseconds, context) {
|
||||
var baseFunction = this,
|
||||
lastEventTimestamp = null,
|
||||
limit = milliseconds;
|
||||
|
||||
return function () {
|
||||
var self = context || this,
|
||||
args = arguments,
|
||||
now = Date.now();
|
||||
|
||||
if (!lastEventTimestamp || now - lastEventTimestamp >= limit) {
|
||||
lastEventTimestamp = now;
|
||||
baseFunction.apply(self, args);
|
||||
}
|
||||
};
|
||||
};
|
||||
59
changedetectionio/static/js/notifications.js
Normal file
@@ -0,0 +1,59 @@
|
||||
$(document).ready(function() {
|
||||
|
||||
$('#add-email-helper').click(function (e) {
|
||||
e.preventDefault();
|
||||
email = prompt("Destination email");
|
||||
if(email) {
|
||||
var n = $(".notification-urls");
|
||||
var p=email_notification_prefix;
|
||||
$(n).val( $.trim( $(n).val() )+"\n"+email_notification_prefix+email );
|
||||
}
|
||||
});
|
||||
|
||||
$('#send-test-notification').click(function (e) {
|
||||
e.preventDefault();
|
||||
|
||||
// this can be global
|
||||
var csrftoken = $('input[name=csrf_token]').val();
|
||||
$.ajaxSetup({
|
||||
beforeSend: function(xhr, settings) {
|
||||
if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) {
|
||||
xhr.setRequestHeader("X-CSRFToken", csrftoken)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
data = {
|
||||
window_url : window.location.href,
|
||||
notification_urls : $('.notification-urls').val(),
|
||||
notification_title : $('.notification-title').val(),
|
||||
notification_body : $('.notification-body').val(),
|
||||
notification_format : $('.notification-format').val(),
|
||||
}
|
||||
for (key in data) {
|
||||
if (!data[key].length) {
|
||||
alert(key+" is empty, cannot send test.")
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
$.ajax({
|
||||
type: "POST",
|
||||
url: notification_base_url,
|
||||
data : data,
|
||||
statusCode: {
|
||||
400: function() {
|
||||
// More than likely the CSRF token was lost when the server restarted
|
||||
alert("There was a problem processing the request, please reload the page.");
|
||||
}
|
||||
}
|
||||
}).done(function(data){
|
||||
console.log(data);
|
||||
alert('Sent');
|
||||
}).fail(function(data){
|
||||
console.log(data);
|
||||
alert('There was an error communicating with the server.');
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
48
changedetectionio/static/js/tabs.js
Normal file
@@ -0,0 +1,48 @@
|
||||
// Rewrite this is a plugin.. is all this JS really 'worth it?'
|
||||
|
||||
window.addEventListener('hashchange', function () {
|
||||
var tabs = document.getElementsByClassName('active');
|
||||
while (tabs[0]) {
|
||||
tabs[0].classList.remove('active')
|
||||
}
|
||||
set_active_tab();
|
||||
}, false);
|
||||
|
||||
var has_errors = document.querySelectorAll(".messages .error");
|
||||
if (!has_errors.length) {
|
||||
if (document.location.hash == "") {
|
||||
document.querySelector(".tabs ul li:first-child a").click();
|
||||
} else {
|
||||
set_active_tab();
|
||||
}
|
||||
} else {
|
||||
focus_error_tab();
|
||||
}
|
||||
|
||||
function set_active_tab() {
|
||||
var tab = document.querySelectorAll("a[href='" + location.hash + "']");
|
||||
if (tab.length) {
|
||||
tab[0].parentElement.className = "active";
|
||||
}
|
||||
// hash could move the page down
|
||||
window.scrollTo(0, 0);
|
||||
}
|
||||
|
||||
function focus_error_tab() {
|
||||
// time to use jquery or vuejs really,
|
||||
// activate the tab with the error
|
||||
var tabs = document.querySelectorAll('.tabs li a'), i;
|
||||
for (i = 0; i < tabs.length; ++i) {
|
||||
var tab_name = tabs[i].hash.replace('#', '');
|
||||
var pane_errors = document.querySelectorAll('#' + tab_name + ' .error')
|
||||
if (pane_errors.length) {
|
||||
document.location.hash = '#' + tab_name;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
230
changedetectionio/static/js/visual-selector.js
Normal file
@@ -0,0 +1,230 @@
|
||||
// Horrible proof of concept code :)
|
||||
// yes - this is really a hack, if you are a front-ender and want to help, please get in touch!
|
||||
|
||||
$(document).ready(function() {
|
||||
|
||||
var current_selected_i;
|
||||
var state_clicked=false;
|
||||
|
||||
var c;
|
||||
|
||||
// greyed out fill context
|
||||
var xctx;
|
||||
// redline highlight context
|
||||
var ctx;
|
||||
|
||||
var current_default_xpath;
|
||||
var x_scale=1;
|
||||
var y_scale=1;
|
||||
var selector_image;
|
||||
var selector_image_rect;
|
||||
var selector_data;
|
||||
|
||||
$('#visualselector-tab').click(function () {
|
||||
$("img#selector-background").off('load');
|
||||
state_clicked = false;
|
||||
current_selected_i = false;
|
||||
bootstrap_visualselector();
|
||||
});
|
||||
|
||||
$(document).on('keydown', function(event) {
|
||||
if ($("img#selector-background").is(":visible")) {
|
||||
if (event.key == "Escape") {
|
||||
state_clicked=false;
|
||||
ctx.clearRect(0, 0, c.width, c.height);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// For when the page loads
|
||||
if(!window.location.hash || window.location.hash != '#visualselector') {
|
||||
$("img#selector-background").attr('src','');
|
||||
return;
|
||||
}
|
||||
|
||||
// Handle clearing button/link
|
||||
$('#clear-selector').on('click', function(event) {
|
||||
if(!state_clicked) {
|
||||
alert('Oops, Nothing selected!');
|
||||
}
|
||||
state_clicked=false;
|
||||
ctx.clearRect(0, 0, c.width, c.height);
|
||||
xctx.clearRect(0, 0, c.width, c.height);
|
||||
$("#css_filter").val('');
|
||||
});
|
||||
|
||||
|
||||
bootstrap_visualselector();
|
||||
|
||||
|
||||
|
||||
function bootstrap_visualselector() {
|
||||
if ( 1 ) {
|
||||
// bootstrap it, this will trigger everything else
|
||||
$("img#selector-background").bind('load', function () {
|
||||
console.log("Loaded background...");
|
||||
c = document.getElementById("selector-canvas");
|
||||
// greyed out fill context
|
||||
xctx = c.getContext("2d");
|
||||
// redline highlight context
|
||||
ctx = c.getContext("2d");
|
||||
current_default_xpath =$("#css_filter").val();
|
||||
fetch_data();
|
||||
$('#selector-canvas').off("mousemove mousedown");
|
||||
// screenshot_url defined in the edit.html template
|
||||
}).attr("src", screenshot_url);
|
||||
}
|
||||
}
|
||||
|
||||
function fetch_data() {
|
||||
// Image is ready
|
||||
$('.fetching-update-notice').html("Fetching element data..");
|
||||
|
||||
$.ajax({
|
||||
url: watch_visual_selector_data_url,
|
||||
context: document.body
|
||||
}).done(function (data) {
|
||||
$('.fetching-update-notice').html("Rendering..");
|
||||
selector_data = data;
|
||||
console.log("Reported browser width from backend: "+data['browser_width']);
|
||||
state_clicked=false;
|
||||
set_scale();
|
||||
reflow_selector();
|
||||
$('.fetching-update-notice').fadeOut();
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
||||
function set_scale() {
|
||||
|
||||
// some things to check if the scaling doesnt work
|
||||
// - that the widths/sizes really are about the actual screen size cat elements.json |grep -o width......|sort|uniq
|
||||
selector_image = $("img#selector-background")[0];
|
||||
selector_image_rect = selector_image.getBoundingClientRect();
|
||||
|
||||
// make the canvas the same size as the image
|
||||
$('#selector-canvas').attr('height', selector_image_rect.height);
|
||||
$('#selector-canvas').attr('width', selector_image_rect.width);
|
||||
$('#selector-wrapper').attr('width', selector_image_rect.width);
|
||||
x_scale = selector_image_rect.width / selector_data['browser_width'];
|
||||
y_scale = selector_image_rect.height / selector_image.naturalHeight;
|
||||
ctx.strokeStyle = 'rgba(255,0,0, 0.9)';
|
||||
ctx.fillStyle = 'rgba(255,0,0, 0.1)';
|
||||
ctx.lineWidth = 3;
|
||||
console.log("scaling set x: "+x_scale+" by y:"+y_scale);
|
||||
$("#selector-current-xpath").css('max-width', selector_image_rect.width);
|
||||
}
|
||||
|
||||
function reflow_selector() {
|
||||
$(window).resize(function() {
|
||||
set_scale();
|
||||
highlight_current_selected_i();
|
||||
});
|
||||
var selector_currnt_xpath_text=$("#selector-current-xpath span");
|
||||
|
||||
set_scale();
|
||||
|
||||
console.log(selector_data['size_pos'].length + " selectors found");
|
||||
|
||||
// highlight the default one if we can find it in the xPath list
|
||||
// or the xpath matches the default one
|
||||
found = false;
|
||||
if(current_default_xpath.length) {
|
||||
for (var i = selector_data['size_pos'].length; i!==0; i--) {
|
||||
var sel = selector_data['size_pos'][i-1];
|
||||
if(selector_data['size_pos'][i - 1].xpath == current_default_xpath) {
|
||||
console.log("highlighting "+current_default_xpath);
|
||||
current_selected_i = i-1;
|
||||
highlight_current_selected_i();
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if(!found) {
|
||||
alert("Unfortunately your existing CSS/xPath Filter was no longer found!");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
$('#selector-canvas').bind('mousemove', function (e) {
|
||||
if(state_clicked) {
|
||||
return;
|
||||
}
|
||||
ctx.clearRect(0, 0, c.width, c.height);
|
||||
current_selected_i=null;
|
||||
|
||||
// Add in offset
|
||||
if ((typeof e.offsetX === "undefined" || typeof e.offsetY === "undefined") || (e.offsetX === 0 && e.offsetY === 0)) {
|
||||
var targetOffset = $(e.target).offset();
|
||||
e.offsetX = e.pageX - targetOffset.left;
|
||||
e.offsetY = e.pageY - targetOffset.top;
|
||||
}
|
||||
|
||||
// Reverse order - the most specific one should be deeper/"laster"
|
||||
// Basically, find the most 'deepest'
|
||||
var found=0;
|
||||
ctx.fillStyle = 'rgba(205,0,0,0.35)';
|
||||
for (var i = selector_data['size_pos'].length; i!==0; i--) {
|
||||
// draw all of them? let them choose somehow?
|
||||
var sel = selector_data['size_pos'][i-1];
|
||||
// If we are in a bounding-box
|
||||
if (e.offsetY > sel.top * y_scale && e.offsetY < sel.top * y_scale + sel.height * y_scale
|
||||
&&
|
||||
e.offsetX > sel.left * y_scale && e.offsetX < sel.left * y_scale + sel.width * y_scale
|
||||
|
||||
) {
|
||||
|
||||
// FOUND ONE
|
||||
set_current_selected_text(sel.xpath);
|
||||
ctx.strokeRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
|
||||
ctx.fillRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
|
||||
|
||||
// no need to keep digging
|
||||
// @todo or, O to go out/up, I to go in
|
||||
// or double click to go up/out the selector?
|
||||
current_selected_i=i-1;
|
||||
found+=1;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
}.debounce(5));
|
||||
|
||||
function set_current_selected_text(s) {
|
||||
selector_currnt_xpath_text[0].innerHTML=s;
|
||||
}
|
||||
|
||||
function highlight_current_selected_i() {
|
||||
if(state_clicked) {
|
||||
state_clicked=false;
|
||||
xctx.clearRect(0,0,c.width, c.height);
|
||||
return;
|
||||
}
|
||||
|
||||
var sel = selector_data['size_pos'][current_selected_i];
|
||||
if (sel[0] == '/') {
|
||||
// @todo - not sure just checking / is right
|
||||
$("#css_filter").val('xpath:'+sel.xpath);
|
||||
} else {
|
||||
$("#css_filter").val(sel.xpath);
|
||||
}
|
||||
xctx.fillStyle = 'rgba(205,205,205,0.95)';
|
||||
xctx.strokeStyle = 'rgba(225,0,0,0.9)';
|
||||
xctx.lineWidth = 3;
|
||||
xctx.fillRect(0,0,c.width, c.height);
|
||||
// Clear out what only should be seen (make a clear/clean spot)
|
||||
xctx.clearRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
|
||||
xctx.strokeRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
|
||||
state_clicked=true;
|
||||
set_current_selected_text(sel.xpath);
|
||||
|
||||
}
|
||||
|
||||
|
||||
$('#selector-canvas').bind('mousedown', function (e) {
|
||||
highlight_current_selected_i();
|
||||
});
|
||||
}
|
||||
|
||||
});
|
||||
39
changedetectionio/static/js/watch-overview.js
Normal file
@@ -0,0 +1,39 @@
|
||||
$(function () {
|
||||
// Remove unviewed status when normally clicked
|
||||
$('.diff-link').click(function () {
|
||||
$(this).closest('.unviewed').removeClass('unviewed');
|
||||
});
|
||||
|
||||
|
||||
$('.with-share-link > *').click(function () {
|
||||
$("#copied-clipboard").remove();
|
||||
|
||||
var range = document.createRange();
|
||||
var n=$("#share-link")[0];
|
||||
range.selectNode(n);
|
||||
window.getSelection().removeAllRanges();
|
||||
window.getSelection().addRange(range);
|
||||
document.execCommand("copy");
|
||||
window.getSelection().removeAllRanges();
|
||||
|
||||
$('.with-share-link').append('<span style="font-size: 80%; color: #fff;" id="copied-clipboard">Copied to clipboard</span>');
|
||||
$("#copied-clipboard").fadeOut(2500, function() {
|
||||
$(this).remove();
|
||||
});
|
||||
});
|
||||
|
||||
// checkboxes - check all
|
||||
$("#check-all").click(function (e) {
|
||||
$('input[type=checkbox]').not(this).prop('checked', this.checked);
|
||||
});
|
||||
// checkboxes - show/hide buttons
|
||||
$("input[type=checkbox]").click(function (e) {
|
||||
if ($('input[type=checkbox]:checked').length) {
|
||||
$('#checkbox-operations').slideDown();
|
||||
} else {
|
||||
$('#checkbox-operations').slideUp();
|
||||
}
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
40
changedetectionio/static/js/watch-settings.js
Normal file
@@ -0,0 +1,40 @@
|
||||
$(document).ready(function() {
|
||||
function toggle() {
|
||||
if ($('input[name="fetch_backend"]:checked').val() == 'html_webdriver') {
|
||||
if(playwright_enabled) {
|
||||
// playwright supports headers, so hide everything else
|
||||
// See #664
|
||||
$('#requests-override-options #request-method').hide();
|
||||
$('#requests-override-options #request-body').hide();
|
||||
|
||||
// @todo connect this one up
|
||||
$('#ignore-status-codes-option').hide();
|
||||
} else {
|
||||
// selenium/webdriver doesnt support anything afaik, hide it all
|
||||
$('#requests-override-options').hide();
|
||||
}
|
||||
|
||||
|
||||
$('#webdriver-override-options').show();
|
||||
|
||||
} else {
|
||||
|
||||
$('#requests-override-options').show();
|
||||
$('#requests-override-options *:hidden').show();
|
||||
$('#webdriver-override-options').hide();
|
||||
}
|
||||
}
|
||||
|
||||
$('input[name="fetch_backend"]').click(function (e) {
|
||||
toggle();
|
||||
});
|
||||
toggle();
|
||||
|
||||
$('#notification-setting-reset-to-default').click(function (e) {
|
||||
$('#notification_title').val('');
|
||||
$('#notification_body').val('');
|
||||
$('#notification_format').val('System default');
|
||||
$('#notification_urls').val('');
|
||||
e.preventDefault();
|
||||
});
|
||||
});
|
||||
3
changedetectionio/static/styles/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
node_modules
|
||||
package-lock.json
|
||||
|
||||
@@ -1,9 +1,10 @@
#diff-ui {
  background: #fff;
  padding: 2em;
  margin: 1em;
  margin-left: 1em;
  margin-right: 1em;
  border-radius: 5px;
  font-size: 9px; }
  font-size: 11px; }
  #diff-ui table {
    table-layout: fixed;
    width: 100%; }
@@ -54,3 +55,24 @@ ins {
  body {
    height: 99%;
    /* Hide scroll bar in Firefox */ } }

td#diff-col div {
  text-align: justify;
  white-space: pre-wrap; }

.ignored {
  background-color: #ccc;
  /* border: #0d91fa 1px solid; */
  opacity: 0.7; }

.triggered {
  background-color: #1b98f8; }

/* ignored and triggered? make it obvious error */
.ignored.triggered {
  background-color: #ff0000; }

.tab-pane-inner#screenshot {
  text-align: center; }
  .tab-pane-inner#screenshot img {
    max-width: 99%; }
@@ -2,9 +2,10 @@

  background: #fff;
  padding: 2em;
  margin: 1em;
  margin-left: 1em;
  margin-right: 1em;
  border-radius: 5px;
  font-size: 9px;
  font-size: 11px;

  table {
    table-layout: fixed;
@@ -65,4 +66,31 @@ ins {
  body {
    height: 99%; /* Hide scroll bar in Firefox */
  }
}

td#diff-col div {
  text-align: justify;
  white-space: pre-wrap;
}

.ignored {
  background-color: #ccc;
  /* border: #0d91fa 1px solid; */
  opacity: 0.7;
}

.triggered {
  background-color: #1b98f8;
}

/* ignored and triggered? make it obvious error */
.ignored.triggered {
  background-color: #ff0000;
}

.tab-pane-inner#screenshot {
  text-align: center;
  img {
    max-width: 99%;
  }
}
@@ -4,13 +4,13 @@
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1",
    "scss": "node-sass --watch styles.scss diff.scss -o ."
    "build": "node-sass styles.scss -o .;node-sass diff.scss -o ."
  },
  "author": "",
  "license": "ISC",
  "dependencies": {
    "node-sass": "^6.0.1",
    "node-sass": "^7.0.0",
    "tar": "^6.1.9",
    "trim-newlines": "^3.0.1"
  }
}
26
changedetectionio/static/styles/parts/_arrows.scss
Normal file
@@ -0,0 +1,26 @@
.arrow {
  border: solid #1b98f8;
  border-width: 0 2px 2px 0;
  display: inline-block;
  padding: 3px;

  &.right {
    transform: rotate(-45deg);
    -webkit-transform: rotate(-45deg);
  }

  &.left {
    transform: rotate(135deg);
    -webkit-transform: rotate(135deg);
  }

  &.up, &.asc {
    transform: rotate(-135deg);
    -webkit-transform: rotate(-135deg);
  }

  &.down, &.desc {
    transform: rotate(45deg);
    -webkit-transform: rotate(45deg);
  }
}
@@ -1,8 +1,27 @@
|
||||
/*
|
||||
* -- BASE STYLES --
|
||||
* Most of these are inherited from Base, but I want to change a few.
|
||||
* npm run scss
|
||||
* nvm use v14.18.1 && npm install && npm run build
|
||||
* or npm run watch
|
||||
*/
|
||||
.arrow {
|
||||
border: solid #1b98f8;
|
||||
border-width: 0 2px 2px 0;
|
||||
display: inline-block;
|
||||
padding: 3px; }
|
||||
.arrow.right {
|
||||
transform: rotate(-45deg);
|
||||
-webkit-transform: rotate(-45deg); }
|
||||
.arrow.left {
|
||||
transform: rotate(135deg);
|
||||
-webkit-transform: rotate(135deg); }
|
||||
.arrow.up, .arrow.asc {
|
||||
transform: rotate(-135deg);
|
||||
-webkit-transform: rotate(-135deg); }
|
||||
.arrow.down, .arrow.desc {
|
||||
transform: rotate(45deg);
|
||||
-webkit-transform: rotate(45deg); }
|
||||
|
||||
body {
|
||||
color: #333;
|
||||
background: #262626; }
|
||||
@@ -28,27 +47,36 @@ a.github-link {
|
||||
|
||||
section.content {
|
||||
padding-top: 5em;
|
||||
padding-bottom: 5em;
|
||||
padding-bottom: 1em;
|
||||
flex-direction: column;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center; }
|
||||
|
||||
code {
|
||||
background: #eee; }
|
||||
|
||||
/* table related */
|
||||
.watch-table {
|
||||
width: 100%; }
|
||||
width: 100%;
|
||||
font-size: 80%; }
|
||||
.watch-table tr.unviewed {
|
||||
font-weight: bold; }
|
||||
.watch-table .error {
|
||||
color: #a00; }
|
||||
.watch-table td {
|
||||
font-size: 80%;
|
||||
white-space: nowrap; }
|
||||
.watch-table td.title-col {
|
||||
word-break: break-all;
|
||||
white-space: normal; }
|
||||
.watch-table th {
|
||||
white-space: nowrap; }
|
||||
.watch-table th a {
|
||||
font-weight: normal; }
|
||||
.watch-table th a.active {
|
||||
font-weight: bolder; }
|
||||
.watch-table th a.inactive .arrow {
|
||||
display: none; }
|
||||
.watch-table .title-col a[target="_blank"]::after, .watch-table .current-diff-url::after {
|
||||
content: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAQElEQVR42qXKwQkAIAxDUUdxtO6/RBQkQZvSi8I/pL4BoGw/XPkh4XigPmsUgh0626AjRsgxHTkUThsG2T/sIlzdTsp52kSS1wAAAABJRU5ErkJggg==);
|
||||
margin: 0 3px 0 5px; }
|
||||
@@ -77,11 +105,11 @@ section.content {
|
||||
|
||||
body:after {
|
||||
content: "";
|
||||
background: linear-gradient(130deg, #ff7a18, #af002d 41.07%, #319197 76.05%); }
|
||||
background: linear-gradient(130deg, #5ad8f7, #2f50af 41.07%, #9150bf 84.05%); }
|
||||
|
||||
body:after, body:before {
|
||||
display: block;
|
||||
height: 600px;
|
||||
height: 650px;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
@@ -93,33 +121,12 @@ body::after {
|
||||
|
||||
body::before {
|
||||
content: "";
|
||||
background-image: url(/static/images/gradient-border.png); }
|
||||
|
||||
body:before {
|
||||
background-size: cover; }
|
||||
|
||||
body:after, body:before {
|
||||
-webkit-clip-path: polygon(100% 0, 0 0, 0 77.5%, 1% 77.4%, 2% 77.1%, 3% 76.6%, 4% 75.9%, 5% 75.05%, 6% 74.05%, 7% 72.95%, 8% 71.75%, 9% 70.55%, 10% 69.3%, 11% 68.05%, 12% 66.9%, 13% 65.8%, 14% 64.8%, 15% 64%, 16% 63.35%, 17% 62.85%, 18% 62.6%, 19% 62.5%, 20% 62.65%, 21% 63%, 22% 63.5%, 23% 64.2%, 24% 65.1%, 25% 66.1%, 26% 67.2%, 27% 68.4%, 28% 69.65%, 29% 70.9%, 30% 72.15%, 31% 73.3%, 32% 74.35%, 33% 75.3%, 34% 76.1%, 35% 76.75%, 36% 77.2%, 37% 77.45%, 38% 77.5%, 39% 77.3%, 40% 76.95%, 41% 76.4%, 42% 75.65%, 43% 74.75%, 44% 73.75%, 45% 72.6%, 46% 71.4%, 47% 70.15%, 48% 68.9%, 49% 67.7%, 50% 66.55%, 51% 65.5%, 52% 64.55%, 53% 63.75%, 54% 63.15%, 55% 62.75%, 56% 62.55%, 57% 62.5%, 58% 62.7%, 59% 63.1%, 60% 63.7%, 61% 64.45%, 62% 65.4%, 63% 66.45%, 64% 67.6%, 65% 68.8%, 66% 70.05%, 67% 71.3%, 68% 72.5%, 69% 73.6%, 70% 74.65%, 71% 75.55%, 72% 76.35%, 73% 76.9%, 74% 77.3%, 75% 77.5%, 76% 77.45%, 77% 77.25%, 78% 76.8%, 79% 76.2%, 80% 75.4%, 81% 74.45%, 82% 73.4%, 83% 72.25%, 84% 71.05%, 85% 69.8%, 86% 68.55%, 87% 67.35%, 88% 66.2%, 89% 65.2%, 90% 64.3%, 91% 63.55%, 92% 63%, 93% 62.65%, 94% 62.5%, 95% 62.55%, 96% 62.8%, 97% 63.3%, 98% 63.9%, 99% 64.75%, 100% 65.7%);
|
||||
clip-path: polygon(100% 0, 0 0, 0 77.5%, 1% 77.4%, 2% 77.1%, 3% 76.6%, 4% 75.9%, 5% 75.05%, 6% 74.05%, 7% 72.95%, 8% 71.75%, 9% 70.55%, 10% 69.3%, 11% 68.05%, 12% 66.9%, 13% 65.8%, 14% 64.8%, 15% 64%, 16% 63.35%, 17% 62.85%, 18% 62.6%, 19% 62.5%, 20% 62.65%, 21% 63%, 22% 63.5%, 23% 64.2%, 24% 65.1%, 25% 66.1%, 26% 67.2%, 27% 68.4%, 28% 69.65%, 29% 70.9%, 30% 72.15%, 31% 73.3%, 32% 74.35%, 33% 75.3%, 34% 76.1%, 35% 76.75%, 36% 77.2%, 37% 77.45%, 38% 77.5%, 39% 77.3%, 40% 76.95%, 41% 76.4%, 42% 75.65%, 43% 74.75%, 44% 73.75%, 45% 72.6%, 46% 71.4%, 47% 70.15%, 48% 68.9%, 49% 67.7%, 50% 66.55%, 51% 65.5%, 52% 64.55%, 53% 63.75%, 54% 63.15%, 55% 62.75%, 56% 62.55%, 57% 62.5%, 58% 62.7%, 59% 63.1%, 60% 63.7%, 61% 64.45%, 62% 65.4%, 63% 66.45%, 64% 67.6%, 65% 68.8%, 66% 70.05%, 67% 71.3%, 68% 72.5%, 69% 73.6%, 70% 74.65%, 71% 75.55%, 72% 76.35%, 73% 76.9%, 74% 77.3%, 75% 77.5%, 76% 77.45%, 77% 77.25%, 78% 76.8%, 79% 76.2%, 80% 75.4%, 81% 74.45%, 82% 73.4%, 83% 72.25%, 84% 71.05%, 85% 69.8%, 86% 68.55%, 87% 67.35%, 88% 66.2%, 89% 65.2%, 90% 64.3%, 91% 63.55%, 92% 63%, 93% 62.65%, 94% 62.5%, 95% 62.55%, 96% 62.8%, 97% 63.3%, 98% 63.9%, 99% 64.75%, 100% 65.7%); }
|
||||
|
||||
.arrow {
|
||||
border: solid black;
|
||||
border-width: 0 3px 3px 0;
|
||||
display: inline-block;
|
||||
padding: 3px; }
|
||||
.arrow.right {
|
||||
transform: rotate(-45deg);
|
||||
-webkit-transform: rotate(-45deg); }
|
||||
.arrow.left {
|
||||
transform: rotate(135deg);
|
||||
-webkit-transform: rotate(135deg); }
|
||||
.arrow.up {
|
||||
transform: rotate(-135deg);
|
||||
-webkit-transform: rotate(-135deg); }
|
||||
.arrow.down {
|
||||
transform: rotate(45deg);
|
||||
-webkit-transform: rotate(45deg); }
|
||||
|
||||
.button-small {
|
||||
font-size: 85%; }
|
||||
|
||||
@@ -129,13 +136,6 @@ body:after, body:before {
|
||||
max-width: 400px;
|
||||
display: block; }
|
||||
|
||||
.edit-form {
|
||||
background: #fff;
|
||||
padding: 2em;
|
||||
margin: 1em;
|
||||
border-radius: 5px;
|
||||
min-width: 70%; }
|
||||
|
||||
.button-secondary {
|
||||
color: white;
|
||||
border-radius: 4px;
|
||||
@@ -184,14 +184,19 @@ body:after, body:before {
|
||||
.messages li.notice {
|
||||
background: rgba(255, 255, 255, 0.5); }
|
||||
|
||||
.messages.with-share-link > *:hover {
|
||||
cursor: pointer; }
|
||||
|
||||
#notification-customisation {
|
||||
display: block;
|
||||
border: 1px solid #ccc;
|
||||
padding: 1rem;
|
||||
padding: 0.5rem;
|
||||
border-radius: 5px; }
|
||||
|
||||
#toggle-customise-notifications {
|
||||
cursor: pointer; }
|
||||
#notification-error-log {
|
||||
border: 1px solid #ccc;
|
||||
padding: 1rem;
|
||||
border-radius: 5px;
|
||||
overflow-wrap: break-word; }
|
||||
|
||||
#token-table.pure-table td, #token-table.pure-table th {
|
||||
font-size: 80%; }
|
||||
@@ -202,12 +207,18 @@ body:after, body:before {
|
||||
border-radius: 10px;
|
||||
margin-bottom: 1em; }
|
||||
#new-watch-form input {
|
||||
width: auto !important;
|
||||
display: inline-block; }
|
||||
display: inline-block;
|
||||
margin-bottom: 5px; }
|
||||
#new-watch-form .label {
|
||||
display: none; }
|
||||
#new-watch-form legend {
|
||||
color: #fff; }
|
||||
color: #fff;
|
||||
font-weight: bold; }
|
||||
#new-watch-form #watch-add-wrapper-zone > div {
|
||||
display: inline-block; }
|
||||
@media only screen and (max-width: 760px) {
|
||||
#new-watch-form #watch-add-wrapper-zone #url {
|
||||
width: 100%; } }
|
||||
|
||||
#diff-col {
|
||||
padding-left: 40px; }
|
||||
@@ -221,15 +232,14 @@ body:after, body:before {
|
||||
border-top-right-radius: 5px;
|
||||
border-bottom-right-radius: 5px;
|
||||
box-shadow: 5px 0 5px -2px #888; }
|
||||
|
||||
#diff-jump a {
|
||||
color: #1b98f8;
|
||||
cursor: grabbing;
|
||||
-moz-user-select: none;
|
||||
-webkit-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
-o-user-select: none; }
|
||||
#diff-jump a {
|
||||
color: #1b98f8;
|
||||
cursor: grabbing;
|
||||
-moz-user-select: none;
|
||||
-webkit-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
-o-user-select: none; }
|
||||
|
||||
footer {
|
||||
padding: 10px;
|
||||
@@ -251,23 +261,31 @@ footer {
|
||||
|
||||
.sticky-tab {
|
||||
position: absolute;
|
||||
top: 80px;
|
||||
font-size: 8px;
|
||||
top: 60px;
|
||||
font-size: 65%;
|
||||
background: #fff;
|
||||
padding: 10px; }
|
||||
.sticky-tab#left-sticky {
|
||||
left: 0px; }
|
||||
.sticky-tab#right-sticky {
|
||||
right: 0px; }
|
||||
.sticky-tab#hosted-sticky {
|
||||
right: 0px;
|
||||
top: 100px;
|
||||
font-weight: bold; }
|
||||
|
||||
#new-version-text a {
|
||||
color: #e07171; }
|
||||
|
||||
.paused-state.state-False img {
|
||||
opacity: 0.2; }
|
||||
|
||||
.paused-state.state-False:hover img {
|
||||
opacity: 0.8; }
|
||||
.watch-controls {
|
||||
/* default */ }
|
||||
.watch-controls .state-on img {
|
||||
opacity: 0.8; }
|
||||
.watch-controls img {
|
||||
opacity: 0.2; }
|
||||
.watch-controls img:hover {
|
||||
transition: opacity 0.3s;
|
||||
opacity: 0.8; }
|
||||
|
||||
.monospaced-textarea textarea {
|
||||
width: 100%;
|
||||
@@ -279,10 +297,20 @@ footer {
|
||||
.pure-form {
|
||||
/* The input fields with errors */
|
||||
/* The list of errors */ }
|
||||
.pure-form fieldset {
|
||||
padding-top: 0px; }
|
||||
.pure-form fieldset ul {
|
||||
padding-bottom: 0px;
|
||||
margin-bottom: 0px; }
|
||||
.pure-form .pure-control-group, .pure-form .pure-group, .pure-form .pure-controls {
|
||||
padding-bottom: 1em; }
|
||||
.pure-form .pure-control-group div, .pure-form .pure-group div, .pure-form .pure-controls div {
|
||||
margin: 0px; }
|
||||
.pure-form .pure-control-group .checkbox > *, .pure-form .pure-group .checkbox > *, .pure-form .pure-controls .checkbox > * {
|
||||
display: inline;
|
||||
vertical-align: middle; }
|
||||
.pure-form .pure-control-group .checkbox > label, .pure-form .pure-group .checkbox > label, .pure-form .pure-controls .checkbox > label {
|
||||
padding-left: 5px; }
|
||||
.pure-form .error input {
|
||||
background-color: #ffebeb; }
|
||||
.pure-form ul.errors {
|
||||
@@ -297,32 +325,49 @@ footer {
|
||||
color: #dd0000; }
|
||||
.pure-form label {
|
||||
font-weight: bold; }
|
||||
.pure-form input[type=url] {
|
||||
width: 100%; }
|
||||
.pure-form textarea {
|
||||
width: 100%; }
|
||||
.pure-form .inline-radio ul {
|
||||
margin: 0px;
|
||||
list-style: none; }
|
||||
.pure-form .inline-radio ul li > * {
|
||||
display: inline-block; }
|
||||
|
||||
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 1024px) {
|
||||
.box {
|
||||
max-width: 95%; }
|
||||
.edit-form {
|
||||
padding: 0.5em;
|
||||
margin: 0.5em; }
|
||||
margin: 0; }
|
||||
#nav-menu {
|
||||
overflow-x: scroll; } }
|
||||
|
||||
/*
|
||||
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 800px) {
|
||||
div.sticky-tab#hosted-sticky {
|
||||
top: 60px;
|
||||
left: 0px;
|
||||
right: auto; }
|
||||
section.content {
|
||||
padding-top: 110px; }
|
||||
div.tabs.collapsable ul li {
|
||||
display: block;
|
||||
border-radius: 0px;
|
||||
margin-right: 0px; }
|
||||
input[type='text'] {
|
||||
width: 100%; }
|
||||
/*
|
||||
Max width before this PARTICULAR table gets nasty
|
||||
This query will take effect for any screen smaller than 760px
|
||||
and also iPads specifically.
|
||||
*/
|
||||
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 1024px) {
|
||||
.watch-table {
|
||||
/* Force table to not be like tables anymore */
|
||||
/* Force table to not be like tables anymore */
|
||||
/* Hide table headers (but not display: none;, for accessibility) */ }
|
||||
.watch-table thead, .watch-table tbody, .watch-table th, .watch-table td, .watch-table tr {
|
||||
display: block; }
|
||||
.watch-table .last-checked > span {
|
||||
vertical-align: middle; }
|
||||
.watch-table .last-checked::before {
|
||||
color: #555;
|
||||
content: "Last Checked "; }
|
||||
@@ -340,7 +385,8 @@ and also iPads specifically.
|
||||
.watch-table td {
|
||||
/* Behave like a "row" */
|
||||
border: none;
|
||||
border-bottom: 1px solid #eee; }
|
||||
border-bottom: 1px solid #eee;
|
||||
vertical-align: middle; }
|
||||
.watch-table td:before {
|
||||
/* Top/left values mimic padding */
|
||||
top: 6px;
|
||||
@@ -354,3 +400,181 @@ and also iPads specifically.
|
||||
background-color: #eee; }
|
||||
.watch-table.pure-table-striped tr:nth-child(2n-1) td {
|
||||
background-color: inherit; } }
|
||||
|
||||
/** Desktop vs mobile input field strategy
|
||||
- We don't use 'size' with <input> because `size` is too unreliable to override, and will often push the layout out
|
||||
- Rely always on width in CSS
|
||||
*/
|
||||
@media only screen and (min-width: 761px) {
|
||||
/* m-d is medium-desktop */
|
||||
.m-d {
|
||||
min-width: 80%; } }
|
||||
|
||||
.tabs ul {
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
display: block; }
|
||||
.tabs ul li {
|
||||
margin-right: 3px;
|
||||
display: inline-block;
|
||||
color: #fff;
|
||||
border-top-left-radius: 5px;
|
||||
border-top-right-radius: 5px;
|
||||
background-color: rgba(255, 255, 255, 0.2); }
|
||||
.tabs ul li.active, .tabs ul li :target {
|
||||
background-color: #fff; }
|
||||
.tabs ul li.active a, .tabs ul li :target a {
|
||||
color: #222;
|
||||
font-weight: bold; }
|
||||
.tabs ul li a {
|
||||
display: block;
|
||||
padding: 0.8em;
|
||||
color: #fff; }
|
||||
|
||||
.pure-form-stacked > div:first-child {
|
||||
display: block; }
|
||||
|
||||
.login-form .inner {
|
||||
background: #fff;
|
||||
padding: 20px;
|
||||
border-radius: 5px; }
|
||||
|
||||
.tab-pane-inner {
|
||||
padding: 0px; }
|
||||
.tab-pane-inner:not(:target) {
|
||||
display: none; }
|
||||
.tab-pane-inner:target {
|
||||
display: block; }
|
||||
|
||||
#beta-logo {
|
||||
height: 50px;
|
||||
right: -3px;
|
||||
top: -3px;
|
||||
position: absolute; }
|
||||
|
||||
#selector-header {
|
||||
padding-bottom: 1em; }
|
||||
|
||||
.edit-form {
|
||||
min-width: 70%;
|
||||
/* so it can't overflow */
|
||||
max-width: 95%; }
|
||||
.edit-form .box-wrap {
|
||||
position: relative; }
|
||||
.edit-form .inner {
|
||||
background: #fff;
|
||||
padding: 20px; }
|
||||
.edit-form #actions {
|
||||
display: block;
|
||||
background: #fff; }
|
||||
.edit-form .pure-form-message-inline {
|
||||
padding-left: 0; }
|
||||
|
||||
ul {
|
||||
padding-left: 1em;
|
||||
padding-top: 0px;
|
||||
margin-top: 4px; }
|
||||
|
||||
.time-check-widget tr {
|
||||
display: inline; }
|
||||
.time-check-widget tr input[type="number"] {
|
||||
width: 5em; }
|
||||
|
||||
#selector-wrapper {
|
||||
height: 600px;
|
||||
overflow-y: scroll;
|
||||
position: relative; }
|
||||
#selector-wrapper > img {
|
||||
position: absolute;
|
||||
z-index: 4;
|
||||
max-width: 100%; }
|
||||
#selector-wrapper > canvas {
|
||||
position: relative;
|
||||
z-index: 5;
|
||||
max-width: 100%; }
|
||||
#selector-wrapper > canvas:hover {
|
||||
cursor: pointer; }
|
||||
|
||||
#selector-current-xpath {
|
||||
font-size: 80%; }
|
||||
|
||||
#webdriver-override-options input[type="number"] {
|
||||
width: 5em; }
|
||||
|
||||
#api-key:hover {
|
||||
cursor: pointer; }
|
||||
|
||||
#api-key-copy {
|
||||
color: #0078e7; }
|
||||
|
||||
/* spinner */
|
||||
.loader,
|
||||
.loader:after {
|
||||
border-radius: 50%;
|
||||
width: 10px;
|
||||
height: 10px; }
|
||||
|
||||
.loader {
|
||||
margin: 0px auto;
|
||||
font-size: 3px;
|
||||
vertical-align: middle;
|
||||
display: inline-block;
|
||||
text-indent: -9999em;
|
||||
border-top: 1.1em solid rgba(38, 104, 237, 0.2);
|
||||
border-right: 1.1em solid rgba(38, 104, 237, 0.2);
|
||||
border-bottom: 1.1em solid rgba(38, 104, 237, 0.2);
|
||||
border-left: 1.1em solid #2668ed;
|
||||
-webkit-transform: translateZ(0);
|
||||
-ms-transform: translateZ(0);
|
||||
transform: translateZ(0);
|
||||
-webkit-animation: load8 1.1s infinite linear;
|
||||
animation: load8 1.1s infinite linear; }
|
||||
|
||||
@-webkit-keyframes load8 {
|
||||
0% {
|
||||
-webkit-transform: rotate(0deg);
|
||||
transform: rotate(0deg); }
|
||||
100% {
|
||||
-webkit-transform: rotate(360deg);
|
||||
transform: rotate(360deg); } }
|
||||
|
||||
@keyframes load8 {
|
||||
0% {
|
||||
-webkit-transform: rotate(0deg);
|
||||
transform: rotate(0deg); }
|
||||
100% {
|
||||
-webkit-transform: rotate(360deg);
|
||||
transform: rotate(360deg); } }
|
||||
|
||||
.snapshot-age {
|
||||
padding: 4px;
|
||||
background-color: #dfdfdf;
|
||||
border-radius: 3px;
|
||||
font-weight: bold;
|
||||
margin-bottom: 4px; }
|
||||
.snapshot-age.error {
|
||||
background-color: #ff0000;
|
||||
color: #fff; }
|
||||
|
||||
#checkbox-operations {
|
||||
background: rgba(0, 0, 0, 0.05);
|
||||
padding: 1em;
|
||||
border-radius: 10px;
|
||||
margin-bottom: 1em;
|
||||
display: none; }
|
||||
|
||||
.checkbox-uuid > * {
|
||||
vertical-align: middle; }
|
||||
|
||||
.inline-warning {
|
||||
border: 1px solid #ff3300;
|
||||
padding: 0.5rem;
|
||||
border-radius: 5px;
|
||||
color: #ff3300; }
|
||||
.inline-warning > span {
|
||||
display: inline-block;
|
||||
vertical-align: middle; }
|
||||
.inline-warning img.inline-warning-icon {
|
||||
display: inline;
|
||||
height: 26px;
|
||||
vertical-align: middle; }
|
||||
@@ -1,13 +1,15 @@
|
||||
/*
|
||||
* -- BASE STYLES --
|
||||
* Most of these are inherited from Base, but I want to change a few.
|
||||
* npm run scss
|
||||
* nvm use v14.18.1 && npm install && npm run build
|
||||
* or npm run watch
|
||||
*/
|
||||
@import "parts/_arrows.scss";
|
||||
|
||||
body {
|
||||
color: #333;
|
||||
background: #262626;
|
||||
}
|
||||
|
||||
.pure-table-even {
|
||||
background: #fff;
|
||||
}
|
||||
@@ -33,16 +35,21 @@ a.github-link {
|
||||
|
||||
section.content {
|
||||
padding-top: 5em;
|
||||
padding-bottom: 5em;
|
||||
padding-bottom: 1em;
|
||||
flex-direction: column;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
code {
|
||||
background: #eee;
|
||||
}
|
||||
|
||||
/* table related */
|
||||
.watch-table {
|
||||
width: 100%;
|
||||
font-size: 80%;
|
||||
|
||||
tr.unviewed {
|
||||
font-weight: bold;
|
||||
@@ -53,7 +60,6 @@ section.content {
|
||||
}
|
||||
|
||||
td {
|
||||
font-size: 80%;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
@@ -64,6 +70,17 @@ section.content {
|
||||
|
||||
th {
|
||||
white-space: nowrap;
|
||||
a {
|
||||
font-weight: normal;
|
||||
&.active {
|
||||
font-weight: bolder;
|
||||
}
|
||||
&.inactive {
|
||||
.arrow {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.title-col a[target="_blank"]::after, .current-diff-url::after {
|
||||
@@ -105,12 +122,12 @@ section.content {
|
||||
|
||||
body:after {
|
||||
content: "";
|
||||
background: linear-gradient(130deg, #ff7a18, #af002d 41.07%, #319197 76.05%)
|
||||
background: linear-gradient(130deg, #5ad8f7, #2f50af 41.07%, #9150bf 84.05%);
|
||||
}
|
||||
|
||||
body:after, body:before {
|
||||
display: block;
|
||||
height: 600px;
|
||||
height: 650px;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
@@ -123,11 +140,8 @@ body::after {
|
||||
}
|
||||
|
||||
body::before {
|
||||
// background-image set in base.html so it works with reverse proxies etc
|
||||
content: "";
|
||||
background-image: url(/static/images/gradient-border.png);
|
||||
}
|
||||
|
||||
body:before {
|
||||
background-size: cover
|
||||
}
|
||||
|
||||
@@ -136,29 +150,6 @@ body:after, body:before {
|
||||
clip-path: polygon(100% 0, 0 0, 0 77.5%, 1% 77.4%, 2% 77.1%, 3% 76.6%, 4% 75.9%, 5% 75.05%, 6% 74.05%, 7% 72.95%, 8% 71.75%, 9% 70.55%, 10% 69.3%, 11% 68.05%, 12% 66.9%, 13% 65.8%, 14% 64.8%, 15% 64%, 16% 63.35%, 17% 62.85%, 18% 62.6%, 19% 62.5%, 20% 62.65%, 21% 63%, 22% 63.5%, 23% 64.2%, 24% 65.1%, 25% 66.1%, 26% 67.2%, 27% 68.4%, 28% 69.65%, 29% 70.9%, 30% 72.15%, 31% 73.3%, 32% 74.35%, 33% 75.3%, 34% 76.1%, 35% 76.75%, 36% 77.2%, 37% 77.45%, 38% 77.5%, 39% 77.3%, 40% 76.95%, 41% 76.4%, 42% 75.65%, 43% 74.75%, 44% 73.75%, 45% 72.6%, 46% 71.4%, 47% 70.15%, 48% 68.9%, 49% 67.7%, 50% 66.55%, 51% 65.5%, 52% 64.55%, 53% 63.75%, 54% 63.15%, 55% 62.75%, 56% 62.55%, 57% 62.5%, 58% 62.7%, 59% 63.1%, 60% 63.7%, 61% 64.45%, 62% 65.4%, 63% 66.45%, 64% 67.6%, 65% 68.8%, 66% 70.05%, 67% 71.3%, 68% 72.5%, 69% 73.6%, 70% 74.65%, 71% 75.55%, 72% 76.35%, 73% 76.9%, 74% 77.3%, 75% 77.5%, 76% 77.45%, 77% 77.25%, 78% 76.8%, 79% 76.2%, 80% 75.4%, 81% 74.45%, 82% 73.4%, 83% 72.25%, 84% 71.05%, 85% 69.8%, 86% 68.55%, 87% 67.35%, 88% 66.2%, 89% 65.2%, 90% 64.3%, 91% 63.55%, 92% 63%, 93% 62.65%, 94% 62.5%, 95% 62.55%, 96% 62.8%, 97% 63.3%, 98% 63.9%, 99% 64.75%, 100% 65.7%)
|
||||
}
|
||||
|
||||
.arrow {
|
||||
border: solid black;
|
||||
border-width: 0 3px 3px 0;
|
||||
display: inline-block;
|
||||
padding: 3px;
|
||||
&.right {
|
||||
transform: rotate(-45deg);
|
||||
-webkit-transform: rotate(-45deg);
|
||||
}
|
||||
&.left {
|
||||
transform: rotate(135deg);
|
||||
-webkit-transform: rotate(135deg);
|
||||
}
|
||||
&.up {
|
||||
transform: rotate(-135deg);
|
||||
-webkit-transform: rotate(-135deg);
|
||||
}
|
||||
&.down {
|
||||
transform: rotate(45deg);
|
||||
-webkit-transform: rotate(45deg);
|
||||
}
|
||||
}
|
||||
|
||||
.button-small {
|
||||
font-size: 85%;
|
||||
}
|
||||
@@ -170,13 +161,6 @@ body:after, body:before {
|
||||
display: block;
|
||||
}
|
||||
|
||||
.edit-form {
|
||||
background: #fff;
|
||||
padding: 2em;
|
||||
margin: 1em;
|
||||
border-radius: 5px;
|
||||
min-width: 70%;
|
||||
}
|
||||
|
||||
.button-secondary {
|
||||
color: white;
|
||||
@@ -241,20 +225,26 @@ body:after, body:before {
|
||||
background: rgba(255, 255, 255, .5);
|
||||
}
|
||||
}
|
||||
&.with-share-link {
|
||||
> *:hover {
|
||||
cursor:pointer;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#notification-customisation {
|
||||
display: block;
|
||||
border: 1px solid #ccc;
|
||||
padding: 1rem;
|
||||
padding: 0.5rem;
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
#toggle-customise-notifications {
|
||||
cursor: pointer;
|
||||
#notification-error-log {
|
||||
border: 1px solid #ccc;
|
||||
padding: 1rem;
|
||||
border-radius: 5px;
|
||||
overflow-wrap: break-word;
|
||||
}
|
||||
|
||||
|
||||
#token-table {
|
||||
&.pure-table td, &.pure-table th {
|
||||
font-size: 80%;
|
||||
@@ -267,14 +257,26 @@ body:after, body:before {
|
||||
border-radius: 10px;
|
||||
margin-bottom: 1em;
|
||||
input {
|
||||
width: auto !important;
|
||||
display: inline-block;
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
.label {
|
||||
display: none;
|
||||
}
|
||||
legend {
|
||||
color: #fff;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
#watch-add-wrapper-zone {
|
||||
> div {
|
||||
display: inline-block;
|
||||
}
|
||||
@media only screen and (max-width: 760px) {
|
||||
#url {
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -294,16 +296,15 @@ body:after, body:before {
|
||||
border-top-right-radius: 5px;
|
||||
border-bottom-right-radius: 5px;
|
||||
box-shadow: 5px 0 5px -2px #888;
|
||||
}
|
||||
|
||||
#diff-jump a {
|
||||
color: #1b98f8;
|
||||
cursor: grabbing;
|
||||
-moz-user-select: none;
|
||||
-webkit-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
-o-user-select: none;
|
||||
a {
|
||||
color: #1b98f8;
|
||||
cursor: grabbing;
|
||||
-moz-user-select: none;
|
||||
-webkit-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
-o-user-select: none;
|
||||
}
|
||||
}
|
||||
|
||||
footer {
|
||||
@@ -328,12 +329,10 @@ footer {
|
||||
*/
|
||||
}
|
||||
|
||||
|
||||
|
||||
.sticky-tab {
|
||||
position: absolute;
|
||||
top: 80px;
|
||||
font-size: 8px;
|
||||
top: 60px;
|
||||
font-size: 65%;
|
||||
background: #fff;
|
||||
padding: 10px;
|
||||
&#left-sticky {
|
||||
@@ -342,20 +341,36 @@ footer {
|
||||
&#right-sticky {
|
||||
right: 0px;
|
||||
}
|
||||
&#hosted-sticky {
|
||||
right: 0px;
|
||||
top: 100px;
|
||||
font-weight: bold;
|
||||
}
|
||||
}
|
||||
|
||||
#new-version-text a {
|
||||
color: #e07171;
|
||||
}
|
||||
|
||||
.paused-state {
|
||||
&.state-False img {
|
||||
.watch-controls {
|
||||
.state-on {
|
||||
img {
|
||||
opacity: 0.8;
|
||||
}
|
||||
}
|
||||
|
||||
/* default */
|
||||
img {
|
||||
opacity: 0.2;
|
||||
}
|
||||
|
||||
&.state-False:hover img {
|
||||
opacity: 0.8;
|
||||
img {
|
||||
&:hover {
|
||||
transition: opacity 0.3s;
|
||||
opacity: 0.8;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
.monospaced-textarea {
|
||||
@@ -370,11 +385,27 @@ footer {
|
||||
|
||||
|
||||
.pure-form {
|
||||
fieldset {
|
||||
padding-top: 0px;
|
||||
ul {
|
||||
padding-bottom: 0px;
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
}
|
||||
.pure-control-group, .pure-group, .pure-controls {
|
||||
padding-bottom: 1em;
|
||||
div {
|
||||
margin: 0px;
|
||||
}
|
||||
.checkbox {
|
||||
> * {
|
||||
display: inline;
|
||||
vertical-align: middle;
|
||||
}
|
||||
> label {
|
||||
padding-left: 5px;
|
||||
}
|
||||
}
|
||||
}
|
||||
/* The input fields with errors */
|
||||
.error {
|
||||
@@ -401,13 +432,20 @@ footer {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
input[type=url] {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
textarea {
|
||||
width: 100%;
|
||||
}
|
||||
.inline-radio {
|
||||
ul {
|
||||
margin: 0px;
|
||||
list-style: none;
|
||||
li {
|
||||
> * {
|
||||
display: inline-block;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 1024px) {
|
||||
@@ -416,28 +454,55 @@ footer {
|
||||
}
|
||||
.edit-form {
|
||||
padding: 0.5em;
|
||||
margin: 0.5em;
|
||||
margin: 0;
|
||||
}
|
||||
#nav-menu {
|
||||
overflow-x: scroll;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
@media only screen and (max-width: 760px), (min-device-width: 768px) and (max-device-width: 800px) {
|
||||
|
||||
div.sticky-tab#hosted-sticky {
|
||||
top: 60px;
|
||||
left: 0px;
|
||||
right: auto;
|
||||
}
|
||||
|
||||
section.content {
|
||||
padding-top: 110px;
|
||||
}
|
||||
|
||||
// Make the tabs easier to hit, they will be all nice and horizontal
|
||||
div.tabs.collapsable ul li {
|
||||
display: block;
|
||||
border-radius: 0px;
|
||||
margin-right: 0px;
|
||||
}
|
||||
|
||||
input[type='text'] {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
/*
|
||||
Max width before this PARTICULAR table gets nasty
|
||||
This query will take effect for any screen smaller than 760px
|
||||
and also iPads specifically.
|
||||
*/
|
||||
@media only screen and (max-width: 760px),
|
||||
(min-device-width: 768px) and (max-device-width: 1024px) {
|
||||
|
||||
.watch-table {
|
||||
/* Force table to not be like tables anymore */
|
||||
thead, tbody, th, td, tr {
|
||||
display: block;
|
||||
}
|
||||
|
||||
.last-checked {
|
||||
> span {
|
||||
vertical-align: middle;
|
||||
}
|
||||
}
|
||||
|
||||
.last-checked::before {
|
||||
color: #555;
|
||||
content: "Last Checked ";
|
||||
@@ -468,7 +533,7 @@ and also iPads specifically.
|
||||
/* Behave like a "row" */
|
||||
border: none;
|
||||
border-bottom: 1px solid #eee;
|
||||
|
||||
vertical-align: middle;
|
||||
&:before {
|
||||
/* Top/left values mimic padding */
|
||||
top: 6px;
|
||||
@@ -496,3 +561,246 @@ and also iPads specifically.
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/** Desktop vs mobile input field strategy
|
||||
- We don't use 'size' with <input> because `size` is too unreliable to override, and will often push the layout out
|
||||
- Rely always on width in CSS
|
||||
*/
|
||||
@media only screen and (min-width: 761px) {
|
||||
/* m-d is medium-desktop */
|
||||
.m-d {
|
||||
min-width: 80%;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
.tabs {
|
||||
ul {
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
display:block;
|
||||
li {
|
||||
margin-right: 3px;
|
||||
display: inline-block;
|
||||
color: #fff;
|
||||
border-top-left-radius: 5px;
|
||||
border-top-right-radius: 5px;
|
||||
background-color: rgba(255, 255, 255, 0.2);
|
||||
|
||||
&.active,:target {
|
||||
background-color: #fff;
|
||||
a {
|
||||
color: #222;
|
||||
font-weight: bold;
|
||||
}
|
||||
}
|
||||
a {
|
||||
display: block;
|
||||
padding: 0.8em;
|
||||
color: #fff;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
$form-edge-padding: 20px;
|
||||
.pure-form-stacked {
|
||||
>div:first-child {
|
||||
display: block;
|
||||
}
|
||||
}
|
||||
|
||||
.login-form {
|
||||
.inner {
|
||||
background: #fff;
|
||||
padding: $form-edge-padding;
|
||||
border-radius: 5px;
|
||||
}
|
||||
}
|
||||
|
||||
.tab-pane-inner {
|
||||
&:not(:target) {
|
||||
display: none;
|
||||
}
|
||||
&:target {
|
||||
display: block;
|
||||
}
|
||||
// doesn't need padding because there's another row of buttons/activity
|
||||
padding: 0px;
|
||||
}
|
||||
|
||||
#beta-logo {
|
||||
height: 50px;
|
||||
// looks better when it's hanging off a little
|
||||
right: -3px;
|
||||
top: -3px;
|
||||
position: absolute;
|
||||
}
|
||||
|
||||
#selector-header {
|
||||
padding-bottom: 1em;
|
||||
}
|
||||
|
||||
.edit-form {
|
||||
min-width: 70%;
|
||||
/* so it can't overflow */
|
||||
max-width: 95%;
|
||||
.box-wrap {
|
||||
position: relative;
|
||||
}
|
||||
.inner {
|
||||
background: #fff;
|
||||
padding: $form-edge-padding;
|
||||
}
|
||||
#actions {
|
||||
display: block;
|
||||
background: #fff;
|
||||
}
|
||||
|
||||
.pure-form-message-inline {
|
||||
padding-left: 0;
|
||||
}
|
||||
}
|
||||
|
||||
ul {
|
||||
padding-left: 1em;
|
||||
padding-top: 0px;
|
||||
margin-top: 4px;
|
||||
}
|
||||
|
||||
.time-check-widget {
|
||||
tr {
|
||||
display: inline;
|
||||
input[type="number"] {
|
||||
width: 5em;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#selector-wrapper {
|
||||
height: 600px;
|
||||
overflow-y: scroll;
|
||||
position: relative;
|
||||
//width: 100%;
|
||||
> img {
|
||||
position: absolute;
|
||||
z-index: 4;
|
||||
max-width: 100%;
|
||||
}
|
||||
>canvas {
|
||||
position: relative;
|
||||
z-index: 5;
|
||||
max-width: 100%;
|
||||
&:hover {
|
||||
cursor: pointer;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#selector-current-xpath {
|
||||
font-size: 80%;
|
||||
}
|
||||
|
||||
#webdriver-override-options {
|
||||
input[type="number"] {
|
||||
width: 5em;
|
||||
}
|
||||
}
|
||||
|
||||
#api-key {
|
||||
&:hover {
|
||||
cursor: pointer;
|
||||
}
|
||||
}
|
||||
|
||||
#api-key-copy {
|
||||
color: #0078e7;
|
||||
}
|
||||
|
||||
/* spinner */
|
||||
.loader,
|
||||
.loader:after {
|
||||
border-radius: 50%;
|
||||
width: 10px;
|
||||
height: 10px;
|
||||
}
|
||||
.loader {
|
||||
margin: 0px auto;
|
||||
font-size: 3px;
|
||||
vertical-align: middle;
|
||||
display: inline-block;
|
||||
text-indent: -9999em;
|
||||
border-top: 1.1em solid rgba(38,104,237, 0.2);
|
||||
border-right: 1.1em solid rgba(38,104,237, 0.2);
|
||||
border-bottom: 1.1em solid rgba(38,104,237, 0.2);
|
||||
border-left: 1.1em solid #2668ed;
|
||||
-webkit-transform: translateZ(0);
|
||||
-ms-transform: translateZ(0);
|
||||
transform: translateZ(0);
|
||||
-webkit-animation: load8 1.1s infinite linear;
|
||||
animation: load8 1.1s infinite linear;
|
||||
}
|
||||
@-webkit-keyframes load8 {
|
||||
0% {
|
||||
-webkit-transform: rotate(0deg);
|
||||
transform: rotate(0deg);
|
||||
}
|
||||
100% {
|
||||
-webkit-transform: rotate(360deg);
|
||||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
@keyframes load8 {
|
||||
0% {
|
||||
-webkit-transform: rotate(0deg);
|
||||
transform: rotate(0deg);
|
||||
}
|
||||
100% {
|
||||
-webkit-transform: rotate(360deg);
|
||||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
|
||||
.snapshot-age {
|
||||
padding: 4px;
|
||||
background-color: #dfdfdf;
|
||||
border-radius: 3px;
|
||||
font-weight: bold;
|
||||
margin-bottom: 4px;
|
||||
&.error {
|
||||
background-color: #ff0000;
|
||||
color: #fff;
|
||||
}
|
||||
}
|
||||
|
||||
#checkbox-operations {
|
||||
background: rgba(0, 0, 0, 0.05);
|
||||
padding: 1em;
|
||||
border-radius: 10px;
|
||||
margin-bottom: 1em;
|
||||
display: none;
|
||||
}
|
||||
.checkbox-uuid {
|
||||
> * {
|
||||
vertical-align: middle;
|
||||
}
|
||||
}
|
||||
|
||||
.inline-warning {
|
||||
> span {
|
||||
display: inline-block;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
img.inline-warning-icon {
|
||||
display: inline;
|
||||
height: 26px;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
border: 1px solid #ff3300;
|
||||
padding: 0.5rem;
|
||||
border-radius: 5px;
|
||||
color: #ff3300;
|
||||
}
|
||||
549
changedetectionio/store.py
Normal file
@@ -0,0 +1,549 @@
|
||||
from flask import (
|
||||
flash
|
||||
)
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
import uuid as uuid_builder
|
||||
from copy import deepcopy
|
||||
from os import path, unlink
|
||||
from threading import Lock
|
||||
import re
|
||||
import requests
|
||||
import secrets
|
||||
|
||||
from . model import App, Watch
|
||||
|
||||
# Is there an existing library to ensure some data store (JSON etc) is in sync with CRUD methods?
|
||||
# Open a github issue if you know something :)
|
||||
# https://stackoverflow.com/questions/6190468/how-to-trigger-function-on-value-change
|
||||
class ChangeDetectionStore:
|
||||
lock = Lock()
|
||||
# For general updates/writes that can wait a few seconds
|
||||
needs_write = False
|
||||
|
||||
# For when we edit, we should write to disk
|
||||
needs_write_urgent = False
|
||||
|
||||
def __init__(self, datastore_path="/datastore", include_default_watches=True, version_tag="0.0.0"):
|
||||
# Should only be active for docker
|
||||
# logging.basicConfig(filename='/dev/stdout', level=logging.INFO)
|
||||
self.needs_write = False
|
||||
self.datastore_path = datastore_path
|
||||
self.json_store_path = "{}/url-watches.json".format(self.datastore_path)
|
||||
self.proxy_list = None
|
||||
self.stop_thread = False
|
||||
|
||||
self.__data = App.model()
|
||||
|
||||
# Base definition for all watchers
|
||||
# deepcopy part of #569 - not sure why it's needed exactly
|
||||
self.generic_definition = deepcopy(Watch.model(datastore_path = datastore_path, default={}))
|
||||
|
||||
if path.isfile('changedetectionio/source.txt'):
|
||||
with open('changedetectionio/source.txt') as f:
|
||||
# Should be set in Dockerfile to look for /source.txt , this will give us the git commit #
|
||||
# So when someone gives us a backup file to examine, we know exactly what code they were running.
|
||||
self.__data['build_sha'] = f.read()
|
||||
|
||||
try:
|
||||
# @todo retest with ", encoding='utf-8'"
|
||||
with open(self.json_store_path) as json_file:
|
||||
from_disk = json.load(json_file)
|
||||
|
||||
# @todo isn't there a way to do this dict.update recursively?
# Problem here is if the one on the disk is missing a sub-struct, it won't be present anymore.
|
||||
if 'watching' in from_disk:
|
||||
self.__data['watching'].update(from_disk['watching'])
|
||||
|
||||
if 'app_guid' in from_disk:
|
||||
self.__data['app_guid'] = from_disk['app_guid']
|
||||
|
||||
if 'settings' in from_disk:
|
||||
if 'headers' in from_disk['settings']:
|
||||
self.__data['settings']['headers'].update(from_disk['settings']['headers'])
|
||||
|
||||
if 'requests' in from_disk['settings']:
|
||||
self.__data['settings']['requests'].update(from_disk['settings']['requests'])
|
||||
|
||||
if 'application' in from_disk['settings']:
|
||||
self.__data['settings']['application'].update(from_disk['settings']['application'])
|
||||
|
||||
# Convert each existing watch back to the Watch.model object
|
||||
for uuid, watch in self.__data['watching'].items():
|
||||
watch['uuid']=uuid
|
||||
self.__data['watching'][uuid] = Watch.model(datastore_path=self.datastore_path, default=watch)
|
||||
print("Watching:", uuid, self.__data['watching'][uuid]['url'])
|
||||
|
||||
# First time run, the file doesn't exist yet.
|
||||
except (FileNotFoundError, json.decoder.JSONDecodeError):
|
||||
if include_default_watches:
|
||||
print("Creating JSON store at", self.datastore_path)
|
||||
|
||||
self.add_watch(url='http://www.quotationspage.com/random.php', tag='test')
|
||||
self.add_watch(url='https://news.ycombinator.com/', tag='Tech news')
|
||||
self.add_watch(url='https://changedetection.io/CHANGELOG.txt', tag='changedetection.io')
|
||||
|
||||
self.__data['version_tag'] = version_tag
|
||||
|
||||
# Helper to remove password protection
|
||||
password_reset_lockfile = "{}/removepassword.lock".format(self.datastore_path)
|
||||
if path.isfile(password_reset_lockfile):
|
||||
self.__data['settings']['application']['password'] = False
|
||||
unlink(password_reset_lockfile)
|
||||
|
||||
if not 'app_guid' in self.__data:
|
||||
import os
|
||||
import sys
|
||||
if "pytest" in sys.modules or "PYTEST_CURRENT_TEST" in os.environ:
|
||||
self.__data['app_guid'] = "test-" + str(uuid_builder.uuid4())
|
||||
else:
|
||||
self.__data['app_guid'] = str(uuid_builder.uuid4())
|
||||
|
||||
# Generate the URL access token for RSS feeds
|
||||
if not 'rss_access_token' in self.__data['settings']['application']:
|
||||
secret = secrets.token_hex(16)
|
||||
self.__data['settings']['application']['rss_access_token'] = secret
|
||||
|
||||
# Generate the API access token
|
||||
if not 'api_access_token' in self.__data['settings']['application']:
|
||||
secret = secrets.token_hex(16)
|
||||
self.__data['settings']['application']['api_access_token'] = secret
|
||||
|
||||
# Proxy list support - available as a selection in settings when a text file is imported
|
||||
proxy_list_file = "{}/proxies.json".format(self.datastore_path)
|
||||
if path.isfile(proxy_list_file):
|
||||
self.import_proxy_list(proxy_list_file)
|
||||
|
||||
# Bump the update version by running updates
|
||||
self.run_updates()
|
||||
|
||||
self.needs_write = True
|
||||
|
||||
# Finally start the thread that will manage periodic data saves to JSON
|
||||
save_data_thread = threading.Thread(target=self.save_datastore).start()
|
||||
|
||||
def set_last_viewed(self, uuid, timestamp):
|
||||
logging.debug("Setting watch UUID: {} last viewed to {}".format(uuid, int(timestamp)))
|
||||
self.data['watching'][uuid].update({'last_viewed': int(timestamp)})
|
||||
self.needs_write = True
|
||||
|
||||
def remove_password(self):
|
||||
self.__data['settings']['application']['password'] = False
|
||||
self.needs_write = True
|
||||
|
||||
    def update_watch(self, uuid, update_obj):

        # It's possible that the watch could be deleted before update
        if not self.__data['watching'].get(uuid):
            return

        with self.lock:

            # In python 3.9 we have the |= dict operator, but that still will lose data on nested structures...
            for dict_key, d in self.generic_definition.items():
                if isinstance(d, dict):
                    if update_obj is not None and dict_key in update_obj:
                        self.__data['watching'][uuid][dict_key].update(update_obj[dict_key])
                        del (update_obj[dict_key])

            self.__data['watching'][uuid].update(update_obj)

        self.needs_write = True
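The comment inside update_watch() is the reason for the loop over generic_definition: a flat dict.update() (or Python 3.9's |= operator) replaces nested sub-dicts wholesale and so loses keys inside them. A minimal sketch of the same idea as a standalone recursive merge, for illustration only (not code from this repository):

    def deep_update(base: dict, incoming: dict) -> dict:
        # Merge `incoming` into `base` without dropping keys inside nested dicts
        for key, value in incoming.items():
            if isinstance(value, dict) and isinstance(base.get(key), dict):
                deep_update(base[key], value)  # descend instead of replacing the whole sub-dict
            else:
                base[key] = value
        return base

    # deep_update({'headers': {'a': '1'}}, {'headers': {'b': '2'}})
    # -> {'headers': {'a': '1', 'b': '2'}}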
    @property
    def threshold_seconds(self):
        seconds = 0
        for m, n in Watch.mtable.items():
            x = self.__data['settings']['requests']['time_between_check'].get(m)
            if x:
                seconds += x * n
        return seconds
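threshold_seconds simply multiplies each populated unit of time_between_check by that unit's length in seconds taken from Watch.mtable. The table itself is not shown in this diff, so the mapping below is only an assumed example used to make the arithmetic concrete:

    # Assumed unit table for illustration (not taken from this diff)
    mtable = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 86400 * 7}
    time_between_check = {'weeks': None, 'days': None, 'hours': 3, 'minutes': 5, 'seconds': None}

    seconds = sum(mtable[unit] * value for unit, value in time_between_check.items() if value)
    # 3 * 3600 + 5 * 60 = 11100 seconds between checks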
@property
|
||||
def has_unviewed(self):
|
||||
for uuid, watch in self.__data['watching'].items():
|
||||
if watch.viewed == False:
|
||||
return True
|
||||
return False
|
||||
|
||||
@property
|
||||
def data(self):
|
||||
has_unviewed = False
|
||||
for uuid, watch in self.__data['watching'].items():
|
||||
# #106 - Be sure this is None on empty string, False, None, etc
|
||||
# Default var for fetch_backend
|
||||
# @todo this may not be needed anymore, or could be easily removed
|
||||
if not self.__data['watching'][uuid]['fetch_backend']:
|
||||
self.__data['watching'][uuid]['fetch_backend'] = self.__data['settings']['application']['fetch_backend']
|
||||
|
||||
# Re #152, Return env base_url if not overridden, @todo also prefer the proxy pass url
|
||||
env_base_url = os.getenv('BASE_URL','')
|
||||
if not self.__data['settings']['application']['base_url']:
|
||||
self.__data['settings']['application']['base_url'] = env_base_url.strip('" ')
|
||||
|
||||
return self.__data
|
||||
|
||||
def get_all_tags(self):
|
||||
tags = []
|
||||
for uuid, watch in self.data['watching'].items():
|
||||
if watch['tag'] is None:
|
||||
continue
|
||||
# Support for comma separated list of tags.
|
||||
for tag in watch['tag'].split(','):
|
||||
tag = tag.strip()
|
||||
if tag not in tags:
|
||||
tags.append(tag)
|
||||
|
||||
tags.sort()
|
||||
return tags
|
||||
|
||||
def unlink_history_file(self, path):
|
||||
try:
|
||||
unlink(path)
|
||||
except (FileNotFoundError, IOError):
|
||||
pass
|
||||
|
||||
# Delete a single watch by UUID
|
||||
def delete(self, uuid):
|
||||
with self.lock:
|
||||
if uuid == 'all':
|
||||
self.__data['watching'] = {}
|
||||
|
||||
# GitHub #30 also delete history records
|
||||
for uuid in self.data['watching']:
|
||||
for path in self.data['watching'][uuid].history.values():
|
||||
self.unlink_history_file(path)
|
||||
|
||||
else:
|
||||
for path in self.data['watching'][uuid].history.values():
|
||||
self.unlink_history_file(path)
|
||||
|
||||
del self.data['watching'][uuid]
|
||||
|
||||
self.needs_write_urgent = True
|
||||
|
||||
# Clone a watch by UUID
|
||||
def clone(self, uuid):
|
||||
url = self.data['watching'][uuid]['url']
|
||||
tag = self.data['watching'][uuid]['tag']
|
||||
extras = self.data['watching'][uuid]
|
||||
new_uuid = self.add_watch(url=url, tag=tag, extras=extras)
|
||||
return new_uuid
|
||||
|
||||
def url_exists(self, url):
|
||||
|
||||
# Probably there should be a dict for this...
|
||||
for watch in self.data['watching'].values():
|
||||
if watch['url'] == url:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
# Remove a watch's data but keep the entry (URL etc)
|
||||
def clear_watch_history(self, uuid):
|
||||
import pathlib
|
||||
|
||||
self.__data['watching'][uuid].update(
|
||||
{'last_checked': 0,
|
||||
'last_viewed': 0,
|
||||
'previous_md5': False,
|
||||
'last_notification_error': False,
|
||||
'last_error': False})
|
||||
|
||||
# JSON Data, Screenshots, Textfiles (history index and snapshots), HTML in the future etc
|
||||
for item in pathlib.Path(os.path.join(self.datastore_path, uuid)).rglob("*.*"):
|
||||
unlink(item)
|
||||
|
||||
# Force the attr to recalculate
|
||||
bump = self.__data['watching'][uuid].history
|
||||
|
||||
self.needs_write_urgent = True
|
||||
|
||||
def add_watch(self, url, tag="", extras=None, write_to_disk_now=True):
|
||||
|
||||
if extras is None:
|
||||
extras = {}
|
||||
# should always be str
|
||||
if tag is None or not tag:
|
||||
tag=''
|
||||
|
||||
# In case these are copied across, assume it's a reference and deepcopy()
|
||||
apply_extras = deepcopy(extras)
|
||||
|
||||
# Was it a share link? try to fetch the data
|
||||
if (url.startswith("https://changedetection.io/share/")):
|
||||
try:
|
||||
r = requests.request(method="GET",
|
||||
url=url,
|
||||
# So we know to return the JSON instead of the human-friendly "help" page
|
||||
headers={'App-Guid': self.__data['app_guid']})
|
||||
res = r.json()
|
||||
|
||||
# List of permissible attributes we accept from the wild internet
|
||||
for k in ['url', 'tag',
|
||||
'paused', 'title',
|
||||
'previous_md5', 'headers',
|
||||
'body', 'method',
|
||||
'ignore_text', 'css_filter',
|
||||
'subtractive_selectors', 'trigger_text',
|
||||
'extract_title_as_title', 'extract_text',
|
||||
'text_should_not_be_present',
|
||||
'webdriver_js_execute_code']:
|
||||
if res.get(k):
|
||||
apply_extras[k] = res[k]
|
||||
|
||||
except Exception as e:
|
||||
logging.error("Error fetching metadata for shared watch link", url, str(e))
|
||||
flash("Error fetching metadata for {}".format(url), 'error')
|
||||
return False
|
||||
|
||||
with self.lock:
|
||||
|
||||
# #Re 569
|
||||
new_watch = Watch.model(datastore_path=self.datastore_path, default={
|
||||
'url': url,
|
||||
'tag': tag
|
||||
})
|
||||
|
||||
new_uuid = new_watch['uuid']
|
||||
logging.debug("Added URL {} - {}".format(url, new_uuid))
|
||||
|
||||
for k in ['uuid', 'history', 'last_checked', 'last_changed', 'newest_history_key', 'previous_md5', 'viewed']:
|
||||
if k in apply_extras:
|
||||
del apply_extras[k]
|
||||
|
||||
new_watch.update(apply_extras)
|
||||
self.__data['watching'][new_uuid]=new_watch
|
||||
|
||||
self.__data['watching'][new_uuid].ensure_data_dir_exists()
|
||||
|
||||
if write_to_disk_now:
|
||||
self.sync_to_json()
|
||||
return new_uuid
|
||||
|
||||
def visualselector_data_is_ready(self, watch_uuid):
|
||||
output_path = "{}/{}".format(self.datastore_path, watch_uuid)
|
||||
screenshot_filename = "{}/last-screenshot.png".format(output_path)
|
||||
elements_index_filename = "{}/elements.json".format(output_path)
|
||||
if path.isfile(screenshot_filename) and path.isfile(elements_index_filename) :
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
# Save as PNG, PNG is larger but better for doing visual diff in the future
|
||||
def save_screenshot(self, watch_uuid, screenshot: bytes, as_error=False):
|
||||
if not self.data['watching'].get(watch_uuid):
|
||||
return
|
||||
|
||||
if as_error:
|
||||
target_path = os.path.join(self.datastore_path, watch_uuid, "last-error-screenshot.png")
|
||||
else:
|
||||
target_path = os.path.join(self.datastore_path, watch_uuid, "last-screenshot.png")
|
||||
|
||||
self.data['watching'][watch_uuid].ensure_data_dir_exists()
|
||||
|
||||
with open(target_path, 'wb') as f:
|
||||
f.write(screenshot)
|
||||
f.close()
|
||||
|
||||
def save_error_text(self, watch_uuid, contents):
|
||||
if not self.data['watching'].get(watch_uuid):
|
||||
return
|
||||
target_path = os.path.join(self.datastore_path, watch_uuid, "last-error.txt")
|
||||
|
||||
with open(target_path, 'w') as f:
|
||||
f.write(contents)
|
||||
|
||||
def save_xpath_data(self, watch_uuid, data, as_error=False):
|
||||
if not self.data['watching'].get(watch_uuid):
|
||||
return
|
||||
if as_error:
|
||||
target_path = os.path.join(self.datastore_path, watch_uuid, "elements-error.json")
|
||||
else:
|
||||
target_path = os.path.join(self.datastore_path, watch_uuid, "elements.json")
|
||||
|
||||
with open(target_path, 'w') as f:
|
||||
f.write(json.dumps(data))
|
||||
f.close()
|
||||
|
||||
|
||||
def sync_to_json(self):
|
||||
logging.info("Saving JSON..")
|
||||
print("Saving JSON..")
|
||||
try:
|
||||
data = deepcopy(self.__data)
|
||||
except RuntimeError as e:
|
||||
# Try again in 15 seconds
|
||||
time.sleep(15)
|
||||
logging.error ("! Data changed when writing to JSON, trying again.. %s", str(e))
|
||||
self.sync_to_json()
|
||||
return
|
||||
else:
|
||||
|
||||
try:
|
||||
# Re #286 - First write to a temp file, then confirm it looks OK and rename it
|
||||
# This is a fairly basic strategy to deal with the case that the file is corrupted,
|
||||
# system was out of memory, out of RAM etc
|
||||
with open(self.json_store_path+".tmp", 'w') as json_file:
|
||||
json.dump(data, json_file, indent=4)
|
||||
os.replace(self.json_store_path+".tmp", self.json_store_path)
|
||||
except Exception as e:
|
||||
logging.error("Error writing JSON!! (Main JSON file save was skipped) : %s", str(e))
|
||||
|
||||
self.needs_write = False
|
||||
self.needs_write_urgent = False
|
||||
|
||||
# Thread runner, this helps with thread/write issues when there are many operations that want to update the JSON,
# by just running periodically in one thread; according to Python, dict updates are threadsafe.
|
||||
def save_datastore(self):
|
||||
|
||||
while True:
|
||||
if self.stop_thread:
|
||||
print("Shutting down datastore thread")
|
||||
return
|
||||
|
||||
if self.needs_write or self.needs_write_urgent:
|
||||
self.sync_to_json()
|
||||
|
||||
# Once per minute is enough; any more often can cause high CPU usage
# better here would be something like self.app.config.exit.wait(1), but we can't get to 'app' from here
|
||||
for i in range(120):
|
||||
time.sleep(0.5)
|
||||
if self.stop_thread or self.needs_write_urgent:
|
||||
break
|
||||
|
||||
# Go through the datastore path and remove any snapshots that are not mentioned in the index
|
||||
# This usually is not used, but can be handy.
|
||||
def remove_unused_snapshots(self):
|
||||
print ("Removing snapshots from datastore that are not in the index..")
|
||||
|
||||
index=[]
|
||||
for uuid in self.data['watching']:
|
||||
for id in self.data['watching'][uuid].history:
|
||||
index.append(self.data['watching'][uuid].history[str(id)])
|
||||
|
||||
import pathlib
|
||||
|
||||
# Only in the sub-directories
|
||||
for uuid in self.data['watching']:
|
||||
for item in pathlib.Path(self.datastore_path).rglob(uuid+"/*.txt"):
|
||||
if not str(item) in index:
|
||||
print ("Removing",item)
|
||||
unlink(item)
|
||||
|
||||
def import_proxy_list(self, filename):
|
||||
with open(filename) as f:
|
||||
self.proxy_list = json.load(f)
|
||||
print ("Registered proxy list", list(self.proxy_list.keys()))
|
||||
|
||||
|
||||
|
||||
# Run all updates
|
||||
# IMPORTANT - Each update could be run even on a fresh install where the schema is already correct
# So therefore - each `update_n` should be very careful about checking if it needs to actually run
|
||||
# Probably we should bump the current update schema version with each tag release version?
|
||||
def run_updates(self):
|
||||
import inspect
|
||||
import shutil
|
||||
|
||||
updates_available = []
|
||||
for i, o in inspect.getmembers(self, predicate=inspect.ismethod):
|
||||
m = re.search(r'update_(\d+)$', i)
|
||||
if m:
|
||||
updates_available.append(int(m.group(1)))
|
||||
updates_available.sort()
|
||||
|
||||
for update_n in updates_available:
|
||||
if update_n > self.__data['settings']['application']['schema_version']:
|
||||
print ("Applying update_{}".format((update_n)))
|
||||
# Won't exist on fresh installs
|
||||
if os.path.exists(self.json_store_path):
|
||||
shutil.copyfile(self.json_store_path, self.datastore_path+"/url-watches-before-{}.json".format(update_n))
|
||||
|
||||
try:
|
||||
update_method = getattr(self, "update_{}".format(update_n))()
|
||||
except Exception as e:
|
||||
print("Error while trying update_{}".format((update_n)))
|
||||
print(e)
|
||||
# Don't run any more updates
|
||||
return
|
||||
else:
|
||||
# Bump the version, important
|
||||
self.__data['settings']['application']['schema_version'] = update_n
|
||||
|
||||
# Convert minutes to seconds on settings and each watch
|
||||
def update_1(self):
|
||||
if self.data['settings']['requests'].get('minutes_between_check'):
|
||||
self.data['settings']['requests']['time_between_check']['minutes'] = self.data['settings']['requests']['minutes_between_check']
|
||||
# Remove the default 'hours' that is set from the model
|
||||
self.data['settings']['requests']['time_between_check']['hours'] = None
|
||||
|
||||
for uuid, watch in self.data['watching'].items():
|
||||
if 'minutes_between_check' in watch:
|
||||
# Only upgrade individual watch time if it was set
|
||||
if watch.get('minutes_between_check', False):
|
||||
self.data['watching'][uuid]['time_between_check']['minutes'] = watch['minutes_between_check']
|
||||
|
||||
    # Move the history list to a flat text file index
    # Better than SQLite because this list is only appended to, and works across NAS / NFS type setups
    def update_2(self):
        # @todo test running this on a newly updated one (when this already ran)
        for uuid, watch in self.data['watching'].items():
            history = []

            if watch.get('history', False):
                for d, p in watch['history'].items():
                    d = int(d)  # Used to be keyed as str, we'll fix this now too
                    history.append("{},{}\n".format(d, p))

                if len(history):
                    target_path = os.path.join(self.datastore_path, uuid)
                    if os.path.exists(target_path):
                        with open(os.path.join(target_path, "history.txt"), "w") as f:
                            f.writelines(history)
                    else:
                        logging.warning("Datastore history directory {} does not exist, skipping history import.".format(target_path))

            # No longer needed, dynamically pulled from the disk when needed.
            # But we should set it back to an empty dict so we don't break if this schema runs on an earlier version.
            # In the distant future we can remove this entirely
            self.data['watching'][uuid]['history'] = {}
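update_2 writes each history entry as a single `timestamp,snapshot-path` line in history.txt, which is why it works as an append-only index over NFS/NAS style mounts. A small sketch of reading that format back, for illustration only (not the project's own reader):

    def read_history_index(history_txt_path):
        # Each line looks like: "<unix timestamp>,<path to snapshot file>"
        entries = {}
        with open(history_txt_path) as f:
            for line in f:
                timestamp, _, snapshot_path = line.strip().partition(',')
                if timestamp:
                    entries[int(timestamp)] = snapshot_path
        return entries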
# We incorrectly stored last_changed when there was not a change, and then confused the output list table
|
||||
def update_3(self):
|
||||
# see https://github.com/dgtlmoon/changedetection.io/pull/835
|
||||
return
|
||||
|
||||
# `last_changed` not needed, we pull that information from the history.txt index
|
||||
def update_4(self):
|
||||
for uuid, watch in self.data['watching'].items():
|
||||
try:
|
||||
# Remove it from the struct
|
||||
del(watch['last_changed'])
|
||||
except:
|
||||
continue
|
||||
return
|
||||
|
||||
def update_5(self):
|
||||
# If the watch notification body, title look the same as the global one, unset it, so the watch defaults back to using the main settings
|
||||
# In other words - the watch notification_title and notification_body are not needed if they are the same as the default one
|
||||
current_system_body = self.data['settings']['application']['notification_body'].translate(str.maketrans('', '', "\r\n "))
|
||||
current_system_title = self.data['settings']['application']['notification_title'].translate(str.maketrans('', '', "\r\n "))
|
||||
for uuid, watch in self.data['watching'].items():
|
||||
try:
|
||||
watch_body = watch.get('notification_body', '')
|
||||
if watch_body and watch_body.translate(str.maketrans('', '', "\r\n ")) == current_system_body:
|
||||
# Looks the same as the default one, so unset it
|
||||
watch['notification_body'] = None
|
||||
|
||||
watch_title = watch.get('notification_title', '')
|
||||
if watch_title and watch_title.translate(str.maketrans('', '', "\r\n ")) == current_system_title:
|
||||
# Looks the same as the default one, so unset it
|
||||
watch['notification_title'] = None
|
||||
except Exception as e:
|
||||
continue
|
||||
return
|
||||
|
||||
103
changedetectionio/templates/_common_fields.jinja
Normal file
@@ -0,0 +1,103 @@
|
||||
|
||||
{% from '_helpers.jinja' import render_field %}
|
||||
|
||||
{% macro render_common_settings_form(form, emailprefix, settings_application) %}
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_urls, rows=5, placeholder="Examples:
|
||||
Gitter - gitter://token/room
|
||||
Office365 - o365://TenantID:AccountEmail/ClientID/ClientSecret/TargetEmail
|
||||
AWS SNS - sns://AccessKeyID/AccessSecretKey/RegionName/+PhoneNo
|
||||
SMTPS - mailtos://user:pass@mail.domain.com?to=receivingAddress@example.com",
|
||||
class="notification-urls" )
|
||||
}}
|
||||
<div class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>Use <a target=_new href="https://github.com/caronc/apprise">Apprise URLs</a> for notifications to just about any service! <i><a target=_new href="https://github.com/dgtlmoon/changedetection.io/wiki/Notification-configuration-notes">Please read the notification services wiki here for important configuration notes</a></i>.</li>
|
||||
<li><code>discord://</code> only supports a maximum <strong>2,000 characters</strong> of notification text, including the title.</li>
|
||||
<li><code>tgram://</code> bots can't send messages to other bots, so you should specify the chat ID of a non-bot user.</li>
|
||||
<li><code>tgram://</code> only supports very limited HTML and can fail when extra tags are sent, <a href="https://core.telegram.org/bots/api#html-style">read more here</a> (or use plaintext/markdown format)</li>
|
||||
</ul>
|
||||
</div>
|
||||
<br/>
|
||||
<a id="send-test-notification" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Send test notification</a>
|
||||
{% if emailprefix %}
|
||||
<a id="add-email-helper" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Add email</a>
|
||||
{% endif %}
|
||||
<a href="{{url_for('notification_logs')}}" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Notification debug logs</a>
|
||||
</div>
|
||||
<div id="notification-customisation" class="pure-control-group">
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_title, class="m-d notification-title", placeholder=settings_application['notification_title']) }}
|
||||
<span class="pure-form-message-inline">Title for all notifications</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.notification_body , rows=5, class="notification-body", placeholder=settings_application['notification_body']) }}
|
||||
<span class="pure-form-message-inline">Body for all notifications</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
<!-- unsure -->
|
||||
{{ render_field(form.notification_format , class="notification-format") }}
|
||||
<span class="pure-form-message-inline">Format for all notifications</span>
|
||||
</div>
|
||||
<div class="pure-controls">
|
||||
<span class="pure-form-message-inline">
|
||||
These tokens can be used in the notification body and title to customise the notification text.
|
||||
|
||||
<table class="pure-table" id="token-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Token</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td><code>{base_url}</code></td>
|
||||
<td>The URL of the changedetection.io instance you are running.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{watch_url}</code></td>
|
||||
<td>The URL being watched.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{watch_uuid}</code></td>
|
||||
<td>The UUID of the watch.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{watch_title}</code></td>
|
||||
<td>The title of the watch.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{watch_tag}</code></td>
|
||||
<td>The tag of the watch.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{preview_url}</code></td>
|
||||
<td>The URL of the preview page generated by changedetection.io.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{diff}</code></td>
|
||||
<td>The diff output - differences only</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{diff_full}</code></td>
|
||||
<td>The diff output - full difference output</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{diff_url}</code></td>
|
||||
<td>The URL of the diff page generated by changedetection.io.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>{current_snapshot}</code></td>
|
||||
<td>The current snapshot value, useful when combined with JSON or CSS filters
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
<br/>
|
||||
URLs generated by changedetection.io (such as <code>{diff_url}</code>) require the <code>BASE_URL</code> environment variable set.<br/>
|
||||
Your <code>BASE_URL</code> var is currently "{{settings_application['current_base_url']}}"
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
{% endmacro %}
|
||||
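To illustrate the tokens documented in the table above, a hypothetical notification body (an example for illustration only, not one shipped with the project) could look like:

    {watch_title} changed: {watch_url}
    See what changed: {diff_url}

    {diff}

As noted above, the generated links ({diff_url}, {preview_url}) only resolve when the BASE_URL environment variable is set to the address of your instance.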
57
changedetectionio/templates/_helpers.jinja
Normal file
@@ -0,0 +1,57 @@
|
||||
{% macro render_field(field) %}
|
||||
<div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
|
||||
<div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
|
||||
|
||||
{% if field.errors %}
|
||||
<ul class=errors>
|
||||
{% for error in field.errors %}
|
||||
<li>{{ error }}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
{% macro render_checkbox_field(field) %}
|
||||
<div class="checkbox {% if field.errors %} error {% endif %}">
|
||||
{{ field(**kwargs)|safe }} {{ field.label }}
|
||||
{% if field.errors %}
|
||||
<ul class=errors>
|
||||
{% for error in field.errors %}
|
||||
<li>{{ error }}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
{% macro render_field(field) %}
|
||||
<div {% if field.errors %} class="error" {% endif %}>{{ field.label }}</div>
|
||||
<div {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
|
||||
{% if field.errors %}
|
||||
<ul class=errors>
|
||||
{% for error in field.errors %}
|
||||
<li>{{ error }}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
{% macro render_simple_field(field) %}
|
||||
<span class="label {% if field.errors %}error{% endif %}">{{ field.label }}</span>
|
||||
<span {% if field.errors %} class="error" {% endif %}>{{ field(**kwargs)|safe }}
|
||||
{% if field.errors %}
|
||||
<ul class=errors>
|
||||
{% for error in field.errors %}
|
||||
<li>{{ error }}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
</span>
|
||||
{% endmacro %}
|
||||
|
||||
|
||||
{% macro render_button(field) %}
|
||||
{{ field(**kwargs)|safe }}
|
||||
{% endmacro %}
|
||||
7
changedetectionio/templates/_pagination.jinja
Normal file
@@ -0,0 +1,7 @@
{% macro pagination(sorted_watches, total_per_page, current_page) %}
{{ sorted_watches|length }}

{% for row in sorted_watches|batch(total_per_page, ' ') %}
{{ loop.index}}
{% endfor %}
{% endmacro %}
@@ -5,6 +5,7 @@
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<meta name="description" content="Self hosted website change detection.">
|
||||
<title>Change Detection{{extra_title}}</title>
|
||||
<link rel="alternate" type="application/rss+xml" title="Changedetection.io » Feed{% if active_tag %}- {{active_tag}}{% endif %}" href="{{ url_for('rss', tag=active_tag , token=app_rss_token)}}" />
|
||||
<link rel="stylesheet" href="{{url_for('static_content', group='styles', filename='pure-min.css')}}">
|
||||
<link rel="stylesheet" href="{{url_for('static_content', group='styles', filename='styles.css')}}">
|
||||
{% if extra_stylesheets %}
|
||||
@@ -12,7 +13,15 @@
|
||||
<link rel="stylesheet" href="{{ m }}?ver=1000">
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
<style>
|
||||
body::before {
|
||||
background-image: url({{url_for('static_content', group='images', filename='gradient-border.png')}});
|
||||
}
|
||||
</style>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
|
||||
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<div class="header">
|
||||
@@ -26,7 +35,7 @@
|
||||
{% if current_diff_url %}
|
||||
<a class=current-diff-url href="{{ current_diff_url }}"><span style="max-width: 30%; overflow: hidden;">{{ current_diff_url }}</span></a>
|
||||
{% else %}
|
||||
{% if new_version_available %}
|
||||
{% if new_version_available and not (has_password and not current_user.is_authenticated) %}
|
||||
<span id="new-version-text" class="pure-menu-heading"><a href="https://github.com/dgtlmoon/changedetection.io">A new version is available</a></span>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
@@ -35,13 +44,13 @@
|
||||
{% if current_user.is_authenticated or not has_password %}
|
||||
{% if not current_diff_url %}
|
||||
<li class="pure-menu-item">
|
||||
<a href="{{ url_for('get_backup')}}" class="pure-menu-link">BACKUP</a>
|
||||
<a href="{{ url_for('settings_page')}}" class="pure-menu-link">SETTINGS</a>
|
||||
</li>
|
||||
<li class="pure-menu-item">
|
||||
<a href="{{ url_for('import_page')}}" class="pure-menu-link">IMPORT</a>
|
||||
</li>
|
||||
<li class="pure-menu-item">
|
||||
<a href="{{ url_for('settings_page')}}" class="pure-menu-link">SETTINGS</a>
|
||||
<a href="{{ url_for('get_backup')}}" class="pure-menu-link">BACKUP</a>
|
||||
</li>
|
||||
{% else %}
|
||||
<li class="pure-menu-item">
|
||||
@@ -68,7 +77,7 @@
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if hosted_sticky %}<div class="sticky-tab" id="hosted-sticky"><a href="https://lemonade.changedetection.io/start?ref={{guid}}">Let us host your instance!</a></div>{% endif %}
|
||||
{% if left_sticky %}<div class="sticky-tab" id="left-sticky"><a href="{{url_for('preview_page', uuid=uuid)}}">Show current snapshot</a></div> {% endif %}
|
||||
{% if right_sticky %}<div class="sticky-tab" id="right-sticky">{{ right_sticky }}</div> {% endif %}
|
||||
<section class="content">
|
||||
@@ -85,6 +94,13 @@
|
||||
</ul>
|
||||
{% endif %}
|
||||
{% endwith %}
|
||||
|
||||
{% if session['share-link'] %}
|
||||
<ul class="messages with-share-link">
|
||||
<li class="message">Share this link: <span id="share-link">{{ session['share-link'] }}</span> <img style="height: 1em;display:inline-block;" src="{{url_for('static_content', group='images', filename='copy.svg')}}" /></li>
|
||||
</ul>
|
||||
{% endif %}
|
||||
|
||||
{% block content %}
|
||||
|
||||
{% endblock %}
|
||||
@@ -2,27 +2,23 @@
|
||||
|
||||
{% block content %}
|
||||
<div class="edit-form">
|
||||
<form class="pure-form pure-form-stacked" action="{{url_for('scrub_page')}}" method="POST">
|
||||
<div class="box-wrap inner">
|
||||
<form class="pure-form pure-form-stacked" action="{{url_for('clear_all_history')}}" method="POST">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
This will remove all version snapshots/data, but keep your list of URLs. <br/>
|
||||
This will remove version history (snapshots) for ALL watches, but keep your list of URLs! <br/>
|
||||
You may like to use the <strong>BACKUP</strong> link first.<br/>
|
||||
</div>
|
||||
<br/>
|
||||
<div class="pure-control-group">
|
||||
<label for="confirmtext">Confirmation text</label>
|
||||
<input type="text" id="confirmtext" required="" name="confirmtext" value="" size="10"/>
|
||||
<span class="pure-form-message-inline">Type in the word <strong>scrub</strong> to confirm that you understand!</span>
|
||||
<span class="pure-form-message-inline">Type in the word <strong>clear</strong> to confirm that you understand.</span>
|
||||
</div>
|
||||
<br/>
|
||||
<div class="pure-control-group">
|
||||
<label for="confirmtext">Optional: Limit deletion of snapshots to snapshots <i>newer</i> than date/time</label>
|
||||
<input type="datetime-local" id="limit_date" name="limit_date" />
|
||||
<span class="pure-form-message-inline">dd/mm/yyyy hh:mm (24 hour format)</span>
|
||||
</div>
|
||||
<br/>
|
||||
<div class="pure-control-group">
|
||||
<button type="submit" class="pure-button pure-button-primary">Scrub!</button>
|
||||
<button type="submit" class="pure-button pure-button-primary">Clear History!</button>
|
||||
</div>
|
||||
<br/>
|
||||
<div class="pure-control-group">
|
||||
@@ -30,6 +26,7 @@
|
||||
</div>
|
||||
</fieldset>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% endblock %}
|
||||
@@ -1,6 +1,13 @@
|
||||
{% extends 'base.html' %}
|
||||
|
||||
{% block content %}
|
||||
<script>
|
||||
const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
|
||||
{% if last_error_screenshot %}
|
||||
const error_screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid, error_screenshot=1) }}";
|
||||
{% endif %}
|
||||
</script>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='diff-overview.js')}}" defer></script>
|
||||
|
||||
<div id="settings">
|
||||
<h1>Differences</h1>
|
||||
@@ -18,7 +25,7 @@
|
||||
{% if versions|length >= 1 %}
|
||||
<label for="diff-version">Compare newest (<span id="current-v-date"></span>) with</label>
|
||||
<select id="diff-version" name="previous_version">
|
||||
{% for version in versions %}
|
||||
{% for version in versions|reverse %}
|
||||
<option value="{{version}}" {% if version== current_previous_version %} selected="" {% endif %}>
|
||||
{{version}}
|
||||
</option>
|
||||
@@ -35,26 +42,70 @@
|
||||
<div id="diff-jump">
|
||||
<a onclick="next_diff();">Jump</a>
|
||||
</div>
|
||||
<div id="diff-ui">
|
||||
<table>
|
||||
<tbody>
|
||||
<tr>
|
||||
<!-- just proof of concept copied straight from github.com/kpdecker/jsdiff -->
|
||||
<td id="a" style="display: none;">{{previous}}</td>
|
||||
<td id="b" style="display: none;">{{newest}}</td>
|
||||
<td id="diff-col">
|
||||
<span id="result"></span>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
Diff algorithm from the amazing <a href="https://github.com/kpdecker/jsdiff">github.com/kpdecker/jsdiff</a>
|
||||
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||
<div class="tabs">
|
||||
<ul>
|
||||
{% if last_error_text %}<li class="tab" id="error-text-tab"><a href="#error-text">Error Text</a></li> {% endif %}
|
||||
{% if last_error_screenshot %}<li class="tab" id="error-screenshot-tab"><a href="#error-screenshot">Error Screenshot</a></li> {% endif %}
|
||||
<li class="tab" id=""><a href="#text">Text</a></li>
|
||||
<li class="tab" id="screenshot-tab"><a href="#screenshot">Screenshot</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<script src="/static/js/diff.js"></script>
|
||||
<script defer="">
|
||||
<div id="diff-ui">
|
||||
<div class="tab-pane-inner" id="error-text">
|
||||
<div class="snapshot-age error">{{watch_a.error_text_ctime|format_seconds_ago}} seconds ago</div>
|
||||
<pre>
|
||||
{{ last_error_text }}
|
||||
</pre>
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="error-screenshot">
|
||||
<div class="snapshot-age error">{{watch_a.snapshot_error_screenshot_ctime|format_seconds_ago}} seconds ago</div>
|
||||
<img id="error-screenshot-img" style="max-width: 80%" alt="Current error-ing screenshot from most recent request"/>
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="text">
|
||||
<div class="tip">Pro-tip: Use <strong>show current snapshot</strong> tab to visualise what will be ignored.
|
||||
</div>
|
||||
<div class="snapshot-age">{{watch_a.snapshot_text_ctime|format_timestamp_timeago}}</div>
|
||||
|
||||
<table>
|
||||
<tbody>
|
||||
<tr>
|
||||
<!-- just proof of concept copied straight from github.com/kpdecker/jsdiff -->
|
||||
<td id="a" style="display: none;">{{previous}}</td>
|
||||
<td id="b" style="display: none;">{{newest}}</td>
|
||||
<td id="diff-col">
|
||||
<span id="result"></span>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
Diff algorithm from the amazing <a href="https://github.com/kpdecker/jsdiff">github.com/kpdecker/jsdiff</a>
|
||||
</div>
|
||||
<div class="tab-pane-inner" id="screenshot">
|
||||
<div class="tip">
|
||||
For now, differences are performed on text, not graphically; only the latest screenshot is available.
|
||||
</div>
|
||||
{% if is_html_webdriver %}
|
||||
{% if screenshot %}
|
||||
<div class="snapshot-age">{{watch_a.snapshot_screenshot_ctime|format_timestamp_timeago}}</div>
|
||||
<img style="max-width: 80%" id="screenshot-img" alt="Current screenshot from most recent request"/>
|
||||
{% else %}
|
||||
No screenshot available just yet! Try rechecking the page.
|
||||
{% endif %}
|
||||
{% else %}
|
||||
<strong>Screenshot requires Playwright/WebDriver enabled</strong>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='diff.js')}}"></script>
|
||||
|
||||
<script defer="">
|
||||
|
||||
var a = document.getElementById('a');
|
||||
var b = document.getElementById('b');
|
||||
332
changedetectionio/templates/edit.html
Normal file
@@ -0,0 +1,332 @@
|
||||
{% extends 'base.html' %}
|
||||
{% block content %}
|
||||
{% from '_helpers.jinja' import render_field, render_checkbox_field, render_button %}
|
||||
{% from '_common_fields.jinja' import render_common_settings_form %}
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||
<script>
|
||||
const notification_base_url="{{url_for('ajax_callback_send_notification_test')}}";
|
||||
const watch_visual_selector_data_url="{{url_for('static_content', group='visual_selector_data', filename=uuid)}}";
|
||||
const screenshot_url="{{url_for('static_content', group='screenshot', filename=uuid)}}";
|
||||
const playwright_enabled={% if playwright_enabled %} true {% else %} false {% endif %};
|
||||
|
||||
{% if emailprefix %}
|
||||
const email_notification_prefix=JSON.parse('{{ emailprefix|tojson }}');
|
||||
{% endif %}
|
||||
|
||||
</script>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='watch-settings.js')}}" defer></script>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='notifications.js')}}" defer></script>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='visual-selector.js')}}" defer></script>
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='limit.js')}}" defer></script>
|
||||
|
||||
<div class="edit-form monospaced-textarea">
|
||||
|
||||
<div class="tabs collapsable">
|
||||
<ul>
|
||||
<li class="tab" id=""><a href="#general">General</a></li>
|
||||
<li class="tab"><a href="#request">Request</a></li>
|
||||
<li class="tab"><a id="visualselector-tab" href="#visualselector">Visual Filter Selector</a></li>
|
||||
<li class="tab"><a href="#filters-and-triggers">Filters & Triggers</a></li>
|
||||
<li class="tab"><a href="#notifications">Notifications</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="box-wrap inner">
|
||||
<form class="pure-form pure-form-stacked"
|
||||
action="{{ url_for('edit_page', uuid=uuid, next = request.args.get('next'), unpause_on_save = request.args.get('unpause_on_save')) }}" method="POST">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
|
||||
|
||||
<div class="tab-pane-inner" id="general">
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.url, placeholder="https://...", required=true, class="m-d") }}
|
||||
<span class="pure-form-message-inline">Some sites use JavaScript to create the content, for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a></span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.title, class="m-d") }}
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.tag) }}
|
||||
<span class="pure-form-message-inline">Organisational tag/group name used in the main listing page</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.time_between_check, class="time-check-widget") }}
|
||||
{% if has_empty_checktime %}
|
||||
<span class="pure-form-message-inline">Currently using the <a
|
||||
href="{{ url_for('settings_page', uuid=uuid) }}">default global settings</a>, change to another value if you want to be specific.</span>
|
||||
{% else %}
|
||||
<span class="pure-form-message-inline">Set to blank to use the <a
|
||||
href="{{ url_for('settings_page', uuid=uuid) }}">default global settings</a>.</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_checkbox_field(form.extract_title_as_title) }}
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_checkbox_field(form.filter_failure_notification_send) }}
|
||||
<span class="pure-form-message-inline">
|
||||
Sends a notification when the filter can no longer be seen on the page, useful for knowing when the page has changed and your filter will no longer work.
|
||||
</span>
|
||||
</div>
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="request">
|
||||
<div class="pure-control-group inline-radio">
|
||||
{{ render_field(form.fetch_backend, class="fetch-backend") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<p>Use the <strong>Basic</strong> method (default) where your watched site doesn't need Javascript to render.</p>
|
||||
<p>The <strong>Chrome/Javascript</strong> method requires a network connection to a running WebDriver+Chrome server, set by the ENV var 'WEBDRIVER_URL'. </p>
|
||||
</span>
|
||||
</div>
|
||||
{% if form.proxy %}
|
||||
<div class="pure-control-group inline-radio">
|
||||
{{ render_field(form.proxy, class="fetch-backend-proxy") }}
|
||||
<span class="pure-form-message-inline">
|
||||
Choose a proxy for this watch
|
||||
</span>
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="pure-control-group inline-radio">
|
||||
{{ render_checkbox_field(form.ignore_status_codes) }}
|
||||
</div>
|
||||
<fieldset id="webdriver-override-options">
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.webdriver_delay) }}
|
||||
<div class="pure-form-message-inline">
|
||||
<strong>If you're having trouble waiting for the page to be fully rendered (text missing etc), try increasing the 'wait' time here.</strong>
|
||||
<br/>
|
||||
This will wait <i>n</i> seconds before extracting the text.
|
||||
{% if using_global_webdriver_wait %}
|
||||
<br/><strong>Using the current global default settings</strong>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.webdriver_js_execute_code) }}
|
||||
<div class="pure-form-message-inline">
|
||||
Run this code before performing change detection, handy for filling in fields and other actions <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Run-JavaScript-before-change-detection">More help and examples here</a>
|
||||
</div>
|
||||
</div>
|
||||
</fieldset>
|
||||
<fieldset class="pure-group" id="requests-override-options">
|
||||
{% if not playwright_enabled %}
|
||||
<div class="pure-form-message-inline">
|
||||
<strong>Request override is currently only used by the <i>Basic fast Plaintext/HTTP Client</i> method.</strong>
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="pure-control-group" id="request-method">
|
||||
{{ render_field(form.method) }}
|
||||
</div>
|
||||
<div class="pure-control-group" id="request-headers">
|
||||
{{ render_field(form.headers, rows=5, placeholder="Example
|
||||
Cookie: foobar
|
||||
User-Agent: wonderbra 1.0") }}
|
||||
</div>
|
||||
<div class="pure-control-group" id="request-body">
|
||||
{{ render_field(form.body, rows=5, placeholder="Example
|
||||
{
|
||||
\"name\":\"John\",
|
||||
\"age\":30,
|
||||
\"car\":null
|
||||
}") }}
|
||||
</div>
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="notifications">
|
||||
<fieldset>
|
||||
<div class="pure-control-group inline-radio">
|
||||
{{ render_checkbox_field(form.notification_muted) }}
|
||||
</div>
|
||||
<div class="field-group" id="notification-field-group">
|
||||
{% if has_default_notification_urls %}
|
||||
<div class="inline-warning">
|
||||
<img class="inline-warning-icon" src="{{url_for('static_content', group='images', filename='notice.svg')}}" alt="Look out!" title="Lookout!"/>
|
||||
There are <a href="{{ url_for('settings_page')}}#notifications">system-wide notification URLs enabled</a>, this form will override notification settings for this watch only ‐ an empty Notification URL list here will still send notifications.
|
||||
</div>
|
||||
{% endif %}
|
||||
<a href="#notifications" id="notification-setting-reset-to-default" class="pure-button button-xsmall" style="right: 20px; top: 20px; position: absolute; background-color: #5f42dd; border-radius: 4px; font-size: 70%; color: #fff">Use system defaults</a>
|
||||
|
||||
{{ render_common_settings_form(form, emailprefix, settings_application) }}
|
||||
</div>
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="filters-and-triggers">
|
||||
<div class="pure-control-group">
|
||||
<strong>Pro-tips:</strong><br/>
|
||||
<ul>
|
||||
<li>
|
||||
Use the preview page to see your filters and triggers highlighted.
|
||||
</li>
|
||||
<li>
|
||||
Some sites use JavaScript to create the content, for this you should <a href="https://github.com/dgtlmoon/changedetection.io/wiki/Fetching-pages-with-WebDriver">use the Chrome/WebDriver Fetcher</a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_checkbox_field(form.check_unique_lines) }}
|
||||
<span class="pure-form-message-inline">Good for websites that just move the content around, and you want to know when NEW content is added, compares new lines against all history for this watch.</span>
|
||||
</div>
|
||||
</fieldset>
|
||||
<div class="pure-control-group">
|
||||
{% set field = render_field(form.css_filter,
|
||||
placeholder=".class-name or #some-id, or other CSS selector rule.",
|
||||
class="m-d")
|
||||
%}
|
||||
{{ field }}
|
||||
{% if '/text()' in field %}
|
||||
<span class="pure-form-message-inline"><strong>Note!: //text() function does not work where the <element> contains <![CDATA[]]></strong></span><br/>
|
||||
{% endif %}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>CSS - Limit text to this CSS rule, only text matching this CSS rule is included.</li>
|
||||
<li>JSON - Limit text to this JSON rule, using <a href="https://pypi.org/project/jsonpath-ng/">JSONPath</a>, prefix with <code>"json:"</code>, use <code>json:$</code> to force re-formatting if required, <a
|
||||
href="https://jsonpath.com/" target="new">test your JSONPath here</a></li>
|
||||
<li>XPath - Limit text to this XPath rule, simply start with a forward-slash,
|
||||
<ul>
|
||||
<li>Example: <code>//*[contains(@class, 'sametext')]</code> or <code>xpath://*[contains(@class, 'sametext')]</code>, <a
|
||||
href="http://xpather.com/" target="new">test your XPath here</a></li>
|
||||
<li>Example: Get all titles from an RSS feed <code>//title/text()</code></li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
Please be sure that you thoroughly understand how to write CSS or JSONPath, XPath selector rules before filing an issue on GitHub! <a
|
||||
href="https://github.com/dgtlmoon/changedetection.io/wiki/CSS-Selector-help">here for more CSS selector help</a>.<br/>
|
||||
</span>
|
||||
</div>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.subtractive_selectors, rows=5, placeholder="header
|
||||
footer
|
||||
nav
|
||||
.stockticker") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li> Remove HTML element(s) by CSS selector before text conversion. </li>
|
||||
<li> Add multiple elements or CSS selectors per line to ignore multiple parts of the HTML. </li>
|
||||
</ul>
|
||||
</span>
|
||||
</div>
|
||||
<fieldset class="pure-group">
|
||||
{{ render_field(form.ignore_text, rows=5, placeholder="Some text to ignore in a line
|
||||
/some.regex\d{2}/ for case-INsensitive regex
|
||||
") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>Each line processed separately, any line matching will be ignored (removed before creating the checksum)</li>
|
||||
<li>Regular Expression support, wrap the entire line in forward slash <code>/regex/</code></li>
|
||||
<li>Changing this will affect the comparison checksum which may trigger an alert</li>
|
||||
<li>Use the preview/show current tab to see ignores</li>
|
||||
</ul>
|
||||
</span>
|
||||
|
||||
</fieldset>
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.trigger_text, rows=5, placeholder="Some text to wait for in a line
|
||||
/some.regex\d{2}/ for case-INsensitive regex
|
||||
") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>Text to wait for before triggering a change/notification, all text and regex are tested <i>case-insensitive</i>.</li>
|
||||
<li>Trigger text is processed from the result-text that comes out of any CSS/JSON Filters for this watch</li>
|
||||
<li>Each line is processed separately (think of each line as "OR")</li>
|
||||
<li>Note: Wrap in forward slash / to use regex example: <code>/foo\d/</code></li>
|
||||
</ul>
|
||||
</span>
|
||||
</div>
|
||||
</fieldset>
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.text_should_not_be_present, rows=5, placeholder="For example: Out of stock
|
||||
Sold out
|
||||
Not in stock
|
||||
Unavailable") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>Block change-detection while this text is on the page, all text and regex are tested <i>case-insensitive</i>, good for waiting for when a product is available again</li>
|
||||
<li>Block text is processed from the result-text that comes out of any CSS/JSON Filters for this watch</li>
|
||||
<li>All lines here must not exist (think of each line as "OR")</li>
|
||||
<li>Note: Wrap in forward slash / to use regex example: <code>/foo\d/</code></li>
|
||||
</ul>
|
||||
</span>
|
||||
</div>
|
||||
</fieldset>
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{{ render_field(form.extract_text, rows=5, placeholder="\d+ online") }}
|
||||
<span class="pure-form-message-inline">
|
||||
<ul>
|
||||
<li>Extracts text in the final output (line by line) after other filters using regular expressions;
|
||||
<ul>
|
||||
<li>Regular expression ‐ example <code>/reports.+?2022/i</code></li>
|
||||
<li>Use <code>/(?aiLmsux)pattern/</code> style inline flags (more <a href="https://docs.python.org/3/library/re.html#index-15">information here</a>)<br/></li>
|
||||
<li>Keyword example ‐ example <code>Out of stock</code></li>
|
||||
<li>Use groups to extract just that text ‐ example <code>/reports.+?(\d+)/i</code> returns a list of years only</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>One line per regular-expression/ string match</li>
|
||||
</ul>
|
||||
</span>
|
||||
</div>
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner visual-selector-ui" id="visualselector">
|
||||
<img id="beta-logo" src="{{url_for('static_content', group='images', filename='beta-logo.png')}}">
|
||||
<strong>Pro-tip:</strong> This tool is only for limiting which elements will be included in the change-detection, not for interacting with the browser directly.
|
||||
<fieldset>
|
||||
<div class="pure-control-group">
|
||||
{% if visualselector_enabled %}
|
||||
{% if visualselector_data_is_ready %}
|
||||
<div id="selector-header">
|
||||
<a id="clear-selector" class="pure-button button-secondary button-xsmall" style="font-size: 70%">Clear selection</a>
|
||||
<i class="fetching-update-notice" style="font-size: 80%;">One moment, fetching screenshot and element information..</i>
|
||||
</div>
|
||||
<div id="selector-wrapper">
|
||||
<!-- request the screenshot and get the element offset info ready -->
|
||||
<!-- use img src ready load to know everything is ready to map out -->
|
||||
<!-- @todo: maybe something interesting like a field to select 'elements that contain text... and their parents n' -->
|
||||
<img id="selector-background" />
|
||||
<canvas id="selector-canvas"></canvas>
|
||||
|
||||
</div>
|
||||
<div id="selector-current-xpath" style="overflow-x: hidden"><strong>Currently:</strong> <span class="text">Loading...</span></div>
|
||||
|
||||
<span class="pure-form-message-inline">
|
||||
<p><span style="font-weight: bold">Beta!</span> The Visual Selector is new and there may be minor bugs, please report pages that dont work, help us to improve this software!</p>
|
||||
</span>
|
||||
|
||||
{% else %}
|
||||
<span class="pure-form-message-inline">Screenshot and element data is not available or not yet ready.</span>
|
||||
{% endif %}
|
||||
{% else %}
|
||||
<span class="pure-form-message-inline">
|
||||
<p>Sorry, this functionality only works with Playwright/Chrome enabled watches.</p>
|
||||
<p>Enable the Playwright Chrome fetcher, or alternatively try our <a href="https://lemonade.changedetection.io/start">very affordable subscription based service</a>.</p>
|
||||
<p>This is because Selenium/WebDriver can not extract full page screenshots reliably.</p>
|
||||
|
||||
</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div id="actions">
|
||||
<div class="pure-control-group">
|
||||
{{ render_button(form.save_button) }}
|
||||
<a href="{{url_for('form_delete', uuid=uuid)}}"
|
||||
class="pure-button button-small button-error ">Delete</a>
|
||||
<a href="{{url_for('clear_watch_history', uuid=uuid)}}"
|
||||
class="pure-button button-small button-error ">Clear History</a>
|
||||
<a href="{{url_for('form_clone', uuid=uuid)}}"
|
||||
class="pure-button button-small ">Create Copy</a>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% endblock %}
|
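The CSS/JSONPath/XPath filter help in the edit form above describes prefixing a rule with json: and evaluating it as a JSONPath expression via the jsonpath-ng package. A minimal sketch of that idea, assuming only the convention described in the form text (the helper name and wiring are illustrative, not the project's actual filter code):

    import json
    from jsonpath_ng.ext import parse

    def apply_json_filter(rule, document_text):
        # Strip the "json:" prefix described in the form help, then apply the JSONPath rule
        # to the parsed document and return the matched values.
        jsonpath_rule = rule[len("json:"):] if rule.startswith("json:") else rule
        matches = parse(jsonpath_rule).find(json.loads(document_text))
        return [m.value for m in matches]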
||||
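The ignore-text, trigger-text and block-text fields above all share the same line convention: each line is checked separately, and a line wrapped in forward slashes is treated as a case-insensitive regular expression. A minimal sketch of that convention as described in the help text (not the project's actual matching code):

    import re

    def line_matches(rule, text_line):
        # "/pattern/" rules are case-insensitive regexes; anything else is a plain,
        # case-insensitive substring check, per the form help above.
        rule = rule.strip()
        if len(rule) > 2 and rule.startswith('/') and rule.endswith('/'):
            return re.search(rule[1:-1], text_line, re.IGNORECASE) is not None
        return rule.lower() in text_line.lower()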
86
changedetectionio/templates/import.html
Normal file
@@ -0,0 +1,86 @@
|
||||
{% extends 'base.html' %}
|
||||
{% block content %}
|
||||
<script type="text/javascript" src="{{url_for('static_content', group='js', filename='tabs.js')}}" defer></script>
|
||||
<div class="edit-form monospaced-textarea">
|
||||
|
||||
<div class="tabs collapsable">
|
||||
<ul>
|
||||
<li class="tab" id=""><a href="#url-list">URL List</a></li>
|
||||
<li class="tab"><a href="#distill-io">Distill.io</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="box-wrap inner">
|
||||
<form class="pure-form pure-form-aligned" action="{{url_for('import_page')}}" method="POST">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>
|
||||
<div class="tab-pane-inner" id="url-list">
|
||||
<fieldset class="pure-group">
|
||||
<legend>
|
||||
Enter one URL per line, and optionally add tags for each URL after a space, separated by commas
|
||||
(,):
|
||||
<br>
|
||||
<code>https://example.com tag1, tag2, last tag</code>
|
||||
<br>
|
||||
URLs which do not pass validation will stay in the textarea.
|
||||
</legend>
|
||||
|
||||
|
||||
<textarea name="urls" class="pure-input-1-2" placeholder="https://"
|
||||
style="width: 100%;
|
||||
font-family:monospace;
|
||||
white-space: pre;
|
||||
overflow-wrap: normal;
|
||||
overflow-x: scroll;" rows="25">{{ import_url_list_remaining }}</textarea>
|
||||
</fieldset>
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
<div class="tab-pane-inner" id="distill-io">
|
||||
|
||||
|
||||
<fieldset class="pure-group">
|
||||
<legend>
|
||||
Copy and paste your Distill.io watch 'export' file; this should be a JSON file.<br/>
|
||||
This is <i>experimental</i>, supported fields are <code>name</code>, <code>uri</code>, <code>tags</code>, <code>config:selections</code>, the rest (including <code>schedule</code>) are ignored.
|
||||
<br/>
|
||||
<p>
|
||||
How to export? <a href="https://distill.io/docs/web-monitor/how-export-and-import-monitors/">https://distill.io/docs/web-monitor/how-export-and-import-monitors/</a><br/>
|
||||
Be sure to set your default fetcher to Chrome if required.<br/>
|
||||
</p>
|
||||
</legend>
|
||||
|
||||
|
||||
<textarea name="distill-io" class="pure-input-1-2" style="width: 100%;
|
||||
font-family:monospace;
|
||||
white-space: pre;
|
||||
overflow-wrap: normal;
|
||||
overflow-x: scroll;" placeholder="Example Distill.io JSON export file
|
||||
|
||||
{
|
||||
"client": {
|
||||
"local": 1
|
||||
},
|
||||
"data": [
|
||||
{
|
||||
"name": "Unraid | News",
|
||||
"uri": "https://unraid.net/blog",
|
||||
"config": "{\"selections\":[{\"frames\":[{\"index\":0,\"excludes\":[],\"includes\":[{\"type\":\"xpath\",\"expr\":\"(//div[@id='App']/div[contains(@class,'flex')]/main[contains(@class,'relative')]/section[contains(@class,'relative')]/div[@class='container']/div[contains(@class,'flex')]/div[contains(@class,'w-full')])[1]\"}]}],\"dynamic\":true,\"delay\":2}],\"ignoreEmptyText\":true,\"includeStyle\":false,\"dataAttr\":\"text\"}",
|
||||
"tags": [],
|
||||
"content_type": 2,
|
||||
"state": 40,
|
||||
"schedule": "{\"type\":\"INTERVAL\",\"params\":{\"interval\":4447}}",
|
||||
"ts": "2022-03-27T15:51:15.667Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
" rows="25">{{ original_distill_json }}</textarea>
|
||||
</fieldset>
|
||||
</div>
|
||||
<button type="submit" class="pure-button pure-input-1-2 pure-button-primary">Import</button>
|
||||
</form>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% endblock %}
|
||||
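The URL-list import format described above (one URL per line, optional tags after the first space, comma-separated) is simple enough to sketch a parser for; this is illustrative only, not the project's actual importer:

    def parse_import_line(line):
        # "https://example.com tag1, tag2, last tag" -> ("https://example.com", ["tag1", "tag2", "last tag"])
        line = line.strip()
        if not line:
            return None
        url, _, tag_part = line.partition(' ')
        tags = [t.strip() for t in tag_part.split(',') if t.strip()]
        return url, tags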