Mirror of https://github.com/dgtlmoon/changedetection.io.git (synced 2025-10-31 06:37:41 +00:00)

Compare commits: regression...api-import (6 commits)

Commits:

- 5d4ebff235
- 74d4c580cf
- b899579ca8
- 1f7f1e2bfa
- 0df773a12c
- 6a5566e771
```diff
@@ -240,6 +240,10 @@ def changedetection_app(config=None, datastore_o=None):
     watch_api.add_resource(api_v1.Watch, '/api/v1/watch/<string:uuid>',
                            resource_class_kwargs={'datastore': datastore, 'update_q': update_q})
 
+    watch_api.add_resource(api_v1.Import,
+                           '/api/v1/import',
+                           resource_class_kwargs={'datastore': datastore})
+
     watch_api.add_resource(api_v1.SystemInfo, '/api/v1/systeminfo',
                            resource_class_kwargs={'datastore': datastore, 'update_q': update_q})
 
```
```diff
@@ -296,6 +296,62 @@ class CreateWatch(Resource):
 
         return list, 200
 
+class Import(Resource):
+    def __init__(self, **kwargs):
+        # datastore is a black box dependency
+        self.datastore = kwargs['datastore']
+
+    @auth.check_token
+    def post(self):
+        """
+        @api {post} /api/v1/import Import a list of watched URLs
+        @apiDescription Accepts a line-feed separated list of URLs to import (one URL per line). Optional query parameters: ?tag_uuids=(tag id), ?tag=(name), ?proxy={key}, ?dedupe=true (default true).
+        @apiExample {curl} Example usage:
+            curl http://localhost:5000/api/v1/import --data-binary @list-of-sites.txt -H"x-api-key:8a111a21bc2f8f1dd9b9353bbd46049a"
+        @apiName Import
+        @apiGroup Watch
+        @apiSuccess (200) {List} OK List of watch UUIDs added
+        @apiSuccess (500) {String} ERR Some other error
+        """
+
+        extras = {}
+
+        if request.args.get('proxy'):
+            plist = self.datastore.proxy_list
+            if not request.args.get('proxy') in plist:
+                return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400
+            else:
+                extras['proxy'] = request.args.get('proxy')
+
+        dedupe = strtobool(request.args.get('dedupe', 'true'))
+
+        tags = request.args.get('tag')
+        tag_uuids = request.args.get('tag_uuids')
+
+        if tag_uuids:
+            tag_uuids = tag_uuids.split(',')
+
+        urls = request.get_data().decode('utf8').splitlines()
+        added = []
+        allow_simplehost = not strtobool(os.getenv('BLOCK_SIMPLEHOSTS', 'False'))
+        for url in urls:
+            url = url.strip()
+            if not len(url):
+                continue
+
+            # Reject anything that does not look like a URL; hosts that are only
+            # alphanumerics ("localhost" for example) are allowed unless BLOCK_SIMPLEHOSTS is set
+            if not validators.url(url, simple_host=allow_simplehost):
+                return f"Invalid or unsupported URL - {url}", 400
+
+            if dedupe and self.datastore.url_exists(url):
+                continue
+
+            new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags, tag_uuids=tag_uuids)
+            added.append(new_uuid)
+
+        return added
+
+
 class SystemInfo(Resource):
     def __init__(self, **kwargs):
         # datastore is a black box dependency
```
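The docstring above documents a curl call; as a companion, here is a minimal client-side sketch in Python of how the new endpoint could be exercised. The host, API key and URLs are placeholders, and the query parameters follow the @apiDescription (tag, tag_uuids, proxy, dedupe); this is illustrative, not part of the commit.

```python
# Minimal client sketch for POST /api/v1/import (host, API key and URLs are
# placeholders; parameters follow the @apiDescription above).
import requests

API_KEY = "8a111a21bc2f8f1dd9b9353bbd46049a"          # example key, as in the docstring
urls = "https://website1.com\nhttps://website2.com"   # one URL per line

resp = requests.post(
    "http://localhost:5000/api/v1/import",
    params={"tag": "import-test", "dedupe": "true"},  # optionally also tag_uuids=..., proxy=...
    data=urls.encode("utf8"),                         # line-feed separated body
    headers={"x-api-key": API_KEY},
)
resp.raise_for_status()
print(resp.json())  # list of UUIDs for the watches that were added
```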
```diff
@@ -46,6 +46,9 @@ from apprise.decorators import notify
 @notify(on="puts")
 def apprise_custom_api_call_wrapper(body, title, notify_type, *args, **kwargs):
     import requests
+    from apprise.utils import parse_url as apprise_parse_url
+    from apprise.URLBase import URLBase
+
     url = kwargs['meta'].get('url')
 
     if url.startswith('post'):
```
```diff
@@ -68,16 +71,46 @@ def apprise_custom_api_call_wrapper(body, title, notify_type, *args, **kwargs):
     url = url.replace('delete://', 'http://')
     url = url.replace('deletes://', 'https://')
 
-    # Try to auto-guess if it's JSON
     headers = {}
+    params = {}
+    auth = None
+
+    # Convert /foobar?+some-header=hello to a proper header dictionary
+    results = apprise_parse_url(url)
+    if results:
+        # Add our headers (which the user can over-ride if they wish) to our
+        # returned result set, and tidy entries by unquoting them
+        headers = {URLBase.unquote(x): URLBase.unquote(y)
+                   for x, y in results['qsd+'].items()}
+
+        # https://github.com/caronc/apprise/wiki/Notify_Custom_JSON#get-parameter-manipulation
+        # Apprise prefixes request args with "+"/"-" because it treats args such as &method=update
+        # as flags of its own, but here we are making plain requests, so convert the remaining
+        # query args against Apprise's logic ourselves
+        for k, v in results['qsd'].items():
+            if not k.strip('+-') in results['qsd+'].keys():
+                params[URLBase.unquote(k)] = URLBase.unquote(v)
+
+        # Determine Authentication
+        auth = ''
+        if results.get('user') and results.get('password'):
+            auth = (URLBase.unquote(results.get('user')), URLBase.unquote(results.get('password')))
+        elif results.get('user'):
+            auth = (URLBase.unquote(results.get('user')))
+
+    # Try to auto-guess if it's JSON
     try:
         json.loads(body)
-        headers = {'Content-Type': 'application/json; charset=utf-8'}
+        headers['Content-Type'] = 'application/json; charset=utf-8'
     except ValueError as e:
         pass
 
-    r(url, headers=headers, data=body)
+    r(results.get('url'),
+      headers=headers,
+      data=body,
+      params=params,
+      auth=auth
+      )
 
 
 def process_notification(n_object, datastore):
```
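To make the parsing above concrete, here is a small standalone sketch (not part of the commit) of how a custom notification URL is split into headers, query parameters and basic auth. It assumes the same apprise import paths used in the diff, and the example URL and key names are made up.

```python
# Standalone sketch of the URL parsing performed above; the example URL and
# key names are illustrative, and the imports assume the same apprise version
# as in the diff.
from apprise.utils import parse_url as apprise_parse_url
from apprise.URLBase import URLBase

url = 'posts://user:secret@example.com/notify?+x-api-key=abc123&verbose=1'
url = url.replace('posts://', 'https://')   # same scheme rewrite as the handler

results = apprise_parse_url(url)

# "+key=value" query args (results['qsd+']) become HTTP headers...
headers = {URLBase.unquote(k): URLBase.unquote(v)
           for k, v in results['qsd+'].items()}

# ...while the remaining args are passed through as plain query parameters
params = {URLBase.unquote(k): URLBase.unquote(v)
          for k, v in results['qsd'].items()
          if k.strip('+-') not in results['qsd+']}

# user:password embedded in the URL becomes HTTP basic auth
auth = None
if results.get('user') and results.get('password'):
    auth = (URLBase.unquote(results.get('user')), URLBase.unquote(results.get('password')))

print(headers)             # e.g. {'x-api-key': 'abc123'}
print(params)              # e.g. {'verbose': '1'}
print(results.get('url'))  # the URL that is actually requested
```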
```diff
@@ -234,7 +234,7 @@ class ChangeDetectionStore:
 
         # Probably this should be a dict...
         for watch in self.data['watching'].values():
-            if watch['url'] == url:
+            if watch['url'].lower() == url.lower():
                 return True
 
         return False
@@ -333,7 +333,8 @@ class ChangeDetectionStore:
 
         # Or if UUIDs given directly
         if tag_uuids:
-            apply_extras['tags'] = list(set(apply_extras['tags'] + tag_uuids))
+            for t in tag_uuids:
+                apply_extras['tags'] = list(set(apply_extras['tags'] + [t.strip()]))
 
         # Make any uuids unique
         if apply_extras.get('tags'):
```
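A quick standalone illustration (hypothetical data, not from the repository) of the two behavioural changes above: URL de-duplication is now case-insensitive, and tag UUIDs passed to add_watch() are stripped and de-duplicated one at a time.

```python
# Hypothetical data to illustrate the two ChangeDetectionStore changes above.
watching = {'uuid-1': {'url': 'https://Website1.com'}}

def url_exists(url):
    # Case-insensitive comparison, as in the updated url_exists()
    return any(w['url'].lower() == url.lower() for w in watching.values())

print(url_exists('https://website1.com'))  # True after the change, False before

# tag_uuids arriving as "?tag_uuids=abc, abc ,def" are stripped and de-duplicated
tags = []
for t in 'abc, abc ,def'.split(','):
    tags = list(set(tags + [t.strip()]))
print(sorted(tags))  # ['abc', 'def']
```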
```diff
@@ -357,3 +357,24 @@ def test_api_watch_PUT_update(client, live_server):
     # Cleanup everything
     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
     assert b'Deleted' in res.data
+
+
+def test_api_import(client, live_server):
+    api_key = extract_api_key_from_UI(client)
+
+    res = client.post(
+        url_for("import") + "?tag=import-test",
+        data='https://website1.com\r\nhttps://website2.com',
+        headers={'x-api-key': api_key},
+        follow_redirects=True
+    )
+
+    assert res.status_code == 200
+    assert len(res.json) == 2
+    res = client.get(url_for("index"))
+    assert b"https://website1.com" in res.data
+    assert b"https://website2.com" in res.data
+
+    # Should see the new tag in the tag/groups list
+    res = client.get(url_for('tags.tags_overview_page'))
+    assert b'import-test' in res.data
```