changedetection.io/changedetectionio/api/Import.py
from changedetectionio.strtobool import strtobool
from flask_restful import abort, Resource
from flask import request
from functools import wraps
from . import auth, validate_openapi_request
from ..validate_url import is_safe_valid_url


def default_content_type(content_type='text/plain'):
    """Decorator to set a default Content-Type header if none is provided."""
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            if not request.content_type:
                # Set default content type in the request environment
                request.environ['CONTENT_TYPE'] = content_type
            return f(*args, **kwargs)
        return wrapper
    return decorator
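
# A minimal sketch (not part of the module) of what this decorator does: if a
# request arrives without a Content-Type header, the wrapped view sees the
# supplied default in the WSGI environ. Assumes `from flask import Flask` for
# the illustration below.
#
#     app = Flask(__name__)
#
#     @default_content_type('text/plain')
#     def wrapped():
#         return request.environ.get('CONTENT_TYPE')
#
#     with app.test_request_context('/import', method='POST'):
#         assert wrapped() == 'text/plain'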


class Import(Resource):
    def __init__(self, **kwargs):
        # datastore is a black box dependency
        self.datastore = kwargs['datastore']

    @auth.check_token
    @default_content_type('text/plain')  # #3547 #3542
    @validate_openapi_request('importWatches')
    def post(self):
        """Import a list of watched URLs."""
        extras = {}
        if request.args.get('proxy'):
            plist = self.datastore.proxy_list
            if request.args.get('proxy') not in plist:
                return "Invalid proxy choice, currently supported proxies are '{}'".format(', '.join(plist)), 400
            else:
                extras['proxy'] = request.args.get('proxy')
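
        # dedupe defaults to true: URLs already present in the datastore are skipped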
        dedupe = strtobool(request.args.get('dedupe', 'true'))
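
        # Optional tagging: 'tag' is passed straight through to add_watch(),
        # 'tag_uuids' is a comma-separated list of tag UUIDs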
        tags = request.args.get('tag')
        tag_uuids = request.args.get('tag_uuids')

        if tag_uuids:
            tag_uuids = tag_uuids.split(',')
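
        # The request body is plain text, one URL per line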
        urls = request.get_data().decode('utf8').splitlines()
        added = []

        for url in urls:
            url = url.strip()
            if not len(url):
                continue

            # If hosts that only contain alphanumerics are allowed ("localhost" for example)
            if not is_safe_valid_url(url):
                return f"Invalid or unsupported URL - {url}", 400

            if dedupe and self.datastore.url_exists(url):
                continue

            new_uuid = self.datastore.add_watch(url=url, extras=extras, tag=tags, tag_uuids=tag_uuids)
            added.append(new_uuid)

        return added
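
Example usage (a sketch, not part of the file): assuming this resource is registered at /api/v1/import and that the instance authenticates API calls with an x-api-key header, a client could import URLs like this:

    import requests

    urls = "\n".join([
        "https://example.com/page-1",
        "https://example.com/page-2",
    ])

    resp = requests.post(
        "http://localhost:5000/api/v1/import",
        params={"tag": "news", "dedupe": "true"},  # optional query parameters
        headers={"x-api-key": "YOUR_API_KEY"},     # checked by auth.check_token
        data=urls,                                 # plain text body, one URL per line
    )
    resp.raise_for_status()
    print(resp.json())  # list of UUIDs for the newly added watches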