Basic headers fetch from file

1448-basic-headers-fetch-from-file
dgtlmoon 2 years ago
parent 143971123d
commit 46ab2846d0

@@ -1444,6 +1444,7 @@ def check_for_new_version():
         # Check daily
         app.config.exit.wait(86400)
 
 def notification_runner():
     global notification_debug_log
     from datetime import datetime

@@ -49,3 +49,15 @@ class model(dict):
     def __init__(self, *arg, **kw):
         super(model, self).__init__(*arg, **kw)
         self.update(self.base_config)
+
+
+def parse_headers_from_text_file(filepath):
+    headers = {}
+    with open(filepath, 'r') as f:
+        for l in f.readlines():
+            l = l.strip()
+            if not l.startswith('#') and ':' in l:
+                # Split on the first ':' only, so header values may themselves contain colons
+                (k, v) = l.split(':', 1)
+                headers[k.strip()] = v.strip()
+
+    return headers
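
For context, a minimal sketch of the plain-text format this parser accepts: one `Name: value` pair per line, with blank lines and `#`-comment lines ignored, and only the first colon treated as the separator. The sample contents below and the fully-qualified import path are illustrative assumptions based on this diff, not part of the patch.

# Illustrative only: exercise the parser above against a throwaway headers.txt.
import os
import tempfile

from changedetectionio.model.App import parse_headers_from_text_file  # assumed import path

sample = """\
# Lines starting with '#' are comments and are skipped
User-Agent: MyCustomAgent/1.0
Cookie: session=abc123; theme=dark
"""

with tempfile.NamedTemporaryFile('w', suffix='.txt', delete=False) as f:
    f.write(sample)
    path = f.name

print(parse_headers_from_text_file(path))
# Expected: {'User-Agent': 'MyCustomAgent/1.0', 'Cookie': 'session=abc123; theme=dark'}

os.unlink(path)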

@@ -473,6 +473,31 @@ class model(dict):
         # None is set
         return False
 
+    def has_extra_headers_file(self):
+        filepath = os.path.join(self.watch_data_dir, 'headers.txt')
+        return os.path.isfile(filepath)
+
+    def get_all_headers(self):
+        from .App import parse_headers_from_text_file
+        headers = self.get('headers', {}).copy()
+
+        # Extra headers can also be supplied as 'headers.txt' in the watch data dir
+        filepath = os.path.join(self.watch_data_dir, 'headers.txt')
+        try:
+            if os.path.isfile(filepath):
+                headers.update(parse_headers_from_text_file(filepath))
+        except Exception as e:
+            print(f"ERROR reading headers.txt at {filepath}", str(e))
+
+        # Or per tag, as 'tagname.txt' in the main datastore dir
+        for f in self.all_tags:
+            fname = re.sub(r'[\W_-]', '', f).lower().strip() + ".txt"
+            filepath = os.path.join(self.__datastore_path, fname)
+            try:
+                if os.path.isfile(filepath):
+                    headers.update(parse_headers_from_text_file(filepath))
+            except Exception as e:
+                print(f"ERROR reading headers.txt at {filepath}", str(e))
+
+        return headers
+
     def get_last_fetched_before_filters(self):
         import brotli
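
The per-tag lookup above maps each tag name to a file in the datastore directory by stripping non-alphanumeric characters and lower-casing. A small sketch of that mapping (the tag name is just an example):

import re

# Illustrative: how a watch tag resolves to its optional header file name.
# Spaces, underscores, hyphens and other punctuation are stripped; digits are kept.
tag = "My Tag_1"
fname = re.sub(r'[\W_-]', '', tag).lower().strip() + ".txt"
print(fname)  # mytag1.txt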

@@ -70,10 +70,9 @@ class perform_site_check(difference_detection_processor):
         # Unset any existing notification error
         update_obj = {'last_notification_error': False, 'last_error': False}
 
-        extra_headers = watch.get('headers', [])
-
         # Tweak the base config with the per-watch ones
-        request_headers = deepcopy(self.datastore.data['settings']['headers'])
+        extra_headers = watch.get_all_headers()
+        request_headers = self.datastore.get_all_headers()
         request_headers.update(extra_headers)
 
         # https://github.com/psf/requests/issues/4525
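
Because each source is merged with `dict.update()`, later sources win: the watch-level headers override the datastore/settings-level ones. A rough sketch of the precedence implied by this hunk (the header values are made up):

# Illustrative only: mirrors request_headers.update(extra_headers) above.
datastore_headers = {'User-Agent': 'global-agent', 'Accept': 'text/html'}  # settings + datastore headers.txt
watch_headers = {'User-Agent': 'per-watch-agent'}                          # watch headers + watch/tag .txt files

request_headers = dict(datastore_headers)
request_headers.update(watch_headers)
print(request_headers)
# {'User-Agent': 'per-watch-agent', 'Accept': 'text/html'}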

@@ -3,7 +3,7 @@ from flask import (
 )
 
 from . model import App, Watch
-from copy import deepcopy
+from copy import deepcopy, copy
 from os import path, unlink
 from threading import Lock
 import json
@@ -474,8 +474,6 @@ class ChangeDetectionStore:
         return proxy_list if len(proxy_list) else None
 
     def get_preferred_proxy_for_watch(self, uuid):
         """
         Returns the preferred proxy by ID key
@@ -507,6 +505,24 @@
         return None
 
+    def has_extra_headers_file(self):
+        filepath = os.path.join(self.datastore_path, 'headers.txt')
+        return os.path.isfile(filepath)
+
+    def get_all_headers(self):
+        from .model.App import parse_headers_from_text_file
+        headers = copy(self.data['settings'].get('headers', {}))
+
+        filepath = os.path.join(self.datastore_path, 'headers.txt')
+        try:
+            if os.path.isfile(filepath):
+                headers.update(parse_headers_from_text_file(filepath))
+        except Exception as e:
+            print(f"ERROR reading headers.txt at {filepath}", str(e))
+
+        return headers
+
     # Run all updates
     # IMPORTANT - Each update could be run even when they have a new install and the schema is correct
     # So therefor - each `update_n` should be very careful about checking if it needs to actually run
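
Taken together with the Watch-level changes above, extra headers can now come from three optional files. A sketch of the lookup locations, using made-up directory values (only the attribute names come from the diff):

import os

# Illustrative paths only -- the real values come from the datastore configuration.
datastore_path = '/datastore'                               # assumed ChangeDetectionStore.datastore_path
watch_data_dir = os.path.join(datastore_path, 'uuid-1234')  # assumed Watch.watch_data_dir

# Applied to every request (ChangeDetectionStore.get_all_headers):
print(os.path.join(datastore_path, 'headers.txt'))
# Applied to watches carrying the tag "My Tag_1" (Watch.get_all_headers):
print(os.path.join(datastore_path, 'mytag1.txt'))
# Applied to one specific watch only (Watch.get_all_headers):
print(os.path.join(watch_data_dir, 'headers.txt'))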
