You can not select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
216 lines
7.3 KiB
216 lines
7.3 KiB
#!/usr/bin/python3
|
|
|
|
import time
|
|
from flask import url_for
|
|
from urllib.request import urlopen
|
|
from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI
|
|
|
|
# Seconds to give the background update/fetch thread to pick up and process
# a queued watch check before asserting on the result.
sleep_time_for_fetch_thread = 3
|
|
|
|
|
|
# Basic test to check inscriptus is not adding return line chars, basically works etc
def test_inscriptus():
    """Sanity check the inscriptis HTML-to-text conversion: a <br/> becomes a
    single newline and no extra line-break characters are introduced."""
    from inscriptis import get_text

    markup = "<html><body>test!<br/>ok man</body></html>"
    as_text = get_text(markup)
    assert as_text == 'test!\nok man'
|
|
|
|
def test_setup(client, live_server):
    """Bootstrap the live test server once so the remaining tests in this
    module can register and fetch their fake endpoints."""
    live_server_setup(live_server)
|
|
|
|
# Assert that non-200's dont give notifications or register as a change
def test_non_200_doesnt_trigger_change(client, live_server):
    """A watch whose endpoint starts returning error status codes (4xx/5xx)
    must NOT register a change or show as 'unviewed', because
    "ignore_status_codes" is off by default — the last good (200) snapshot
    should still be what the preview shows.
    """
    import os

    set_original_response()
    #live_server_setup(live_server)

    # Add our URL to the import page
    res = client.post(
        url_for("import_page"),
        data={"urls": url_for('test_changing_status_code_endpoint', _external=True)},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data

    wait_for_all_checks(client)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={
            "include_filters": ".foobar-detection",
            "fetch_backend": "html_requests",
            "headers": "",
            "proxy": "proxy-two",
            "tag": "",
            "url": url_for('test_changing_status_code_endpoint', _external=True),
        },
        follow_redirects=True
    )

    # A recheck will happen here automatically
    wait_for_all_checks(client)

    # hit the mark all viewed link
    client.get(url_for("mark_all_viewed"), follow_redirects=True)

    # Now be sure the filter is missing and then recheck it
    set_modified_response()

    # https://github.com/dgtlmoon/changedetection.io/issues/962#issuecomment-1416807742
    try:
        for ecode in ['429', '400', '429', '403', '404', '500']:
            # The test endpoint reads this file to decide which status code to serve
            with open("test-endpoint-status-code.txt", 'w') as f:
                f.write(ecode)

            res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
            assert b'1 watches queued for rechecking.' in res.data
            wait_for_all_checks(client)

            # No change should be seen/no trigger of change
            res = client.get(url_for("index"))
            assert b'unviewed' not in res.data

            # load preview page so we can see what was returned
            res = client.get(url_for("preview_page", uuid="first"))
            # with open('/tmp/debug-'+ecode+'.html', 'wb') as f:
            #     f.write(res.data)

            # Should still say the original 200, because "ignore_status_codes" should be off by default.
            # (Asserting b'code: '+ecode absent would fail, because the page also shows what the error was.)
            assert b'code: 200' in res.data
    finally:
        # Remove the control file so the endpoint serves its default status
        # again for any later test, and nothing is left behind on disk.
        if os.path.exists("test-endpoint-status-code.txt"):
            os.unlink("test-endpoint-status-code.txt")

    # Cleanup everything
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data
|
|
|
|
def test_check_basic_change_detection_functionality(client, live_server):
    """End-to-end check of the core change-detection flow: import a watch,
    verify no false positives on rechecks, make a change, see it flagged as
    'unviewed' in the index and present in the RSS feed, view the diff and
    preview, then confirm the viewed state clears and <title> extraction
    works once enabled in settings.
    """
    set_original_response()

    # Add our URL to the import page
    res = client.post(
        url_for("import_page"),
        data={"urls": url_for('test_endpoint', _external=True)},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data

    wait_for_all_checks(client)

    # Do this a few times.. ensures we dont accidently set the status
    for _ in range(3):
        client.get(url_for("form_watch_checknow"), follow_redirects=True)

        # Give the thread time to pick it up
        wait_for_all_checks(client)

    # It should report nothing found (no new 'unviewed' class)
    res = client.get(url_for("index"))
    assert b'unviewed' not in res.data
    assert b'test-endpoint' in res.data

    # Default no password set, this stuff should be always available.
    assert b"SETTINGS" in res.data
    assert b"BACKUP" in res.data
    assert b"IMPORT" in res.data

    #####################

    # Check HTML conversion detected and workd
    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )
    # Check this class does not appear (that we didnt see the actual source)
    assert b'foobar-detection' not in res.data

    # Make a change
    set_modified_response()

    res = urlopen(url_for('test_endpoint', _external=True))
    assert b'which has this one new line' in res.read()

    # Force recheck
    res = client.get(url_for("form_watch_checknow"), follow_redirects=True)
    assert b'1 watches queued for rechecking.' in res.data

    wait_for_all_checks(client)

    # Now something should be ready, indicated by having a 'unviewed' class
    res = client.get(url_for("index"))
    assert b'unviewed' in res.data

    # #75, and it should be in the RSS feed
    rss_token = extract_rss_token_from_UI(client)
    res = client.get(url_for("rss", token=rss_token, _external=True))
    expected_url = url_for('test_endpoint', _external=True)
    assert b'<rss' in res.data

    # re #16 should have the diff in here too
    assert b'(into ) which has this one new line' in res.data
    assert b'CDATA' in res.data

    assert expected_url.encode('utf-8') in res.data

    # Following the 'diff' link, it should no longer display as 'unviewed' even after we recheck it a few times
    res = client.get(url_for("diff_history_page", uuid="first"))
    assert b'Compare newest' in res.data

    # Check the [preview] pulls the right one
    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )
    assert b'which has this one new line' in res.data
    assert b'Which is across multiple lines' not in res.data

    wait_for_all_checks(client)

    # Do this a few times.. ensures we dont accidently set the status
    for _ in range(2):
        client.get(url_for("form_watch_checknow"), follow_redirects=True)

        # Give the thread time to pick it up
        wait_for_all_checks(client)

    # It should report nothing found (no new 'unviewed' class)
    res = client.get(url_for("index"))
    assert b'unviewed' not in res.data
    assert b'Mark all viewed' not in res.data
    assert b'head title' not in res.data  # Should not be present because this is off by default
    assert b'test-endpoint' in res.data

    set_original_response()

    # Enable auto pickup of <title> in settings
    res = client.post(
        url_for("settings_page"),
        data={"application-extract_title_as_title": "1",
              "requests-time_between_check-minutes": 180,
              'application-fetch_backend': "html_requests"},
        follow_redirects=True
    )

    client.get(url_for("form_watch_checknow"), follow_redirects=True)
    wait_for_all_checks(client)

    res = client.get(url_for("index"))
    assert b'unviewed' in res.data
    assert b'Mark all viewed' in res.data

    # It should have picked up the <title>
    assert b'head title' in res.data

    # hit the mark all viewed link
    res = client.get(url_for("mark_all_viewed"), follow_redirects=True)

    assert b'Mark all viewed' not in res.data
    assert b'unviewed' not in res.data

    # Cleanup everything
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data
|