#!/usr/bin/python3

import time

from flask import url_for

from .util import live_server_setup
from ..html_tools import *


def test_setup(live_server):
    live_server_setup(live_server)


def set_original_response():
    # Minimal HTML fixture; the XPath tests below target the element whose
    # class contains 'sametext'.
    test_return_data = """<html>
    <body>
    Some initial text<br>
    <p>Which is across multiple lines</p>
    <br>
    So let's see what happens.<br>
    <p class="sametext">Some text thats the same</p>
    <p>Some text that will change</p>
    </body>
    </html>
    """

    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write(test_return_data)
    return None


def set_modified_response():
    # Same page, but only the text outside the 'sametext' element has changed.
    test_return_data = """<html>
    <body>
    Some initial text<br>
    <p>Which is across multiple lines</p>
    <br>
    So let's see what happens. THIS CHANGES AND SHOULDNT TRIGGER A CHANGE<br>
    <p class="sametext">Some text thats the same</p>
    <p>Some new text</p>
    </body>
    </html>
    """

    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write(test_return_data)
    return None


# Handle utf-8 charset replies https://github.com/dgtlmoon/changedetection.io/pull/613
def test_check_xpath_filter_utf8(client, live_server):
    filter = '//item/*[self::description]'

    # RSS 2.0 reply served with an explicit UTF-8 encoding declaration
    d = '''<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
    <channel>
        <title>rpilocator.com</title>
        <link>https://rpilocator.com</link>
        <description>Find Raspberry Pi Computers in Stock</description>
        <lastBuildDate>Thu, 19 May 2022 23:27:30 GMT</lastBuildDate>
        <image>
            <url>https://rpilocator.com/favicon.png</url>
            <title>rpilocator.com</title>
            <link>https://rpilocator.com/</link>
            <width>32</width>
            <height>32</height>
        </image>
        <item>
            <title>Stock Alert (UK): RPi CM4 - 1GB RAM, No MMC, No Wifi is In Stock at Pimoroni</title>
            <description>Stock Alert (UK): RPi CM4 - 1GB RAM, No MMC, No Wifi is In Stock at Pimoroni</description>
            <link>https://rpilocator.com?vendor=pimoroni&amp;utm_source=feed&amp;utm_medium=rss</link>
            <category>pimoroni</category>
            <category>UK</category>
            <category>CM4</category>
            <guid>F9FAB0D9-DF6F-40C8-8DEE5FC0646BB722</guid>
            <pubDate>Thu, 19 May 2022 14:32:32 GMT</pubDate>
        </item>
    </channel>
</rss>'''

    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write(d)

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True, content_type="application/rss+xml;charset=UTF-8")
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    time.sleep(1)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": filter,
              "url": test_url,
              "tag": "",
              "headers": "",
              'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data
    time.sleep(3)

    # The XML parser must not choke on the encoding declaration
    res = client.get(url_for("index"))
    assert b'Unicode strings with encoding declaration are not supported.' not in res.data

    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data


# Handle utf-8 charset replies https://github.com/dgtlmoon/changedetection.io/pull/613
def test_check_xpath_text_function_utf8(client, live_server):
    filter = '//item/title/text()'

    d = '''<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
    <channel>
        <title>rpilocator.com</title>
        <link>https://rpilocator.com</link>
        <description>Find Raspberry Pi Computers in Stock</description>
        <lastBuildDate>Thu, 19 May 2022 23:27:30 GMT</lastBuildDate>
        <image>
            <url>https://rpilocator.com/favicon.png</url>
            <title>rpilocator.com</title>
            <link>https://rpilocator.com/</link>
            <width>32</width>
            <height>32</height>
        </image>
        <item>
            <title>Stock Alert (UK): RPi CM4</title>
            <description>something else unrelated</description>
        </item>
        <item>
            <title>Stock Alert (UK): Big monitor</title>
            <description>something else unrelated</description>
        </item>
    </channel>
</rss>'''

    with open("test-datastore/endpoint-content.txt", "w") as f:
        f.write(d)

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True, content_type="application/rss+xml;charset=UTF-8")
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    time.sleep(1)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": filter,
              "url": test_url,
              "tag": "",
              "headers": "",
              'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data
    time.sleep(3)

    res = client.get(url_for("index"))
    assert b'Unicode strings with encoding declaration are not supported.' not in res.data

    # Both item titles should have been extracted by the text() XPath
    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )
    assert b'Stock Alert (UK): RPi CM4' in res.data
    assert b'Stock Alert (UK): Big monitor' in res.data

    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data


def test_check_markup_xpath_filter_restriction(client, live_server):
    sleep_time_for_fetch_thread = 3
    xpath_filter = "//*[contains(@class, 'sametext')]"

    set_original_response()

    # Give the endpoint time to spin up
    time.sleep(1)

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data

    # Give the thread time to pick it up
    time.sleep(sleep_time_for_fetch_thread)

    # Go to the edit page and restrict the watch to the 'sametext' element
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": xpath_filter,
              "url": test_url,
              "tag": "",
              "headers": "",
              'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data

    # Give the thread time to pick it up
    time.sleep(sleep_time_for_fetch_thread)

    # View it / reset state back to viewed
    client.get(url_for("diff_history_page", uuid="first"), follow_redirects=True)

    # Make a change outside the filtered element
    set_modified_response()

    # Trigger a check
    client.get(url_for("form_watch_checknow"), follow_redirects=True)

    # Give the thread time to pick it up
    time.sleep(sleep_time_for_fetch_thread)

    # The change happened outside the XPath filter, so nothing should be flagged
    res = client.get(url_for("index"))
    assert b'unviewed' not in res.data

    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data


def test_xpath_validation(client, live_server):
    # Give the endpoint time to spin up
    time.sleep(1)

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    time.sleep(2)

    # An invalid expression should be rejected by the form validator
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": "/something horrible",
              "url": test_url,
              "tag": "",
              "headers": "",
              'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"is not a valid XPath expression" in res.data

    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data


# Actually only really used by the distill.io importer, but could be handy too
def test_check_with_prefix_include_filters(client, live_server):
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data

    # Give the endpoint time to spin up
    time.sleep(1)

    set_original_response()

    # Add our URL to the import page
    test_url = url_for('test_endpoint', _external=True)
    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )
    assert b"1 Imported" in res.data
    time.sleep(3)

    # The "xpath:" prefix should explicitly select the XPath filter engine
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": "xpath://*[contains(@class, 'sametext')]",
              "url": test_url,
              "tag": "",
              "headers": "",
              'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data
    time.sleep(3)

    res = client.get(
        url_for("preview_page", uuid="first"),
        follow_redirects=True
    )

    assert b"Some text thats the same" in res.data  # in selector
    assert b"Some text that will change" not in res.data  # not in selector

    client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
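

# The tests above repeat the same "apply an include filter via the edit form,
# then wait for the update thread" step. A minimal sketch of a shared helper
# follows; the name set_filter_and_wait() and the default wait time are
# illustrative assumptions, not part of the existing suite.
def set_filter_and_wait(client, test_url, include_filter, wait_seconds=3):
    # Apply the filter to the first watch via the edit form
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={"include_filters": include_filter,
              "url": test_url,
              "tag": "",
              "headers": "",
              'fetch_backend': "html_requests"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data
    # Give the update thread time to re-fetch and process the watch
    time.sleep(wait_seconds)
    return res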