| author | Arthur Centeno | 2021-04-09 20:38:21 +0000 |
|---|---|---|
| committer | Arthur Centeno | 2021-04-09 20:38:21 +0000 |
| commit | e2b04a322f26670782fe7f7c39bcebc508fdabdd (patch) | |
| tree | a51c32bae4d544cc0beea19f455ccc52f0544a4c /test | |
| parent | 187cd40bd3273b50d2813bfccf98bfadbb8c14ff (diff) | |
| parent | ef51e08753defdfc7f3e67f8788cd1362d2cf631 (diff) | |
| download | genenetwork2-e2b04a322f26670782fe7f7c39bcebc508fdabdd.tar.gz | |
Merge branch 'testing' of github.com:genenetwork/genenetwork2 into acenteno
Diffstat (limited to 'test')
| -rw-r--r-- | test/__init__.py | 0 |
| -rw-r--r-- | test/requests/link_checker.py | 92 |
| -rw-r--r-- | test/requests/links_scraper/genelinks.py | 133 |
| -rw-r--r-- | test/requests/main_web_functionality.py | 24 |
| -rw-r--r-- | test/requests/mapping_tests.py | 13 |
| -rw-r--r-- | test/requests/navigation_tests.py | 1 |
| -rwxr-xr-x | test/requests/test-website.py | 15 |
| -rw-r--r-- | test/unittest/test_registration.py | 27 |
8 files changed, 239 insertions, 66 deletions
diff --git a/test/__init__.py b/test/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/test/__init__.py
diff --git a/test/requests/link_checker.py b/test/requests/link_checker.py
index 715f330c..feae6526 100644
--- a/test/requests/link_checker.py
+++ b/test/requests/link_checker.py
@@ -1,4 +1,3 @@
-from __future__ import print_function
 import re
 import requests
 from lxml.html import parse
@@ -23,12 +22,9 @@ def is_in_page_link(link):
     return pattern.match(link)
 
 def get_links(doc):
-    return filter(
-        lambda x: not (
+    return [x for x in [y.get("href") for y in doc.cssselect("a")] if not (
             is_root_link(x)
-            or is_mailto_link(x))
-        , map(lambda y: y.get("href")
-              , doc.cssselect("a")))
+            or is_mailto_link(x))]
 
 def verify_link(link):
     if link[0] == "#":
@@ -52,14 +48,29 @@ def verify_link(link):
         if DO_FAIL:
             raise ex
 
+
+def verify_static_file(link):
+    print("verifying "+link)
+    try:
+        result = requests.get(link, timeout=20, verify=False)
+        if (result.status_code == 200 and
+                result.content.find(bytes("Error: 404 Not Found", "utf-8")) <= 0):
+            print(link+" ==> OK")
+        else:
+            print("ERROR: link {}".format(link))
+            raise Exception("Failed verify")
+    except ConnectionError as ex:
+        print("ERROR: ", link, ex)
+
+
 def check_page(host, start_url):
     print("")
     print("Checking links host "+host+" in page `"+start_url+"`")
     doc = parse(start_url).getroot()
     links = get_links(doc)
-    in_page_links = filter(is_in_page_link, links)
-    internal_links = filter(is_internal_link, links)
-    external_links = filter(lambda x: not (is_internal_link(x) or is_in_page_link(x)), links)
+    in_page_links = list(filter(is_in_page_link, links))
+    internal_links = list(filter(is_internal_link, links))
+    external_links = [x for x in links if not (is_internal_link(x) or is_in_page_link(x))]
 
     for link in internal_links:
         verify_link(host+link)
@@ -79,3 +90,66 @@ def check_links(args_obj, parser):
     check_page(
         host,
         host+"/show_trait?trait_id=1435395_s_at&dataset=HC_M2_0606_P")
+
+
+def check_packaged_js_files(args_obj, parser):
+    host = args_obj.host
+    js_files = [
+        # Datatables Extensions:
+        "/css/DataTablesExtensions/buttonsBootstrap/css/buttons.bootstrap.css",
+        "/js/DataTablesExtensions/buttons/js/dataTables.buttons.min.js",
+        "/css/DataTablesExtensions/buttonStyles/css/buttons.dataTables.min.css",
+        "/js/DataTablesExtensions/buttons/js/dataTables.buttons.min.js",
+        "/js/DataTablesExtensions/colResize/dataTables.colResize.js",
+        "/js/DataTablesExtensions/colReorder/js/dataTables.colReorder.js",
+        "/js/DataTablesExtensions/buttons/js/buttons.colVis.min.js",
+        "/js/DataTablesExtensions/scroller/js/dataTables.scroller.min.js",
+        "/js/DataTables/js/jquery.dataTables.js",
+        "/js/DataTablesExtensions/scrollerStyle/css/scroller.dataTables.min.css",
+        # Datatables plugins:
+        "/js/DataTablesExtensions/plugins/sorting/natural.js",
+        "/js/DataTablesExtensions/plugins/sorting/scientific.js",
+        # Other js libraries
+        "/js/chroma/chroma.min.js",
+        "/js/d3-tip/d3-tip.js",
+        "/js/d3js/d3.min.js",
+        "/js/js_alt/underscore.min.js",
+        "/js/nvd3/nv.d3.min.css",
+        "/js/qtip2/jquery.qtip.min.js",
+        "/js/js_alt/md5.min.js",
+        "/js/bootstrap/js/bootstrap.min.js",
+        "/css/bootstrap/css/bootstrap.css",
+        "/js/jquery-ui/jquery-ui.min.js",
+        "/js/jquery-cookie/jquery.cookie.js",
+        "/js/jquery/jquery.min.js",
+        "/js/typeahead/typeahead.bundle.js",
+        "/js/underscore-string/underscore.string.min.js",
+        "/js/js_alt/jstat.min.js",
+        "/js/js_alt/parsley.min.js",
+        "/js/js_alt/timeago.min.js",
+        "/js/plotly/plotly.min.js",
+        "/js/ckeditor/ckeditor.js",
+        "/js/jszip/jszip.min.js",
+        "/js/jscolor/jscolor.js",
+        "/js/DataTables/js/jquery.js",
+        "/css/DataTables/css/jquery.dataTables.css",
+        "/js/colorbox/jquery.colorbox-min.js",
+        "/css/nouislider/nouislider.min.css",
+        "/js/nouislider/nouislider.js",
+        "/js/purescript-genome-browser/js/purescript-genetics-browser.js",
+        "/js/purescript-genome-browser/css/purescript-genetics-browser.css",
+        "/js/cytoscape/cytoscape.min.js",
+        "/js/cytoscape-panzoom/cytoscape-panzoom.js",
+        "/js/cytoscape-panzoom/cytoscape.js-panzoom.css",
+        "/js/cytoscape-qtip/cytoscape-qtip.js",
+        "/css/d3-tip/d3-tip.css",
+        "/js/zxcvbn/zxcvbn.js",
+        "/js/javascript-twitter-post-fetcher/js/twitterFetcher_min.js",
+        "/js/DataTables/images/sort_asc_disabled.png",
+        "/js/DataTables/images/sort_desc_disabled.png",
+        "/js/shapiro-wilk/shapiro-wilk.js",
+    ]
+
+    print("Checking links")
+    for link in js_files:
+        verify_static_file(host+link)
diff --git a/test/requests/links_scraper/genelinks.py b/test/requests/links_scraper/genelinks.py
new file mode 100644
index 00000000..12300f4a
--- /dev/null
+++ b/test/requests/links_scraper/genelinks.py
@@ -0,0 +1,133 @@
+import re
+import requests
+import urllib3
+import os
+import logging
+
+from urllib.request import urlopen as uReq
+from bs4 import BeautifulSoup as soup
+from urllib.parse import urljoin
+from urllib.parse import urlparse
+
+
+PORT = os.environ.get("PORT", "5004")
+TEMPLATE_PATH = "../wqflask/wqflask/templates"
+
+BROKEN_LINKS = set()
+
+
+def search_templates():
+    """searches for broken links in templates"""
+    html_parsed_pages = []
+    for subdir, dirs, files in os.walk(TEMPLATE_PATH):
+        for file in files:
+            file_path = os.path.join(subdir, file)
+            if file_path.endswith(".html"):
+                parsed_page = soup(
+                    open(file_path, encoding="utf8"), "html.parser")
+                html_parsed_pages.append(parsed_page)
+
+    return html_parsed_pages
+
+
+def is_valid_link(url_link):
+    try:
+        result = urlparse(url_link)
+        return all([result.scheme, result.netloc, result.path])
+    except Exception as e:
+        return False
+
+
+def test_link(link):
+    print(f'Checking -->{link}')
+    results = None
+    try:
+
+        results = requests.get(link, verify=False, timeout=10)
+        status_code = results.status_code
+
+    except Exception as e:
+        status_code = 408
+
+    return int(status_code) > 403
+
+
+def fetch_css_links(parsed_page):
+    print("fetching css links")
+    for link in parsed_page.findAll("link"):
+        full_path = None
+
+        link_url = link.attrs.get("href")
+        if is_valid_link(link_url):
+            full_path = link_url
+
+        elif re.match(r"^/css", link_url) or re.match(r"^/js", link_url):
+            full_path = urljoin('http://localhost:5004/', link_url)
+
+        if full_path is not None:
+            if test_link(full_path):
+                BROKEN_LINKS.add(full_path)
+
+
+def fetch_html_links(parsed_page):
+    print("fetching a tags ")
+
+    for link in parsed_page.findAll("a"):
+        full_path = None
+        link_url = link.attrs.get("href")
+        if re.match(r"^/", link_url):
+            full_path = urljoin('http://localhost:5004/', link_url)
+
+        elif is_valid_link(link_url):
+            full_path = link_url
+
+        if full_path is not None:
+            if test_link(full_path):
+                BROKEN_LINKS.add(full_path)
+
+
+def fetch_script_tags(parsed_page):
+    print("--->fetching js links")
+    for link in parsed_page.findAll("script"):
+        js_link = link.attrs.get("src")
+        if js_link is not None:
+            if is_valid_link(js_link):
+                raise SystemExit("Failed,the library should be packaged in guix.\
+                    Please contact,http://genenetwork.org/ for more details")
+
+            elif re.match(r"^/css", js_link) or re.match(r"^/js", js_link):
+                full_path = urljoin('http://localhost:5004/', js_link)
+                if test_link(full_path):
+                    BROKEN_LINKS.add(full_path)
+
+
+def fetch_page_links(page_url):
+
+    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+    html_page = uReq(page_url)
+    parsed_page = soup(html_page, "html.parser")
+
+    fetch_script_tags(parsed_page=parsed_page)
+    fetch_css_links(parsed_page=parsed_page)
+    fetch_html_links(parsed_page=parsed_page)
+
+
+def webpages_to_check():
+    pages = [f"http://localhost:{PORT}/"]
+
+    return pages
+
+
+if __name__ == '__main__':
+    # results = search_templates()
+
+    for page in webpages_to_check():
+        fetch_page_links(page)
+        if len(BROKEN_LINKS) > 0:
+            print("THE LINKS BELOW ARE BROKEN>>>>>>>>>>>>>")
+            for link in BROKEN_LINKS:
+                print(link)
+
+    if len(BROKEN_LINKS) > 0:
+        raise SystemExit(
+            "The links Above are broken.Please contact genenetwork.org<<<<<<<<")
diff --git a/test/requests/main_web_functionality.py b/test/requests/main_web_functionality.py
index 7b89b833..28033ad5 100644
--- a/test/requests/main_web_functionality.py
+++ b/test/requests/main_web_functionality.py
@@ -1,9 +1,7 @@
-from __future__ import print_function
-import re
 import requests
 from lxml.html import parse
 from link_checker import check_page
-from requests.exceptions import ConnectionError
+
 
 def check_home(url):
     doc = parse(url).getroot()
@@ -13,17 +11,20 @@ def check_home(url):
 
 def check_search_page(host):
     data = dict(
-        species="mouse"
-        , group="BXD"
-        , type="Hippocampus mRNA"
-        , dataset="HC_M2_0606_P"
-        , search_terms_or=""
-        , search_terms_and="MEAN=(15 16) LRS=(23 46)")
+        species="mouse",
+        group="BXD",
+        type="Hippocampus mRNA",
+        dataset="HC_M2_0606_P",
+        search_terms_or="",
+        search_terms_and="MEAN=(15 16) LRS=(23 46)")
     result = requests.get(host+"/search", params=data)
-    found = result.text.find("/show_trait?trait_id=1435395_s_at&dataset=HC_M2_0606_P")
+    found = result.text.find("records were found")
     assert(found >= 0)
+    assert(result.status_code == 200)
     print("OK")
-    check_traits_page(host, "/show_trait?trait_id=1435395_s_at&dataset=HC_M2_0606_P")
+    check_traits_page(host, ("/show_trait?trait_id=1435395_"
+                             "s_at&dataset=HC_M2_0606_P"))
+
 
 def check_traits_page(host, traits_url):
     doc = parse(host+traits_url).getroot()
@@ -32,6 +33,7 @@ def check_traits_page(host, traits_url):
     print("OK")
     check_page(host, host+traits_url)
 
+
 def check_main_web_functionality(args_obj, parser):
     print("")
     print("Checking main web functionality...")
diff --git a/test/requests/mapping_tests.py b/test/requests/mapping_tests.py
index 6de81bfe..19b22c21 100644
--- a/test/requests/mapping_tests.py
+++ b/test/requests/mapping_tests.py
@@ -1,4 +1,3 @@
-from __future__ import print_function
 import re
 import copy
 import json
@@ -15,9 +14,9 @@ def check_R_qtl_tool_selection(host, data):
     print("")
     print("R/qtl mapping tool selection")
     headers = {"Content-Type": "application/x-www-form-urlencoded"}
-    page = requests.post(host+"/marker_regression", data=data, headers=headers)
+    page = requests.post(host+"/loading", data=data, headers=headers)
     doc = fromstring(page.text)
-    form = doc.forms[1]
+    form = doc.forms[0]
     assert form.fields["dataset"] == "HC_M2_0606_P"
     assert form.fields["value:BXD1"] == "15.034"
 
@@ -25,9 +24,9 @@ def check_CIM_tool_selection(host, data):
     print("")
     print("CIM mapping tool selection (using reaper)")
     data["method"] = "reaper"
-    page = requests.post(host+"/marker_regression", data=data)
+    page = requests.post(host+"/loading", data=data)
     doc = fromstring(page.text)
-    form = doc.forms[1]
+    form = doc.forms[0]
     assert form.fields["dataset"] == "HC_M2_0606_P"
     assert form.fields["value:BXD1"] == "15.034"
 
@@ -37,6 +36,6 @@ def check_mapping(args_obj, parser):
     host = args_obj.host
     data = load_data_from_file()
-    check_pylmm_tool_selection(host, copy.deepcopy(data))
-    check_R_qtl_tool_selection(host, copy.deepcopy(data)) ## Why does this fail?
+    # check_pylmm_tool_selection(host, copy.deepcopy(data)) ## Not defined
+    check_R_qtl_tool_selection(host, copy.deepcopy(data))
     check_CIM_tool_selection(host, copy.deepcopy(data))
diff --git a/test/requests/navigation_tests.py b/test/requests/navigation_tests.py
index eda27324..6b91c1fd 100644
--- a/test/requests/navigation_tests.py
+++ b/test/requests/navigation_tests.py
@@ -1,4 +1,3 @@
-from __future__ import print_function
 import re
 import requests
 from lxml.html import parse
diff --git a/test/requests/test-website.py b/test/requests/test-website.py
index b2e09bc4..8bfb47c2 100755
--- a/test/requests/test-website.py
+++ b/test/requests/test-website.py
@@ -3,9 +3,10 @@
 # env GN2_PROFILE=/home/wrk/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py http://localhost:5003
 #
 # Mostly to pick up the Guix GN2_PROFILE and python modules
-from __future__ import print_function
+
 import argparse
 from link_checker import check_links
+from link_checker import check_packaged_js_files
 from mapping_tests import check_mapping
 from navigation_tests import check_navigation
 from main_web_functionality import check_main_web_functionality
@@ -30,6 +31,7 @@ def run_all(args_obj, parser):
     link_checker.DO_FAIL = args_obj.fail
     check_main_web_functionality(args_obj, parser)
     check_links(args_obj, parser)
+    check_packaged_js_files(args_obj, parser)
     check_mapping(args_obj, parser)
     # TODO: Add other functions as they are created.
 
@@ -104,16 +106,7 @@ parser.add_argument("-i", "--integration-tests", dest="accumulate"
                     , action="store_const", const=integration_tests, default=print_help
                     , help="Runs integration tests.")
 
-# Navigation tests deactivated since system relies on Javascript
-# parser.add_argument("-n", "--navigation", dest="accumulate"
-#                     , action="store_const", const=check_navigation, default=print_help
-#                     , help="Checks for navigation.")
-
-# parser.add_argument("-s", "--skip-broken", dest="accumulate"
-#                     , action="store_const", const=dummy, default=print_help
-#                     , help="Skip tests that are known to be broken.")
-
 args = parser.parse_args()
-# print("The arguments object: ", args)
+
 args.accumulate(args, parser)
diff --git a/test/unittest/test_registration.py b/test/unittest/test_registration.py
deleted file mode 100644
index 98d0cdff..00000000
--- a/test/unittest/test_registration.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Run test with something like
-#
-# env GN2_PROFILE=~/opt/gn-latest GENENETWORK_FILES=$HOME/gn2_data ./bin/genenetwork2 ./etc/default_settings.py -c ../test/unittest/test_registration.py
-#
-
-import unittest
-import mock.es_double as es
-from wqflask.user_manager import RegisterUser
-
-class TestRegisterUser(unittest.TestCase):
-    def setUp(self):
-        self.es = es.ESDouble()
-
-    def testRegisterUserWithCorrectData(self):
-        data = {
-            "email_address": "user@example.com"
-            , "full_name": "A.N. Other"
-            , "organization": "Some Organisation"
-            , "password": "testing"
-            , "password_confirm": "testing"
-            , "es_connection": self.es
-        }
-        result = RegisterUser(data)
-        self.assertEqual(len(result.errors), 0, "Errors were not expected")
-
-if __name__ == "__main__":
-    unittest.main()