author      zsloan      2018-03-29 10:36:12 -0500
committer   GitHub      2018-03-29 10:36:12 -0500
commit      b215b5fe5c6d13f0ed445106230e1e38db71c918 (patch)
tree        97f1f47b092bef38856ac34c1bd745e471c38311 /test/requests/link_checker.py
parent      e67e3a76fca0bad4796853eb58140a412922bc9c (diff)
parent      e0c706c51c834caa836ecffd27a5d18fc23178ff (diff)
download    genenetwork2-b215b5fe5c6d13f0ed445106230e1e38db71c918.tar.gz
Merge pull request #297 from pjotrp/testing
Testing
Diffstat (limited to 'test/requests/link_checker.py')
-rw-r--r--  test/requests/link_checker.py  63
1 file changed, 63 insertions(+), 0 deletions(-)
diff --git a/test/requests/link_checker.py b/test/requests/link_checker.py
new file mode 100644
index 00000000..256bf6ef
--- /dev/null
+++ b/test/requests/link_checker.py
@@ -0,0 +1,63 @@
+from __future__ import print_function
+import re
+import requests
+from lxml.html import parse
+from requests.exceptions import ConnectionError
+
+def is_root_link(link):
+    pattern = re.compile("^/$")
+    return pattern.match(link)
+
+def is_mailto_link(link):
+    pattern = re.compile("^mailto:.*")
+    return pattern.match(link)
+
+def is_internal_link(link):
+    pattern = re.compile("^/.*")
+    return pattern.match(link)
+
+def get_links(doc):
+    return filter(
+        lambda x: not (
+            is_root_link(x)
+            or is_mailto_link(x))
+        , map(lambda y: y.get("href")
+              , doc.cssselect("a")))
+
+def verify_link(link):
+    try:
+        result = requests.get(link, timeout=20)
+        if result.status_code == 200:
+            print(link+" ==> OK")
+        else:
+            print("ERROR: link `"+link+"` failed with status "
+                  , result.status_code)
+    except ConnectionError as ex:
+        print("ERROR: ", link, ex)
+
+def check_page(host, start_url):
+    print("")
+    print("Checking links in page `"+start_url+"`")
+    doc = parse(start_url).getroot()
+    links = get_links(doc)
+    internal_links = filter(is_internal_link, links)
+    external_links = filter(lambda x: not is_internal_link(x), links)
+    external_links.append("http://somenon-existentsite.brr")
+    for link in internal_links:
+        verify_link(host+link)
+
+    for link in external_links:
+        verify_link(link)
+
+def check_links(args_obj, parser):
+    print("")
+    print("Checking links")
+    host = args_obj.host
+
+    # Check the home page
+    check_page(host, host)
+
+    # Check traits page
+    check_page(
+        host,
+        host+"/show_trait?trait_id=1435395_s_at&dataset=HC_M2_0606_P")
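
A minimal sketch of how check_links could be driven standalone, outside the repository's test runner (the runner itself is not part of this diff). The --host flag, its default URL, and the link_checker import path are illustrative assumptions; check_links only needs a parser and an object exposing a host attribute.

import argparse

from link_checker import check_links

if __name__ == "__main__":
    # Hypothetical wrapper: the real harness that constructs args_obj is not
    # shown in this diff. check_links(args_obj, parser) only reads args_obj.host.
    parser = argparse.ArgumentParser(
        description="Check internal and external links on a running GeneNetwork 2 server")
    parser.add_argument("--host", default="http://localhost:5003",
                        help="base URL of the server under test (assumed default)")
    args_obj = parser.parse_args()
    check_links(args_obj, parser)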