author     zsloan 2018-03-29 15:46:45 +0000
committer  zsloan 2018-03-29 15:46:45 +0000
commit     fef4b723d0e9d9d0b5f40bd51c6a2cd31410285b (patch)
tree       22268c6f4db65f637070319a497de24b82843f0f /test/requests/link_checker.py
parent     6ff7df2360d1a6d0461980c938809165982583e8 (diff)
parent     b215b5fe5c6d13f0ed445106230e1e38db71c918 (diff)
download   genenetwork2-fef4b723d0e9d9d0b5f40bd51c6a2cd31410285b.tar.gz
Resolved conflict in views.py
Diffstat (limited to 'test/requests/link_checker.py')
-rw-r--r--  test/requests/link_checker.py | 63
1 file changed, 63 insertions(+), 0 deletions(-)
diff --git a/test/requests/link_checker.py b/test/requests/link_checker.py
new file mode 100644
index 00000000..256bf6ef
--- /dev/null
+++ b/test/requests/link_checker.py
@@ -0,0 +1,63 @@
+from __future__ import print_function
+import re
+import requests
+from lxml.html import parse
+from requests.exceptions import ConnectionError
+
+def is_root_link(link):
+    pattern = re.compile("^/$")
+    return pattern.match(link)
+
+def is_mailto_link(link):
+    pattern = re.compile("^mailto:.*")
+    return pattern.match(link)
+
+def is_internal_link(link):
+    pattern = re.compile("^/.*")
+    return pattern.match(link)
+
+def get_links(doc):
+    # Collect the href of every anchor tag, skipping the root ("/")
+    # link and mailto: links.
+    return filter(
+        lambda x: not (is_root_link(x) or is_mailto_link(x)),
+        map(lambda y: y.get("href"),
+            doc.cssselect("a")))
+
+def verify_link(link):
+    try:
+        result = requests.get(link, timeout=20)
+        if result.status_code == 200:
+            print(link+" ==> OK")
+        else:
+            print("ERROR: link `"+link+"` failed with status "
+                  , result.status_code)
+    except ConnectionError as ex:
+        print("ERROR: ", link, ex)
+
+def check_page(host, start_url):
+    print("")
+    print("Checking links in page `"+start_url+"`")
+    doc = parse(start_url).getroot()
+    links = get_links(doc)
+    internal_links = filter(is_internal_link, links)
+    external_links = filter(lambda x: not is_internal_link(x), links)
+    external_links.append("http://somenon-existentsite.brr")
+    for link in internal_links:
+        verify_link(host+link)
+
+    for link in external_links:
+        verify_link(link)
+
+def check_links(args_obj, parser):
+    print("")
+    print("Checking links")
+    host = args_obj.host
+
+    # Check the home page
+    check_page(host, host)
+
+    # Check traits page
+    check_page(
+        host,
+        host+"/show_trait?trait_id=1435395_s_at&dataset=HC_M2_0606_P")