-rw-r--r--  test/requests/link_checker.py           10
-rw-r--r--  test/requests/mapping_tests.py          35
-rw-r--r--  test/requests/navigation_tests.py       15
-rwxr-xr-x  test/requests/test-website.py            2
-rw-r--r--  wqflask/base/anon_collection.py          3
-rw-r--r--  wqflask/utility/elasticsearch_tools.py   2
-rw-r--r--  wqflask/utility/svg.py                   2
7 files changed, 51 insertions, 18 deletions
diff --git a/test/requests/link_checker.py b/test/requests/link_checker.py
index 64553ed8..715f330c 100644
--- a/test/requests/link_checker.py
+++ b/test/requests/link_checker.py
@@ -18,6 +18,10 @@ def is_internal_link(link):
pattern = re.compile("^/.*")
return pattern.match(link)
+def is_in_page_link(link):
+ pattern = re.compile("^#.*")
+ return pattern.match(link)
+
def get_links(doc):
return filter(
lambda x: not (
@@ -40,6 +44,7 @@ def verify_link(link):
else:
print("ERROR: link `"+link+"` failed with status "
, result.status_code)
+
if DO_FAIL:
raise Exception("Failed verify")
except ConnectionError as ex:
@@ -52,9 +57,10 @@ def check_page(host, start_url):
print("Checking links host "+host+" in page `"+start_url+"`")
doc = parse(start_url).getroot()
links = get_links(doc)
+ in_page_links = filter(is_in_page_link, links)
internal_links = filter(is_internal_link, links)
- external_links = filter(lambda x: not is_internal_link(x), links)
- # external_links.append("http://somenon-existentsite.brr")
+ external_links = filter(lambda x: not (is_internal_link(x) or is_in_page_link(x)), links)
+
for link in internal_links:
verify_link(host+link)
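Note: the new is_in_page_link predicate keeps fragment-only anchors out of both link buckets, so "#section"-style links are neither prefixed with the host nor fetched as external URLs. A rough sketch of the resulting classification, with made-up sample links:

    links = ["/show_trait?trait_id=1435395_s_at", "#top", "http://genenetwork.org/"]
    internal = [l for l in links if is_internal_link(l)]    # ["/show_trait?trait_id=1435395_s_at"]
    in_page = [l for l in links if is_in_page_link(l)]      # ["#top"]
    external = [l for l in links
                if not (is_internal_link(l) or is_in_page_link(l))]  # ["http://genenetwork.org/"]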
diff --git a/test/requests/mapping_tests.py b/test/requests/mapping_tests.py
index fd20df11..8eb19de7 100644
--- a/test/requests/mapping_tests.py
+++ b/test/requests/mapping_tests.py
@@ -1,17 +1,10 @@
from __future__ import print_function
import re
+import copy
import json
import requests
from lxml.html import fromstring
-def get_data(list_item):
- try:
- value = list_item[1]
- except:
- value = None
- #print("list_item:", list_item, "==>", value)
- return value
-
def load_data_from_file():
filename = "../test/data/input/mapping/1435395_s_at_HC_M2_0606_P.json"
file_handle = open(filename, "r")
@@ -19,6 +12,8 @@ def load_data_from_file():
return file_data
def check_pylmm_tool_selection(host, data):
+ print("")
+ print("pylmm mapping tool selection")
data["method"] = "pylmm"
page = requests.post(host+"/marker_regression", data=data)
doc = fromstring(page.text)
@@ -27,10 +22,24 @@ def check_pylmm_tool_selection(host, data):
assert form.fields["value:BXD1"] == "15.034" # Check value in the file
def check_R_qtl_tool_selection(host, data):
- pass
+ print("")
+ print("R/qtl mapping tool selection")
+ headers = {"Content-Type": "application/x-www-form-urlencoded"}
+ page = requests.post(host+"/marker_regression", data=data, headers=headers)
+ doc = fromstring(page.text)
+ form = doc.forms[1]
+ assert form.fields["dataset"] == "HC_M2_0606_P"
+ assert form.fields["value:BXD1"] == "15.034"
def check_CIM_tool_selection(host, data):
- pass
+ print("")
+ print("CIM mapping tool selection (using reaper)")
+ data["method"] = "reaper"
+ page = requests.post(host+"/marker_regression", data=data)
+ doc = fromstring(page.text)
+ form = doc.forms[1]
+ assert form.fields["dataset"] == "HC_M2_0606_P"
+ assert form.fields["value:BXD1"] == "15.034"
def check_mapping(args_obj, parser):
print("")
@@ -38,6 +47,6 @@ def check_mapping(args_obj, parser):
host = args_obj.host
data = load_data_from_file()
- check_pylmm_tool_selection(host, data)
- check_R_qtl_tool_selection(host, data)
- check_CIM_tool_selection(host, data)
+ check_pylmm_tool_selection(host, copy.deepcopy(data))
+ check_R_qtl_tool_selection(host, copy.deepcopy(data)) ## Why does this fail?
+ check_CIM_tool_selection(host, copy.deepcopy(data))
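Note: passing copy.deepcopy(data) gives each check its own payload. Both check_pylmm_tool_selection and check_CIM_tool_selection overwrite data["method"] in place, so a shared dict would leak one tool's setting into the next check. A minimal sketch of the leak the copies prevent (the stand-in payload below is invented for illustration):

    data = {"method": None, "dataset": "HC_M2_0606_P"}  # stand-in for the JSON file contents
    check_pylmm_tool_selection(host, data)              # sets data["method"] = "pylmm"
    check_R_qtl_tool_selection(host, data)              # would now post "pylmm" instead of its own defaults
    # With copy.deepcopy(data), each call starts from the untouched file contents.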
diff --git a/test/requests/navigation_tests.py b/test/requests/navigation_tests.py
new file mode 100644
index 00000000..eda27324
--- /dev/null
+++ b/test/requests/navigation_tests.py
@@ -0,0 +1,15 @@
+from __future__ import print_function
+import re
+import requests
+from lxml.html import parse
+
+def check_navigation(args_obj, parser):
+ print("")
+ print("Checking navigation.")
+
+ host = args_obj.host
+ url = host + "/show_trait?trait_id=1435395_s_at&dataset=HC_M2_0606_P"
+ print("URL: ", url)
+ page = requests.get(url)
+ # Page is built by the javascript, hence using requests fails for this.
+ # Investigate use of selenium maybe?
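Note: for the Selenium route suggested in the comment above, one possible shape is sketched below; the browser driver and the assertion target are assumptions, nothing here is defined by this commit:

    from selenium import webdriver

    def check_navigation_selenium(host):
        driver = webdriver.Firefox()  # any WebDriver backend would do
        try:
            driver.get(host + "/show_trait?trait_id=1435395_s_at&dataset=HC_M2_0606_P")
            driver.implicitly_wait(10)  # give the JavaScript-built page time to render
            assert "1435395_s_at" in driver.page_source
        finally:
            driver.quit()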
diff --git a/test/requests/test-website.py b/test/requests/test-website.py
index 118c9df1..a33fe708 100755
--- a/test/requests/test-website.py
+++ b/test/requests/test-website.py
@@ -7,6 +7,7 @@ from __future__ import print_function
import argparse
from link_checker import check_links
from mapping_tests import check_mapping
+from navigation_tests import check_navigation
from main_web_functionality import check_main_web_functionality
import link_checker
import sys
@@ -63,6 +64,7 @@ parser.add_argument("-m", "--mapping", dest="accumulate"
, action="store_const", const=check_mapping, default=print_help
, help="Checks for mapping.")
+# Navigation tests deactivated since system relies on Javascript
# parser.add_argument("-n", "--navigation", dest="accumulate"
# , action="store_const", const=check_navigation, default=print_help
# , help="Checks for navigation.")
diff --git a/wqflask/base/anon_collection.py b/wqflask/base/anon_collection.py
index 8ee73296..dd1aa27f 100644
--- a/wqflask/base/anon_collection.py
+++ b/wqflask/base/anon_collection.py
@@ -1,6 +1,6 @@
class AnonCollection(TraitCollection):
- def __init__(self, anon_id)
+ def __init__(self, anon_id):
self.anon_id = anon_id
self.collection_members = Redis.smembers(self.anon_id)
print("self.collection_members is:", self.collection_members)
@@ -12,6 +12,7 @@ class AnonCollection(TraitCollection):
print("traits_to_remove:", traits_to_remove)
for trait in traits_to_remove:
Redis.srem(self.anon_id, trait)
+
members_now = self.collection_members - traits_to_remove
print("members_now:", members_now)
print("Went from {} to {} members in set.".format(len(self.collection_members), len(members_now)))
diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py
index 1dba357d..d35cb5ee 100644
--- a/wqflask/utility/elasticsearch_tools.py
+++ b/wqflask/utility/elasticsearch_tools.py
@@ -7,7 +7,7 @@ logger = getLogger(__name__)
from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
def test_elasticsearch_connection():
- es = Elasticsearch(['http://'+ELASTICSEARCH_HOST+":"+ELASTICSEARCH_PORT+'/'], verify_certs=True)
+ es = Elasticsearch(['http://'+ELASTICSEARCH_HOST+":"+str(ELASTICSEARCH_PORT)+'/'], verify_certs=True)
if not es.ping():
logger.warning("Elasticsearch is DOWN")
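Note: the str() wrapper matters if ELASTICSEARCH_PORT is configured as an integer, since concatenating an int into the URL string raises a TypeError; a small illustration with an assumed port value:

    ELASTICSEARCH_HOST, ELASTICSEARCH_PORT = "localhost", 9200
    # 'http://' + ELASTICSEARCH_HOST + ":" + ELASTICSEARCH_PORT + '/'   -> TypeError
    url = 'http://' + ELASTICSEARCH_HOST + ":" + str(ELASTICSEARCH_PORT) + '/'  # 'http://localhost:9200/'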
diff --git a/wqflask/utility/svg.py b/wqflask/utility/svg.py
index 512bc9e6..db13b9d1 100644
--- a/wqflask/utility/svg.py
+++ b/wqflask/utility/svg.py
@@ -1029,7 +1029,7 @@ class drawing:
try:
xv.feed(svg)
except:
- raise "SVG is not well formed, see messages above"
+ raise Exception("SVG is not well formed, see messages above")
else:
print "SVG well formed"
if __name__=='__main__':
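Note: the svg.py change swaps a string exception for a real Exception. String exceptions were removed in Python 2.6, so the old raise fails with a TypeError before the intended message is ever shown. A quick illustration:

    try:
        raise "SVG is not well formed, see messages above"  # raises TypeError, not the intended message
    except TypeError:
        pass
    raise Exception("SVG is not well formed, see messages above")  # what the patched code raises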