author     BonfaceKilz        2020-10-27 01:18:38 +0300
committer  GitHub             2020-10-27 01:18:38 +0300
commit     37c391bc62e9080effcf83c6ff0056ab8841b7fb (patch)
tree       1e794c5616c25e82869314a2f4e91f64c4d40ea9 /test
parent     85896707ef1f9e214b45298f6b5b1a9dc37bc839 (diff)
parent     b369489e6c075eee3f58bb33e493c901b052b0a1 (diff)
download   genenetwork2-37c391bc62e9080effcf83c6ff0056ab8841b7fb.tar.gz
Merge pull request #422 from BonfaceKilz/build/python3-migration
Build/python3 migration
Diffstat (limited to 'test')
-rw-r--r--   test/requests/link_checker.py             16
-rw-r--r--   test/requests/main_web_functionality.py   21
-rw-r--r--   test/requests/mapping_tests.py             1
-rw-r--r--   test/requests/navigation_tests.py          1
-rwxr-xr-x   test/requests/test-website.py              2
5 files changed, 18 insertions, 23 deletions
diff --git a/test/requests/link_checker.py b/test/requests/link_checker.py
index d040ba54..6ac26ba7 100644
--- a/test/requests/link_checker.py
+++ b/test/requests/link_checker.py
@@ -1,4 +1,3 @@
-from __future__ import print_function
 import re
 import requests
 from lxml.html import parse
@@ -23,12 +22,9 @@ def is_in_page_link(link):
     return pattern.match(link)
 
 def get_links(doc):
-    return filter(
-        lambda x: not (
+    return [x for x in [y.get("href") for y in doc.cssselect("a")] if not (
             is_root_link(x)
-            or is_mailto_link(x))
-        , map(lambda y: y.get("href")
-              , doc.cssselect("a")))
+            or is_mailto_link(x))]
 
 def verify_link(link):
     if link[0] == "#":
@@ -58,7 +54,7 @@ def verify_static_file(link):
     try:
         result = requests.get(link, timeout=20, verify=False)
         if (result.status_code == 200 and
-            result.content.find("Error: 404 Not Found") <= 0):
+            result.content.find(bytes("Error: 404 Not Found", "utf-8")) <= 0):
             print(link+" ==> OK")
         else:
             print("ERROR: link {}".format(link))
@@ -72,9 +68,9 @@ def check_page(host, start_url):
     print("Checking links host "+host+" in page `"+start_url+"`")
     doc = parse(start_url).getroot()
     links = get_links(doc)
-    in_page_links = filter(is_in_page_link, links)
-    internal_links = filter(is_internal_link, links)
-    external_links = filter(lambda x: not (is_internal_link(x) or is_in_page_link(x)), links)
+    in_page_links = list(filter(is_in_page_link, links))
+    internal_links = list(filter(is_internal_link, links))
+    external_links = [x for x in links if not (is_internal_link(x) or is_in_page_link(x))]
     for link in internal_links:
         verify_link(host+link)
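
Note: the link_checker.py hunks above follow two standard Python 2-to-3 migration patterns. First, filter() and map() now return lazy, single-pass iterators instead of lists, so results that are reused must be wrapped in list() or rewritten as comprehensions. Second, requests' Response.content is bytes, so searching it with a str needle raises TypeError. A minimal sketch of both points, using made-up data rather than repository code:

    # Illustrative only; the links below are invented for the example.
    links = ["#top", "mailto:dev@example.org", "/search"]

    lazy = filter(lambda x: not x.startswith("#"), links)
    print(list(lazy))  # ['mailto:dev@example.org', '/search']
    print(list(lazy))  # [] -- the filter object is exhausted after one pass,
                       # hence the list(...) wrappers in check_page()

    content = b"<html>Error: 404 Not Found</html>"  # Response.content is bytes
    print(content.find(b"Error: 404 Not Found"))    # 6 -- bytes needle works
    # content.find("Error: 404 Not Found")          # TypeError under Python 3
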
diff --git a/test/requests/main_web_functionality.py b/test/requests/main_web_functionality.py
index d4c3b1ad..28033ad5 100644
--- a/test/requests/main_web_functionality.py
+++ b/test/requests/main_web_functionality.py
@@ -1,9 +1,7 @@
-from __future__ import print_function
-import re
 import requests
 from lxml.html import parse
 from link_checker import check_page
-from requests.exceptions import ConnectionError
+
 
 def check_home(url):
     doc = parse(url).getroot()
@@ -13,18 +11,20 @@ def check_home(url):
 
 def check_search_page(host):
     data = dict(
-        species="mouse"
-        , group="BXD"
-        , type="Hippocampus mRNA"
-        , dataset="HC_M2_0606_P"
-        , search_terms_or=""
-        , search_terms_and="MEAN=(15 16) LRS=(23 46)")
+        species="mouse",
+        group="BXD",
+        type="Hippocampus mRNA",
+        dataset="HC_M2_0606_P",
+        search_terms_or="",
+        search_terms_and="MEAN=(15 16) LRS=(23 46)")
     result = requests.get(host+"/search", params=data)
     found = result.text.find("records were found")
     assert(found >= 0)
     assert(result.status_code == 200)
     print("OK")
-    check_traits_page(host, "/show_trait?trait_id=1435395_s_at&dataset=HC_M2_0606_P")
+    check_traits_page(host, ("/show_trait?trait_id=1435395_"
+                             "s_at&dataset=HC_M2_0606_P"))
+
 
 def check_traits_page(host, traits_url):
     doc = parse(host+traits_url).getroot()
@@ -33,6 +33,7 @@ def check_traits_page(host, traits_url):
     print("OK")
     check_page(host, host+traits_url)
 
+
 def check_main_web_functionality(args_obj, parser):
     print("")
     print("Checking main web functionality...")
diff --git a/test/requests/mapping_tests.py b/test/requests/mapping_tests.py
index 5748a2a3..19b22c21 100644
--- a/test/requests/mapping_tests.py
+++ b/test/requests/mapping_tests.py
@@ -1,4 +1,3 @@
-from __future__ import print_function
 import re
 import copy
 import json
diff --git a/test/requests/navigation_tests.py b/test/requests/navigation_tests.py
index eda27324..6b91c1fd 100644
--- a/test/requests/navigation_tests.py
+++ b/test/requests/navigation_tests.py
@@ -1,4 +1,3 @@
-from __future__ import print_function
 import re
 import requests
 from lxml.html import parse
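
Note: mapping_tests.py and navigation_tests.py only drop `from __future__ import print_function`, which made Python 2 treat print as a function; under Python 3 that is already the default, so the import is a no-op. A quick illustration:

    print("records", 3)
    # Python 2 without the __future__ import: ('records', 3)   -- print statement, prints a tuple
    # Python 3 (or Python 2 with the import): records 3        -- print() function
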
diff --git a/test/requests/test-website.py b/test/requests/test-website.py
index f90d1843..8bfb47c2 100755
--- a/test/requests/test-website.py
+++ b/test/requests/test-website.py
@@ -3,7 +3,7 @@
 # env GN2_PROFILE=/home/wrk/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py http://localhost:5003
 #
 # Mostly to pick up the Guix GN2_PROFILE and python modules
-from __future__ import print_function
+
 import argparse
 from link_checker import check_links
 from link_checker import check_packaged_js_files
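
Note: test-website.py itself only swaps the now-redundant __future__ import for a blank line. For orientation, the checkers it imports appear to follow the func(args_obj, parser) convention visible in check_main_web_functionality above; a hypothetical minimal runner in that style is sketched below. The flag name and the way a checker is selected are assumptions for illustration, not the script's actual command-line interface.

    import argparse

    from link_checker import check_links
    from main_web_functionality import check_main_web_functionality

    # Hypothetical wiring; the real test-website.py defines its own options.
    parser = argparse.ArgumentParser(description="GN2 smoke tests")
    parser.add_argument("host", help="e.g. http://localhost:5003")
    parser.add_argument("--link-checker", dest="func", action="store_const",
                        const=check_links, default=check_main_web_functionality,
                        help="run the link checker instead of the main-page checks")

    if __name__ == "__main__":
        args = parser.parse_args()
        args.func(args, parser)  # each checker takes (args_obj, parser)
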