author     Frederick Muriuki Muriithi  2022-06-21 12:28:02 +0300
committer  Frederick Muriuki Muriithi  2022-06-21 12:28:02 +0300
commit     3bd3049fd403886a9653aa7c2fbd1639926420ea
tree       1265c9561832289e2c58025a25738925191f8018
parent     df0ccad74fa18d43b23cabe45a9bf268459e4151
Test the progress indication feature
Diffstat (limited to 'tests')
-rw-r--r--  tests/conftest.py                          83
-rw-r--r--  tests/qc_app/test_parse.py                 15
-rw-r--r--  tests/qc_app/test_progress_indication.py  109
3 files changed, 198 insertions(+), 9 deletions(-)
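For orientation: every fixture and test in this diff revolves around a job hash stored in Redis under the job's UUID. Pieced together from the fixtures below and the worker command asserted in test_parse.py, the hash looks roughly like the sketch that follows (the worker that actually writes and updates it is not part of this commit; the concrete values are placeholders):

    # Rough sketch of the Redis job hash the fixtures below set up; field names
    # come from this diff, the values here are illustrative only.
    the_job = {
        "job_id": "934c55d8-396e-4959-90e1-2698e9205758",
        "command": ("python3 -m scripts.worker average "
                    "/path/to/some/file.tsv <redis_url> <job_id>"),
        "status": "Processing",    # "pending" -> "Processing" -> "success"
        "filename": "/path/to/some/file.tsv",
        "percent": 32.242342,      # how far through the file the worker is
        # optional: jsonpickle-encoded tuple of the errors found so far
        # "errors": jsonpickle.encode((DuplicateHeading(...), InvalidValue(...))),
    }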
diff --git a/tests/conftest.py b/tests/conftest.py
index 9755a47..90d8264 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -5,10 +5,13 @@ import socket
import subprocess
from contextlib import closing
+import redis
import pytest
+import jsonpickle
from qc_app import create_app
from quality_control.parsing import strain_names
+from quality_control.errors import InvalidValue, DuplicateHeading
@pytest.fixture(scope="session")
def strains():
@@ -44,3 +47,83 @@ def uploadable_file_object(filename):
"Return an 'object' representing the file to be uploaded."
with open(f"tests/test_data/{filename}", "br") as the_file:
return (io.BytesIO(the_file.read()), filename)
+
+@pytest.fixture(scope="function")
+def job_id():
+ "Return a default UUID4 string as the 'job_id' for test purposes"
+ return "934c55d8-396e-4959-90e1-2698e9205758"
+
+@pytest.fixture(scope="function")
+def redis_conn_with_fresh_job(redis_url, job_id): # pylint: disable=[redefined-outer-name]
+ "redis connection with fresh, unprocessed job"
+ the_job = {
+ "job_id": job_id, "command": "some_test_command", "status": "pending",
+ "filename": "/path/to/some/file.tsv", "percent": 0
+ }
+ with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
+ rconn.hset(name=job_id, mapping=the_job)
+ yield rconn
+ rconn.hdel(job_id, *the_job.keys())
+ rconn.delete(job_id)
+
+@pytest.fixture(scope="function")
+def redis_conn_with_in_progress_job_no_errors(redis_url, job_id): # pylint: disable=[redefined-outer-name]
+ "redis connection with partially processed job, with no errors"
+ the_job = {
+ "job_id": job_id, "command": "some_test_command",
+ "status": "Processing", "filename": "/path/to/some/file.tsv",
+ "percent": 32.242342
+ }
+ with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
+ rconn.hset(name=job_id, mapping=the_job)
+ yield rconn
+ rconn.hdel(job_id, *the_job.keys())
+ rconn.delete(job_id)
+
+@pytest.fixture(scope="function")
+def redis_conn_with_in_progress_job_some_errors(redis_url, job_id): # pylint: disable=[redefined-outer-name]
+ "redis connection with partially processed job, with some errors"
+ the_job = {
+ "job_id": job_id, "command": "some_test_command",
+ "status": "Processing", "filename": "/path/to/some/file.tsv",
+ "percent": 45.34245, "errors": jsonpickle.encode((
+ DuplicateHeading(
+ 1, (5,13,19), "DupHead", "Heading 'DupHead' is repeated"),
+ InvalidValue(45, 2, "ohMy", "Invalid value 'ohMy'")))
+ }
+ with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
+ rconn.hset(name=job_id, mapping=the_job)
+ yield rconn
+ rconn.hdel(job_id, *the_job.keys())
+ rconn.delete(job_id)
+
+@pytest.fixture(scope="function")
+def redis_conn_with_completed_job_no_errors(redis_url, job_id): # pylint: disable=[redefined-outer-name]
+ "redis connection with completely processed job, with no errors"
+ the_job = {
+ "job_id": job_id, "command": "some_test_command",
+ "status": "success", "filename": "/path/to/some/file.tsv",
+ "percent": 100
+ }
+ with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
+ rconn.hset(name=job_id, mapping=the_job)
+ yield rconn
+ rconn.hdel(job_id, *the_job.keys())
+ rconn.delete(job_id)
+
+@pytest.fixture(scope="function")
+def redis_conn_with_completed_job_some_errors(redis_url, job_id): # pylint: disable=[redefined-outer-name]
+ "redis connection with completely processed job, with some errors"
+ the_job = {
+ "job_id": job_id, "command": "some_test_command",
+ "status": "success", "filename": "/path/to/some/file.tsv",
+ "percent": 100, "errors": jsonpickle.encode((
+ DuplicateHeading(
+ 1, (5,13,19), "DupHead", "Heading 'DupHead' is repeated"),
+ InvalidValue(45, 2, "ohMy", "Invalid value 'ohMy'")))
+ }
+ with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
+ rconn.hset(name=job_id, mapping=the_job)
+ yield rconn
+ rconn.hdel(job_id, *the_job.keys())
+ rconn.delete(job_id)
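The fixtures above store any collected errors as a jsonpickle blob inside the job hash. As a minimal sketch of the reverse direction, reading a job back out of Redis and restoring those error objects, something like the following would work (`fetch_job` is a hypothetical helper for illustration, not part of qc_app):

    # Minimal sketch, not actual qc_app code: read a job hash written by the
    # fixtures above and decode the jsonpickle-encoded "errors" field, if any.
    import jsonpickle
    import redis

    def fetch_job(redis_url, job_id):
        "Return the job hash for `job_id`, with 'errors' decoded when present."
        with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
            the_job = rconn.hgetall(job_id)
        if "errors" in the_job:
            # restores the DuplicateHeading/InvalidValue instances encoded above
            the_job["errors"] = jsonpickle.decode(the_job["errors"])
        return the_job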
diff --git a/tests/qc_app/test_parse.py b/tests/qc_app/test_parse.py
index 46b45d9..a4bfdd5 100644
--- a/tests/qc_app/test_parse.py
+++ b/tests/qc_app/test_parse.py
@@ -6,11 +6,8 @@ import pytest
from qc_app.jobs import job
from tests.conftest import uploadable_file_object
-def module_uuid4():
- "module patch for the `uuid.uuid4()` function"
- return "934c55d8-396e-4959-90e1-2698e9205758"
-
-def test_parse_with_existing_uploaded_file(client, redis_url, monkeypatch):
+def test_parse_with_existing_uploaded_file(
+ client, redis_url, job_id, monkeypatch):
"""
GIVEN: 1. A flask application testing client
2. A valid file, and filetype
@@ -19,7 +16,7 @@ def test_parse_with_existing_uploaded_file(client, redis_url, monkeypatch):
1. the system redirects to the job/parse status page
2. the job is placed on redis for processing
"""
- monkeypatch.setattr("qc_app.jobs.uuid4", module_uuid4)
+ monkeypatch.setattr("qc_app.jobs.uuid4", lambda: job_id)
# Upload a file
filename = "no_data_errors.tsv"
filetype = "average"
@@ -34,14 +31,14 @@ def test_parse_with_existing_uploaded_file(client, redis_url, monkeypatch):
assert b'/parse/status/934c55d8-396e-4959-90e1-2698e9205758' in resp.data
with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
- the_job = job(rconn, module_uuid4())
+ the_job = job(rconn, job_id)
- assert the_job["job_id"] == module_uuid4()
+ assert the_job["job_id"] == job_id
assert the_job["filename"] == filename
assert the_job["command"] == " ".join([
"python3", "-m", "scripts.worker", filetype,
f"{client.application.config['UPLOAD_FOLDER']}/{filename}", redis_url,
- module_uuid4()])
+ job_id])
@pytest.mark.parametrize(
"filename,uri,error_msgs",
diff --git a/tests/qc_app/test_progress_indication.py b/tests/qc_app/test_progress_indication.py
new file mode 100644
index 0000000..11a412e
--- /dev/null
+++ b/tests/qc_app/test_progress_indication.py
@@ -0,0 +1,109 @@
+"Test that the progress indication works correctly"
+
+def test_with_non_existing_job(client, redis_conn_with_fresh_job): # pylint: disable=[unused-argument]
+ """
+ GIVEN: 1. A flask application testing client
+ 2. A redis instance with a fresh, unstarted job
+ WHEN: The parsing progress page is loaded for a non-existent job
+ THEN: Ensure that the page:
+ 1. Has a meta tag to redirect it to the index page after 5 seconds
+ 2. Has text indicating that the job does not exist
+ """
+ job_id = "non-existent-job-id"
+ resp = client.get(f"/parse/status/{job_id}")
+ assert resp.status_code == 400
+ assert (
+ b"No job, with the id '<em>non-existent-job-id</em>' was found!"
+ in resp.data)
+ assert b'<meta http-equiv="refresh" content="5;url=/">' in resp.data
+
+def test_with_unstarted_job(client, job_id, redis_conn_with_fresh_job): # pylint: disable=[unused-argument]
+ """
+ GIVEN: 1. A flask application testing client
+ 2. A redis instance with a fresh, unstarted job
+ WHEN: The parsing progress page is loaded
+ THEN: Ensure that the page:
+ 1. Has a meta tag to refresh it after 5 seconds
+ 2. Has a progress indicator with zero progress
+ """
+ resp = client.get(f"/parse/status/{job_id}")
+ assert b'<meta http-equiv="refresh" content="5">' in resp.data
+ assert (
+ b'<progress id="job_' +
+ (f'{job_id}').encode("utf8") +
+ b'" value="0.0">0.0</progress>') in resp.data
+
+def test_with_in_progress_no_error_job(
+ client, job_id, redis_conn_with_in_progress_job_no_errors): # pylint: disable=[unused-argument]
+ """
+ GIVEN: 1. A flask application testing client
+ 2. A redis instance with a job in progress, with no errors found in
+ the file so far
+ WHEN: The parsing progress page is loaded
+ THEN: Ensure that the page:
+ 1. Has a meta tag to refresh it after 5 seconds
+ 2. Has a progress indicator with the percent of the file processed
+ indicated
+ """
+ resp = client.get(f"/parse/status/{job_id}")
+ assert b'<meta http-equiv="refresh" content="5">' in resp.data
+ assert (
+ b'<progress id="job_' +
+ (f'{job_id}').encode("utf8") +
+ b'" value="0.32242342">32.242342</progress>') in resp.data
+ assert (
+ b'<span class="alert-success">No errors found so far</span>'
+ in resp.data)
+ assert b"<table" not in resp.data
+
+def test_with_in_progress_job_with_errors(
+ client, job_id, redis_conn_with_in_progress_job_some_errors): # pylint: disable=[unused-argument]
+ """
+ GIVEN: 1. A flask application testing client
+ 2. A redis instance with a job in progress, with some errors found in
+ the file so far
+ WHEN: The parsing progress page is loaded
+ THEN: Ensure that the page:
+ 1. Has a meta tag to refresh it after 5 seconds
+ 2. Has a progress indicator with the percent of the file processed
+ indicated
+ 3. Has a table showing the errors found so far
+ """
+ resp = client.get(f"/parse/status/{job_id}")
+ assert b'<meta http-equiv="refresh" content="5">' in resp.data
+ assert (
+ b'<progress id="job_' +
+ (f'{job_id}').encode("utf8") +
+ b'" value="0.4534245">45.34245</progress>') in resp.data
+ assert (
+ b'<p class="alert-error">We have found the following errors so far</p>'
+ in resp.data)
+ assert b'table class="reports-table">' in resp.data
+ assert b'Duplicate Header' in resp.data
+ assert b'Invalid Value' in resp.data
+
+def test_with_completed_job_no_errors(
+ client, job_id, redis_conn_with_completed_job_no_errors): # pylint: disable=[unused-argument]
+ """
+ GIVEN: 1. A flask application testing client
+ 2. A redis instance with a completed job, with no errors found in
+ the file
+ WHEN: The parsing progress page is loaded
+ THEN: Ensure that the response is a redirection to the results page
+ """
+ resp = client.get(f"/parse/status/{job_id}")
+ assert resp.status_code == 302
+ assert f"/parse/results/{job_id}".encode("utf8") in resp.data
+
+def test_with_completed_job_some_errors(
+ client, job_id, redis_conn_with_completed_job_some_errors): # pylint: disable=[unused-argument]
+ """
+ GIVEN: 1. A flask application testing client
+ 2. A redis instance with a completed job, with some errors found in
+ the file
+ WHEN: The parsing progress page is loaded
+ THEN: Ensure that the response is a redirection to the results page
+ """
+ resp = client.get(f"/parse/status/{job_id}")
+ assert resp.status_code == 302
+ assert f"/parse/results/{job_id}".encode("utf8") in resp.data