Diffstat (limited to 'tests/uploader')
-rw-r--r--  tests/uploader/__init__.py  0
-rw-r--r--  tests/uploader/phenotypes/__init__.py  1
-rw-r--r--  tests/uploader/phenotypes/test_misc.py  387
-rw-r--r--  tests/uploader/publications/__init__.py  1
-rw-r--r--  tests/uploader/publications/test_misc.py  68
-rw-r--r--  tests/uploader/test_entry.py  43
-rw-r--r--  tests/uploader/test_expression_data_pages.py  92
-rw-r--r--  tests/uploader/test_files.py  17
-rw-r--r--  tests/uploader/test_parse.py  97
-rw-r--r--  tests/uploader/test_progress_indication.py  109
-rw-r--r--  tests/uploader/test_results_page.py  68
-rw-r--r--  tests/uploader/test_uploads_with_zip_files.py  84
12 files changed, 967 insertions, 0 deletions
diff --git a/tests/uploader/__init__.py b/tests/uploader/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/uploader/__init__.py
diff --git a/tests/uploader/phenotypes/__init__.py b/tests/uploader/phenotypes/__init__.py
new file mode 100644
index 0000000..1e0a932
--- /dev/null
+++ b/tests/uploader/phenotypes/__init__.py
@@ -0,0 +1 @@
+"""phenotypes tests"""
diff --git a/tests/uploader/phenotypes/test_misc.py b/tests/uploader/phenotypes/test_misc.py
new file mode 100644
index 0000000..cf475ad
--- /dev/null
+++ b/tests/uploader/phenotypes/test_misc.py
@@ -0,0 +1,387 @@
+"""Test miscellaneous phenotypes functions."""
+
+import pytest
+
+from uploader.phenotypes.misc import phenotypes_data_differences
+
+__sample_db_phenotypes_data__ = (
+    {
+        "PhenotypeId": 4,
+        "xref_id": 10001,
+        "DataId": 8967043,
+        "data": {
+            "B6D2F1": {"StrainId": 1, "value": None},
+            "C57BL/6J": {"StrainId": 2, "value": None},
+            "DBA/2J": {"StrainId": 3, "value": None},
+            "BXD1": {"StrainId": 4, "value": 61.4},
+            "BXD2": {"StrainId": 5, "value": 49},
+            "BXD5": {"StrainId": 6, "value": 62.5},
+            "BXD6": {"StrainId": 7, "value": 53.1}
+        }
+    },
+    {
+        "PhenotypeId": 10,
+        "xref_id": 10002,
+        "DataId": 8967044,
+        "data": {
+            "B6D2F1": {"StrainId": 1, "value": None},
+            "C57BL/6J": {"StrainId": 2, "value": None},
+            "DBA/2J": {"StrainId": 3, "value": None},
+            "BXD1": {"StrainId": 4, "value": 54.1},
+            "BXD2": {"StrainId": 5, "value": 50.1},
+            "BXD5": {"StrainId": 6, "value": 53.3},
+            "BXD6": {"StrainId": 7, "value": 55.1}
+        }
+    },
+    {
+        "PhenotypeId": 15,
+        "xref_id": 10003,
+        "DataId": 8967045,
+        "data": {
+            "B6D2F1": {"StrainId": 1, "value": None},
+            "C57BL/6J": {"StrainId": 2, "value": None},
+            "DBA/2J": {"StrainId": 3, "value": None},
+            "BXD1": {"StrainId": 4, "value": 483},
+            "BXD2": {"StrainId": 5, "value": 403},
+            "BXD5": {"StrainId": 6, "value": 501},
+            "BXD6": {"StrainId": 7, "value": 403}
+        }
+    },
+    {
+        "PhenotypeId": 20,
+        "xref_id": 10004,
+        "DataId": 8967046,
+        "data": {
+            "B6D2F1": {"StrainId": 1, "value": None},
+            "C57BL/6J": {"StrainId": 2, "value": None},
+            "DBA/2J": {"StrainId": 3, "value": None},
+            "BXD1": {"StrainId": 4, "value": 49.8},
+            "BXD2": {"StrainId": 5, "value": 45.5},
+            "BXD5": {"StrainId": 6, "value": 62.9},
+            "BXD6": {"StrainId": 7, "value": None}
+        }
+    },
+    {
+        "PhenotypeId": 25,
+        "xref_id": 10005,
+        "DataId": 8967047,
+        "data": {
+            "B6D2F1": {"StrainId": 1, "value": None},
+            "C57BL/6J": {"StrainId": 2, "value": None},
+            "DBA/2J": {"StrainId": 3, "value": None},
+            "BXD1": {"StrainId": 4, "value": 46},
+            "BXD2": {"StrainId": 5, "value": 44.9},
+            "BXD5": {"StrainId": 6, "value": 52.5},
+            "BXD6": {"StrainId": 7, "value": None}
+        }
+    })
+
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+    "filedata,dbdata,expected",
+    ((tuple(), tuple(), tuple()), # No data
+
+     # No data difference
+     (({
+             "phenotype_id": 4,
+             "xref_id": 10001,
+             "data": {
+                 "B6D2F1": None,
+                 "C57BL/6J": None,
+                 "DBA/2J": None,
+                 "BXD1": 61.4,
+                 "BXD2": 49,
+                 "BXD5": 62.5,
+                 "BXD6": 53.1
+             }
+     },
+          {
+              "phenotype_id": 10,
+              "xref_id": 10002,
+              "data": {
+                  "B6D2F1": None,
+                  "C57BL/6J": None,
+                  "DBA/2J": None,
+                  "BXD1": 54.1,
+                  "BXD2": 50.1,
+                  "BXD5": 53.3,
+                  "BXD6": 55.1
+              }
+          },
+          {
+              "phenotype_id": 15,
+              "xref_id": 10003,
+              "data": {
+                  "B6D2F1": None,
+                  "C57BL/6J": None,
+                  "DBA/2J": None,
+                  "BXD1": 483,
+                  "BXD2": 403,
+                  "BXD5": 501,
+                  "BXD6": 403
+              }
+          },
+          {
+              "phenotype_id": 20,
+              "xref_id": 10004,
+              "data": {
+                  "B6D2F1": None,
+                  "C57BL/6J": None,
+                  "DBA/2J": None,
+                  "BXD1": 49.8,
+                  "BXD2": 45.5,
+                  "BXD5": 62.9,
+                  "BXD6": None
+              }
+          },
+          {
+              "phenotype_id": 25,
+              "xref_id": 10005,
+              "data": {
+                  "B6D2F1": None,
+                  "C57BL/6J": None,
+                  "DBA/2J": None,
+                  "BXD1": 46,
+                  "BXD2": 44.9,
+                  "BXD5": 52.5,
+                  "BXD6": None
+              }
+          }),
+         __sample_db_phenotypes_data__,
+         tuple()),
+
+     # Change values: No deletions
+     (({
+             "phenotype_id": 4,
+             "xref_id": 10001,
+             "data": {
+                 "B6D2F1": None,
+                 "C57BL/6J": None,
+                 "DBA/2J": None,
+                 "BXD1": 77.2,
+                 "BXD2": 49,
+                 "BXD5": 62.5,
+                 "BXD6": 53.1
+             }
+     },
+          {
+              "phenotype_id": 10,
+              "xref_id": 10002,
+              "data": {
+                  "B6D2F1": None,
+                  "C57BL/6J": None,
+                  "DBA/2J": None,
+                  "BXD1": 54.1,
+                  "BXD2": 50.1,
+                  "BXD5": 53.3,
+                  "BXD6": 55.1
+              }
+          },
+          {
+              "phenotype_id": 15,
+              "xref_id": 10003,
+              "data": {
+                  "B6D2F1": None,
+                  "C57BL/6J": None,
+                  "DBA/2J": None,
+                  "BXD1": 483,
+                  "BXD2": 403,
+                  "BXD5": 503,
+                  "BXD6": 903
+              }
+          },
+          {
+              "phenotype_id": 20,
+              "xref_id": 10004,
+              "data": {
+                  "B6D2F1": None,
+                  "C57BL/6J": None,
+                  "DBA/2J": 1,
+                  "BXD1": 8,
+                  "BXD2": 9,
+                  "BXD5": 62.9,
+                  "BXD6": None
+              }
+          },
+          {
+              "phenotype_id": 25,
+              "xref_id": 10005,
+              "data": {
+                  "B6D2F1": None,
+                  "C57BL/6J": None,
+                  "DBA/2J": None,
+                  "BXD1": 46,
+                  "BXD2": 44.9,
+                  "BXD5": 52.5,
+                  "BXD6": None
+              }
+          }),
+         __sample_db_phenotypes_data__,
+      ({
+          "PhenotypeId": 4,
+          "xref_id": 10001,
+          "DataId": 8967043,
+          "StrainId": 4,
+          "StrainName": "BXD1",
+          "value": 77.2
+      },
+       {
+           "PhenotypeId": 15,
+           "xref_id": 10003,
+           "DataId": 8967045,
+           "StrainId": 6,
+           "StrainName": "BXD5",
+           "value": 503
+       },
+       {
+           "PhenotypeId": 15,
+           "xref_id": 10003,
+           "DataId": 8967045,
+           "StrainId": 7,
+           "StrainName": "BXD6",
+           "value": 903
+       },
+       {
+           "PhenotypeId": 20,
+           "xref_id": 10004,
+           "DataId": 8967046,
+           "StrainId": 3,
+           "StrainName": "DBA/2J",
+           "value": 1
+       },
+       {
+           "PhenotypeId": 20,
+           "xref_id": 10004,
+           "DataId": 8967046,
+           "StrainId": 4,
+           "StrainName": "BXD1",
+           "value": 8
+       },
+       {
+           "PhenotypeId": 20,
+           "xref_id": 10004,
+           "DataId": 8967046,
+           "StrainId": 5,
+           "StrainName": "BXD2",
+           "value": 9
+       })),
+
+     # Changes — with deletions
+     (({
+             "phenotype_id": 4,
+             "xref_id": 10001,
+             "data": {
+                 "B6D2F1": None,
+                 "C57BL/6J": None,
+                 "DBA/2J": None,
+                 "BXD1": None,
+                 "BXD2": 49,
+                 "BXD5": 62.5,
+                 "BXD6": 53.1
+             }
+     },
+          {
+              "phenotype_id": 10,
+              "xref_id": 10002,
+              "data": {
+                  "B6D2F1": None,
+                  "C57BL/6J": None,
+                  "DBA/2J": None,
+                  "BXD1": 54.1,
+                  "BXD2": 50.1,
+                  "BXD5": 53.3,
+                  "BXD6": 55.1
+              }
+          },
+          {
+              "phenotype_id": 15,
+              "xref_id": 10003,
+              "data": {
+                  "B6D2F1": None,
+                  "C57BL/6J": None,
+                  "DBA/2J": None,
+                  "BXD1": 483,
+                  "BXD2": 403,
+                  "BXD5": None,
+                  "BXD6": None
+              }
+          },
+          {
+              "phenotype_id": 20,
+              "xref_id": 10004,
+              "data": {
+                  "B6D2F1": None,
+                  "C57BL/6J": None,
+                  "DBA/2J": 15,
+                  "BXD1": None,
+                  "BXD2": 24,
+                  "BXD5": 62.9,
+                  "BXD6": None
+              }
+          },
+          {
+              "phenotype_id": 25,
+              "xref_id": 10005,
+              "data": {
+                  "B6D2F1": None,
+                  "C57BL/6J": None,
+                  "DBA/2J": None,
+                  "BXD1": 46,
+                  "BXD2": 44.9,
+                  "BXD5": 52.5,
+                  "BXD6": None
+              }
+          }),
+         __sample_db_phenotypes_data__,
+      ({
+          "PhenotypeId": 4,
+          "xref_id": 10001,
+          "DataId": 8967043,
+          "StrainId": 4,
+          "StrainName": "BXD1",
+          "value": None
+      },
+       {
+           "PhenotypeId": 15,
+           "xref_id": 10003,
+           "DataId": 8967045,
+           "StrainId": 6,
+           "StrainName": "BXD5",
+           "value": None
+       },
+       {
+           "PhenotypeId": 15,
+           "xref_id": 10003,
+           "DataId": 8967045,
+           "StrainId": 7,
+           "StrainName": "BXD6",
+           "value": None
+       },
+       {
+           "PhenotypeId": 20,
+           "xref_id": 10004,
+           "DataId": 8967046,
+           "StrainId": 3,
+           "StrainName": "DBA/2J",
+           "value": 15
+       },
+       {
+           "PhenotypeId": 20,
+           "xref_id": 10004,
+           "DataId": 8967046,
+           "StrainId": 4,
+           "StrainName": "BXD1",
+           "value": None
+       },
+       {
+           "PhenotypeId": 20,
+           "xref_id": 10004,
+           "DataId": 8967046,
+           "StrainId": 5,
+           "StrainName": "BXD2",
+           "value": 24
+       }))))
+def test_phenotypes_data_differences(filedata, dbdata, expected):
+    """Test differences are computed correctly."""
+    assert phenotypes_data_differences(filedata, dbdata) == expected
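The parametrised cases above fully determine what the tests expect of phenotypes_data_differences. As a reader's aid, here is a minimal sketch of that behaviour, not the actual uploader.phenotypes.misc implementation: it assumes rows are matched on the (phenotype id, xref_id) pair and that one record is emitted per strain whose file value differs from the database value.

def phenotypes_data_differences_sketch(filedata, dbdata) -> tuple:
    """Sketch: per-strain differences between file data and database data."""
    dbmap = {(row["PhenotypeId"], row["xref_id"]): row for row in dbdata}
    diffs = []
    for filerow in filedata:
        dbrow = dbmap.get((filerow["phenotype_id"], filerow["xref_id"]))
        if dbrow is None:
            continue
        for strain, newvalue in filerow["data"].items():
            dbcell = dbrow["data"].get(strain)
            # Emit a record only when the value actually changed.
            if dbcell is not None and dbcell["value"] != newvalue:
                diffs.append({
                    "PhenotypeId": dbrow["PhenotypeId"],
                    "xref_id": dbrow["xref_id"],
                    "DataId": dbrow["DataId"],
                    "StrainId": dbcell["StrainId"],
                    "StrainName": strain,
                    "value": newvalue})
    return tuple(diffs)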
diff --git a/tests/uploader/publications/__init__.py b/tests/uploader/publications/__init__.py
new file mode 100644
index 0000000..de15e08
--- /dev/null
+++ b/tests/uploader/publications/__init__.py
@@ -0,0 +1 @@
+"""publications tests"""
diff --git a/tests/uploader/publications/test_misc.py b/tests/uploader/publications/test_misc.py
new file mode 100644
index 0000000..8c7e567
--- /dev/null
+++ b/tests/uploader/publications/test_misc.py
@@ -0,0 +1,68 @@
+"""Tests for functions used for bulk editing."""
+import pytest
+
+from uploader.publications.misc import publications_differences
+
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+    "filedata,dbdata,pubmed2pubidmap,expected",
+    (((), (), {}, tuple()), # no data
+
+     # Same Data
+     (({"phenotype_id": 1, "xref_id": 10001, "PubMed_ID": 9999999999999},
+       {"phenotype_id": 1, "xref_id": 10002, "PubMed_ID": 9999999999999},
+       {"phenotype_id": 1, "xref_id": 10003, "PubMed_ID": 9999999999999},
+       {"phenotype_id": 1, "xref_id": 10005, "PubMed_ID": 9999999999999}),
+      ({"PhenotypeId": 1, "xref_id": 10001, "PublicationId": 15,
+        "PubMed_ID": 9999999999999},
+       {"PhenotypeId": 1, "xref_id": 10002, "PublicationId": 15,
+        "PubMed_ID": 9999999999999},
+       {"PhenotypeId": 1, "xref_id": 10003, "PublicationId": 15,
+        "PubMed_ID": 9999999999999},
+       {"PhenotypeId": 1, "xref_id": 10004, "PublicationId": 15,
+        "PubMed_ID": 9999999999999}),
+      {9999999999999: 15},
+      tuple()),
+
+     # Differences: no new pubmeds (all pubmeds in db)
+     (({"phenotype_id": 1, "xref_id": 10001, "PubMed_ID": 9999999999999},
+       {"phenotype_id": 1, "xref_id": 10002, "PubMed_ID": 9999999999998},
+       {"phenotype_id": 1, "xref_id": 10003, "PubMed_ID": 9999999999999},
+       {"phenotype_id": 1, "xref_id": 10004, "PubMed_ID": 9999999999997}),
+      ({"PhenotypeId": 1, "xref_id": 10001, "PublicationId": 15,
+        "PubMed_ID": 9999999999999},
+       {"PhenotypeId": 1, "xref_id": 10002, "PublicationId": 15,
+        "PubMed_ID": 9999999999999},
+       {"PhenotypeId": 1, "xref_id": 10003, "PublicationId": 15,
+        "PubMed_ID": 9999999999999},
+       {"PhenotypeId": 1, "xref_id": 10004, "PublicationId": 15,
+        "PubMed_ID": None}),
+      {9999999999999: 15, 9999999999998: 18, 9999999999997: 12},
+      ({"PhenotypeId": 1, "xref_id": 10002, "PublicationId": 18,
+          "PubMed_ID": 9999999999998},
+       {"PhenotypeId": 1, "xref_id": 10004, "PublicationId": 12,
+        "PubMed_ID": 9999999999997})),
+
+     # Differences: Deletions of pubmeds
+     (({"phenotype_id": 1, "xref_id": 10001, "PubMed_ID": 9999999999999},
+       {"phenotype_id": 1, "xref_id": 10002, "PubMed_ID": None},
+       {"phenotype_id": 1, "xref_id": 10003, "PubMed_ID": 9999999999999},
+       {"phenotype_id": 1, "xref_id": 10004, "PubMed_ID": None}),
+      ({"PhenotypeId": 1, "xref_id": 10001, "PublicationId": 15,
+        "PubMed_ID": 9999999999999},
+       {"PhenotypeId": 1, "xref_id": 10002, "PublicationId": 15,
+        "PubMed_ID": 9999999999999},
+       {"PhenotypeId": 1, "xref_id": 10003, "PublicationId": 15,
+        "PubMed_ID": 9999999999999},
+       {"PhenotypeId": 1, "xref_id": 10004, "PublicationId": 15,
+        "PubMed_ID": 9999999999999}),
+      {9999999999999: 15, 9999999999998: 18, 9999999999997: 12},
+      ({"PhenotypeId": 1, "xref_id": 10002, "PublicationId": None,
+        "PubMed_ID": None},
+       {"PhenotypeId": 1, "xref_id": 10004, "PublicationId": None,
+        "PubMed_ID": None}))))
+def test_publications_differences(filedata, dbdata, pubmed2pubidmap, expected):
+    """Test publication differences — flesh out description…"""
+    assert publications_differences(
+        filedata, dbdata, pubmed2pubidmap) == expected
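For orientation, a minimal sketch of what the cases above require of publications_differences (an illustration, not the real uploader.publications.misc code): match file and database rows on (phenotype id, xref_id), emit a row whenever the PubMed ID changed, and resolve the new PublicationId through pubmed2pubidmap, falling back to None when the PubMed ID was removed.

def publications_differences_sketch(filedata, dbdata, pubmed2pubidmap) -> tuple:
    """Sketch: publication rows whose PubMed ID changed in the uploaded file."""
    dbmap = {(row["PhenotypeId"], row["xref_id"]): row for row in dbdata}
    diffs = []
    for filerow in filedata:
        dbrow = dbmap.get((filerow["phenotype_id"], filerow["xref_id"]))
        if dbrow is None or filerow["PubMed_ID"] == dbrow["PubMed_ID"]:
            continue
        diffs.append({
            "PhenotypeId": dbrow["PhenotypeId"],
            "xref_id": dbrow["xref_id"],
            # None when the PubMed ID was deleted or is unknown to the map.
            "PublicationId": pubmed2pubidmap.get(filerow["PubMed_ID"]),
            "PubMed_ID": filerow["PubMed_ID"]})
    return tuple(diffs)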
diff --git a/tests/uploader/test_entry.py b/tests/uploader/test_entry.py
new file mode 100644
index 0000000..0c614a5
--- /dev/null
+++ b/tests/uploader/test_entry.py
@@ -0,0 +1,43 @@
+"""Test the entry module in the web-ui"""
+import pytest
+
+@pytest.mark.parametrize(
+    "dataitem,lower",
+    (
+        # expression data UI elements
+        (b'<h2 class="heading">expression data</h2>', True),
+        (b'<a href="/upload"', False),
+        (b'upload expression data</a>', False),
+
+        # samples/cases data UI elements
+        (b'<h2 class="heading">samples/cases</h2>', True),
+        (b'<a href="/samples/upload/species"', False),
+        (b'upload samples/cases', True),
+
+        # R/qtl2 data UI elements
+        (b'<h2 class="heading">r/qtl2 bundles</h2>', True),
+        (b'<a href="/upload/rqtl2/select-species"', False),
+        (b'upload r/qtl2 bundle', True)
+    ))
+def test_landing_page_has_sections(client, dataitem, lower):
+    """
+    GIVEN: A flask application testing client
+    WHEN: the index page is requested
+    THEN: ensure the page has the expected UI elements
+    """
+    resp = client.get("/")
+    assert resp.status_code == 200
+    assert dataitem in (resp.data.lower() if lower else resp.data)
+
+
+def test_landing_page_fails_with_post(client):
+    """
+    GIVEN: A flask application testing client
+    WHEN: the index page is requested with the "POST" method
+    THEN: ensure the system fails
+    """
+    resp = client.post("/")
+    assert resp.status_code == 405
+    assert (
+        b'<h1>405: The method is not allowed for the requested URL.</h1>'
+        in resp.data)
diff --git a/tests/uploader/test_expression_data_pages.py b/tests/uploader/test_expression_data_pages.py
new file mode 100644
index 0000000..c2f7de1
--- /dev/null
+++ b/tests/uploader/test_expression_data_pages.py
@@ -0,0 +1,92 @@
+"""Test expression data path"""
+import pytest
+
+from tests.conftest import uploadable_file_object
+
+def test_basic_elements_present_in_index_page(client):
+    """
+    GIVEN: A flask application testing client
+    WHEN: the index page is requested with the "GET" method and no data
+    THEN: verify that the response contains the expected form elements
+    """
+    response = client.get("/upload")
+    assert response.status_code == 200
+    ## form present
+    assert b'<form action="/upload"' in response.data
+    assert b'method="POST"' in response.data
+    assert b'enctype="multipart/form-data"' in response.data
+    assert b'</form>' in response.data
+    ## filetype elements
+    assert b'<input type="radio" name="filetype"' in response.data
+    assert b'id="filetype_standard_error"' in response.data
+    assert b'id="filetype_average"' in response.data
+    ## file upload elements
+    assert b'<label for="file_upload"' in response.data
+    assert b'select file' in response.data
+    assert b'<input type="file" name="qc_text_file"' in response.data
+    assert b'id="file_upload"' in response.data
+    ## submit button
+    assert b'<button type="submit"' in response.data
+
+
+def test_post_notifies_errors_if_no_data_is_provided(client):
+    """
+    GIVEN: A flask application testing client
+    WHEN: the index page is requested with the "POST" method and with no
+          data provided
+    THEN: ensure the system responds with the appropriate error messages
+    """
+    response = client.post("/upload", data={}, follow_redirects=True)
+    assert len(response.history) == 1
+    redirect = response.history[0]
+    assert redirect.status_code == 302
+    assert redirect.location == "/upload"
+
+    assert response.status_code == 200
+    assert b'Invalid file type provided.' in response.data
+    assert b'No file was uploaded.' in response.data
+
+def test_post_with_correct_data(client):
+    """
+    GIVEN: A flask application testing client
+    WHEN: the index page is requested with the "POST" method and with the
+          appropriate data provided
+    THEN: ensure the system redirects to the parse endpoint with the filename
+          and filetype
+    """
+    response = client.post(
+        "/upload", data={
+            "speciesid": 1,
+            "filetype": "average",
+            "qc_text_file": uploadable_file_object("no_data_errors.tsv")
+        })
+
+    assert response.status_code == 302
+    assert b'Redirecting...' in response.data
+    assert (
+        b'/parse/parse?speciesid=1&amp;filename=no_data_errors.tsv&amp;filetype=average'
+        in response.data)
+
+
+@pytest.mark.parametrize(
+    "request_data,error_message",
+    (({"filetype": "invalid_choice",
+       "qc_text_file": uploadable_file_object("no_data_errors.tsv")},
+      b'Invalid file type provided.'),
+     ({"filetype": "average"}, b'No file was uploaded.'),
+     ({"filetype": "standard-error"}, b'No file was uploaded.')))
+def test_post_with_missing_or_invalid_data(client, request_data, error_message):
+    """
+    GIVEN: A flask application testing client
+    WHEN: the index page is requested with the "POST" method and with data
+          either being missing or invalid
+    THEN: ensure that the system responds with the appropriate error message
+    """
+    response = client.post("/upload", data=request_data, follow_redirects=True)
+    assert len(response.history) == 1
+    redirect = response.history[0]
+    assert redirect.status_code == 302
+    assert redirect.location == "/upload"
+
+    assert response.status_code == 200
+    assert error_message in response.data
diff --git a/tests/uploader/test_files.py b/tests/uploader/test_files.py
new file mode 100644
index 0000000..cb22fff
--- /dev/null
+++ b/tests/uploader/test_files.py
@@ -0,0 +1,17 @@
+"""Tests functions in the `uploader.files` module."""
+from pathlib import Path
+
+import pytest
+
+from uploader.files import sha256_digest_over_file
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+    "filepath,expectedhash",
+    ((Path("tests/test_data/average.tsv.zip"),
+      "a371c654c095c030edad468e1c3d6b176ea8adfbcd91a322afd37779044478d9"),
+     (Path("tests/test_data/standarderror.tsv"),
+      "a08332e0b06391d50eecb722f69d85fbdf374a2d77713ee879d3fd6c60419d55")))
+def test_sha256_digest_over_file(filepath: Path, expectedhash: str):
+    """Test the `sha256_digest_over_file` function."""
+    assert sha256_digest_over_file(filepath) == expectedhash
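The expected hashes above can be reproduced with a plain chunked SHA-256 over the test files; a minimal sketch follows (an assumption about the behaviour, not the actual uploader.files implementation).

import hashlib
from pathlib import Path

def sha256_digest_over_file_sketch(filepath: Path) -> str:
    """Sketch: hex SHA-256 digest of a file, read in 8 KiB chunks."""
    digest = hashlib.sha256()
    with filepath.open("rb") as infile:
        for chunk in iter(lambda: infile.read(8192), b""):
            digest.update(chunk)
    return digest.hexdigest()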
diff --git a/tests/uploader/test_parse.py b/tests/uploader/test_parse.py
new file mode 100644
index 0000000..20c75b7
--- /dev/null
+++ b/tests/uploader/test_parse.py
@@ -0,0 +1,97 @@
+"""Test the 'parse' module in the web-ui"""
+import sys
+
+import redis
+import pytest
+
+from uploader.jobs import job, jobsnamespace
+
+from tests.conftest import uploadable_file_object
+
+def test_parse_with_existing_uploaded_file(
+        #pylint: disable=[too-many-arguments,too-many-positional-arguments]
+        client,
+        db_url,
+        redis_url,
+        redis_ttl,
+        jobs_prefix,
+        job_id,
+        monkeypatch):
+    """
+    GIVEN: 1. A flask application testing client
+           2. A valid file, and filetype
+    WHEN: The file is uploaded, and the parsing triggered
+    THEN: Ensure that:
+          1. the system redirects to the job/parse status page
+          2. the job is placed on redis for processing
+    """
+    monkeypatch.setattr("uploader.jobs.uuid4", lambda: job_id)
+    # Upload a file
+    speciesid = 1
+    filename = "no_data_errors.tsv"
+    filetype = "average"
+    client.post(
+        "/upload", data={
+            "speciesid": speciesid,
+            "filetype": filetype,
+            "qc_text_file": uploadable_file_object(filename)})
+    # Checks
+    resp = client.get(f"/parse/parse?speciesid={speciesid}&filename={filename}"
+                      f"&filetype={filetype}")
+    assert resp.status_code == 302
+    assert b'Redirecting...' in resp.data
+    assert f"/parse/status/{job_id}".encode("utf8") in resp.data
+
+    with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
+        the_job = job(rconn, jobsnamespace(), job_id)
+
+    assert the_job["jobid"] == job_id
+    assert the_job["filename"] == filename
+    assert the_job["command"] == " ".join([
+        sys.executable, "-m", "scripts.validate_file", db_url, redis_url,
+        jobs_prefix, job_id, "--redisexpiry", str(redis_ttl), str(speciesid),
+        filetype, f"{client.application.config['UPLOAD_FOLDER']}/{filename}"])
+
+@pytest.mark.parametrize(
+    "filename,uri,error_msgs",
+    (("non_existent.file",
+      "/parse/parse?filename=non_existent.file&filename=average",
+      [b'Selected file does not exist (any longer)']),
+     ("non_existent.file",
+      "/parse/parse?filename=non_existent.file&filename=standard-error",
+      [b'Selected file does not exist (any longer)']),
+     ("non_existent.file",
+      "/parse/parse?filename=non_existent.file&filename=percival",
+      [b'Selected file does not exist (any longer)',
+       b'Invalid filetype provided']),
+     ("no_data_errors.tsv",
+      "/parse/parse?filename=no_data_errors.tsv&filename=percival",
+      [b'Invalid filetype provided']),
+     ("no_data_errors.tsv",
+      "/parse/parse?filename=no_data_errors.tsv",
+      [b'No filetype provided']),
+     (None, "/parse/parse", [b'No file provided', b'No filetype provided'])))
+def test_parse_with_non_uploaded_file(client, filename, uri, error_msgs):
+    """
+    GIVEN: 1. A flask application testing client
+           2. A valid filetype
+           3. A filename to a file that has not been uploaded yet
+    WHEN: The parsing triggered
+    THEN: Ensure that the appropriate errors are displayed
+    """
+    ## Conditionally upload files
+    if filename and filename != "non_existent.file":
+        client.post(
+            "/upload", data={
+                "filetype": "average",
+                "qc_text_file": uploadable_file_object(filename)})
+    # Trigger
+    resp = client.get(uri, follow_redirects=True)
+    ## Check that there was exactly one redirect
+    assert len(resp.history) == 1 and resp.history[0].status_code == 302
+    ## Check that redirect is to home page and is successful
+    assert resp.request.path == "/upload"
+    assert resp.status_code == 200
+    ## Check that error(s) are displayed
+    for error_msg in error_msgs:
+        assert error_msg in resp.data
diff --git a/tests/uploader/test_progress_indication.py b/tests/uploader/test_progress_indication.py
new file mode 100644
index 0000000..14a1050
--- /dev/null
+++ b/tests/uploader/test_progress_indication.py
@@ -0,0 +1,109 @@
+"Test that the progress indication works correctly"
+
+def test_with_non_existing_job(client, redis_conn_with_fresh_job): # pylint: disable=[unused-argument]
+    """
+    GIVEN: 1. A flask application testing client
+           2. A redis instance with a fresh, unstarted job
+    WHEN: The parsing progress page is loaded for a non-existent job
+    THEN: Ensure that the page:
+          1. Has a meta tag to redirect it to the index page after 5 seconds
+          2. Has text indicating that the job does not exist
+    """
+    job_id = "non-existent-job-id"
+    resp = client.get(f"/parse/status/{job_id}")
+    assert resp.status_code == 400
+    assert (
+        b"No job, with the id '<em>non-existent-job-id</em>' was found!"
+        in resp.data)
+    assert b'<meta http-equiv="refresh" content="5;url=/upload">' in resp.data
+
+def test_with_unstarted_job(client, job_id, redis_conn_with_fresh_job): # pylint: disable=[unused-argument]
+    """
+    GIVEN: 1. A flask application testing client
+           2. A redis instance with a fresh, unstarted job
+    WHEN: The parsing progress page is loaded
+    THEN: Ensure that the page:
+          1. Has a meta tag to refresh it after 5 seconds
+          2. Has a progress indicator with zero progress
+    """
+    resp = client.get(f"/parse/status/{job_id}")
+    assert b'<meta http-equiv="refresh" content="5">' in resp.data
+    assert (
+        b'<progress id="job_' + (f'{job_id}').encode("utf8") + b'"') in resp.data
+    assert b'value="0.0"' in resp.data
+    assert b'0.0</progress>' in resp.data
+
+def test_with_in_progress_no_error_job(
+        client, job_id, redis_conn_with_in_progress_job_no_errors): # pylint: disable=[unused-argument]
+    """
+    GIVEN: 1. A flask application testing client
+           2. A redis instance with a job in progress, with no errors found in
+              the file so far
+    WHEN: The parsing progress page is loaded
+    THEN: Ensure that the page:
+          1. Has a meta tag to refresh it after 5 seconds
+          2. Has a progress indicator with the percent of the file processed
+             indicated
+    """
+    resp = client.get(f"/parse/status/{job_id}")
+    assert b'<meta http-equiv="refresh" content="5">' in resp.data
+    assert (
+        b'<progress id="job_' + (f'{job_id}').encode("utf8") + b'"') in resp.data
+    assert b'value="0.32242342"' in resp.data
+    assert b'32.242342</progress>' in resp.data
+    assert (
+        b'<span >No errors found so far</span>'
+        in resp.data)
+    assert b"<table" not in resp.data
+
+def test_with_in_progress_job_with_errors(
+        client, job_id, redis_conn_with_in_progress_job_some_errors): # pylint: disable=[unused-argument]
+    """
+    GIVEN: 1. A flask application testing client
+           2. A redis instance with a job in progress, with some errors found in
+              the file so far
+    WHEN: The parsing progress page is loaded
+    THEN: Ensure that the page:
+          1. Has a meta tag to refresh it after 5 seconds
+          2. Has a progress indicator with the percent of the file processed
+             indicated
+          3. Has a table showing the errors found so far
+    """
+    resp = client.get(f"/parse/status/{job_id}")
+    assert b'<meta http-equiv="refresh" content="5">' in resp.data
+    assert (
+        b'<progress id="job_' + (f'{job_id}').encode("utf8") + b'"') in resp.data
+    assert b'value="0.4534245"' in resp.data
+    assert b'45.34245</progress>' in resp.data
+    assert (
+        b'<p class="alert-danger">We have found the following errors so far</p>'
+        in resp.data)
+    assert b'<table class="table reports-table">' in resp.data
+    assert b'Duplicate Header' in resp.data
+    assert b'Invalid Value' in resp.data
+
+def test_with_completed_job_no_errors(
+        client, job_id, redis_conn_with_completed_job_no_errors): # pylint: disable=[unused-argument]
+    """
+    GIVEN: 1. A flask application testing client
+           2. A redis instance with a completed job, with no errors found in
+              the file so far
+    WHEN: The parsing progress page is loaded
+    THEN: Ensure that the response is a redirection to the results page
+    """
+    resp = client.get(f"/parse/status/{job_id}")
+    assert resp.status_code == 302
+    assert f"/parse/results/{job_id}".encode("utf8") in resp.data
+
+def test_with_completed_job_some_errors(
+        client, job_id, redis_conn_with_completed_job_some_errors): # pylint: disable=[unused-argument]
+    """
+    GIVEN: 1. A flask application testing client
+           2. A redis instance with a completed job, with some errors found in
+              the file so far
+    WHEN: The parsing progress page is loaded
+    THEN: Ensure that the response is a redirection to the results page
+    """
+    resp = client.get(f"/parse/status/{job_id}")
+    assert resp.status_code == 302
+    assert f"/parse/results/{job_id}".encode("utf8") in resp.data
diff --git a/tests/uploader/test_results_page.py b/tests/uploader/test_results_page.py
new file mode 100644
index 0000000..8c8379f
--- /dev/null
+++ b/tests/uploader/test_results_page.py
@@ -0,0 +1,68 @@
+"Test results page"
+
+def test_results_with_stderr_output(
+        client, job_id, stderr_with_output, # pylint: disable=[unused-argument]
+        redis_conn_with_in_progress_job_no_errors): # pylint: disable=[unused-argument]
+    """
+    GIVEN: 1. A flask application testing client
+           2. A file with content to simulate the stderr output
+           3. A sample job to prevent the "No such job" error message
+    WHEN: The parsing status page is loaded for a job that wrote to stderr
+    THEN: Ensure that the page:
+          1. Redirects to a job failure display page
+          2. The job failure display page:
+             a) indicates that this is a worker failure
+             b) provides some debugging information
+    """
+    # Maybe get rid of the use of a stderr file, and check for actual exceptions
+    resp = client.get(f"/parse/status/{job_id}", follow_redirects=True)
+    assert len(resp.history) == 1
+    assert b'<h1 class="heading">Worker Failure</h1>' in resp.data
+    assert b'<h4>Debugging Information</h4>' in resp.data
+    assert (
+        f"<li><strong>job id</strong>: {job_id}</li>".encode("utf8")
+        in resp.data)
+
+def test_results_with_completed_job_no_errors(
+        client, job_id, stderr_with_no_output, # pylint: disable=[unused-argument]
+        redis_conn_with_completed_job_no_errors): # pylint: disable=[unused-argument]
+    """
+    GIVEN: 1. A flask application testing client
+           2. A redis instance with a completed job, with no errors found in
+              the file
+           3. A file with no contents to simulate no stderr output
+    WHEN: The parsing progress page is loaded
+    THEN: Ensure that:
+          1. the system redirects to the results page
+          2. the results page indicates that there are no errors in the file
+             being processed
+    """
+    resp = client.get(f"/parse/status/{job_id}", follow_redirects=True)
+    assert len(resp.history) == 1
+    assert (
+        b'<span class="alert-success">No errors found in the file</span>'
+        in resp.data)
+
+def test_results_with_completed_job_some_errors(
+        client, job_id, stderr_with_no_output, # pylint: disable=[unused-argument]
+        redis_conn_with_completed_job_some_errors): # pylint: disable=[unused-argument]
+    """
+    GIVEN: 1. A flask application testing client
+           2. A redis instance with a completed job, with some errors found in
+              the file
+           3. A file with no contents to simulate no stderr output
+    WHEN: The parsing progress page is loaded
+    THEN: Ensure that:
+          1. the system redirects to the results page
+          2. the results page displays the errors found
+    """
+    resp = client.get(f"/parse/status/{job_id}", follow_redirects=True)
+    assert len(resp.history) == 1
+    assert (
+        b'<p class="alert-danger">We found the following errors</p>'
+        in resp.data)
+    assert b'<table class="table reports-table">' in resp.data
+    assert b'Duplicate Header' in resp.data
+    assert b'<td>Heading &#39;DupHead&#39; is repeated</td>' in resp.data
+    assert b'Invalid Value' in resp.data
+    assert b'<td>Invalid value &#39;ohMy&#39;</td>' in resp.data
diff --git a/tests/uploader/test_uploads_with_zip_files.py b/tests/uploader/test_uploads_with_zip_files.py
new file mode 100644
index 0000000..1506cfa
--- /dev/null
+++ b/tests/uploader/test_uploads_with_zip_files.py
@@ -0,0 +1,84 @@
+"""Test the upload of zip files"""
+from tests.conftest import uploadable_file_object
+
+def test_upload_zipfile_with_zero_files(client):
+    """
+    GIVEN: A flask application testing client
+    WHEN: A zip file with no files is uploaded
+    THEN: Ensure that the system responds with the appropriate error message and
+          status code
+    """
+    resp = client.post("/upload",
+                       data={
+                           "filetype": "average",
+                           "qc_text_file": uploadable_file_object("empty.zip")},
+                       follow_redirects=True)
+    assert len(resp.history) == 1
+    redirect = resp.history[0]
+    assert redirect.status_code == 302
+    assert redirect.location == "/upload"
+
+    assert resp.status_code == 200
+    assert (b"Expected exactly one (1) member file within the uploaded zip "
+            b"file. Got 0 member files.") in resp.data
+
+def test_upload_zipfile_with_multiple_files(client):
+    """
+    GIVEN: A flask application testing client
+    WHEN: A zip file with more than one file is uploaded
+    THEN: Ensure that the system responds with the appropriate error message and
+          status code
+    """
+    resp = client.post(
+        "/upload",
+        data={
+            "filetype": "average",
+            "qc_text_file": uploadable_file_object("multiple_files.zip")},
+        follow_redirects=True)
+    assert len(resp.history) == 1
+    redirect = resp.history[0]
+    assert redirect.status_code == 302
+    assert redirect.location == "/upload"
+
+    assert resp.status_code == 200
+    assert (b"Expected exactly one (1) member file within the uploaded zip "
+            b"file. Got 3 member files.") in resp.data
+
+def test_upload_zipfile_with_one_tsv_file(client):
+    """
+    GIVEN: A flask application testing client
+    WHEN: A zip file with exactly one valid TSV file is uploaded
+    THEN: Ensure that the system redirects to the correct next URL
+    """
+    resp = client.post("/upload", data={
+        "speciesid": 1,
+        "filetype": "average",
+        "qc_text_file": uploadable_file_object("average.tsv.zip")})
+    assert resp.status_code == 302
+    assert b"Redirecting..." in resp.data
+    assert (
+        b"/parse/parse?speciesid=1&amp;filename=average.tsv.zip&amp;filetype=average"
+        in resp.data)
+
+def test_upload_zipfile_with_one_non_tsv_file(client):
+    """
+    GIVEN: A flask application testing client
+    WHEN: A zip file with exactly one file, which is not a valid TSV, is
+          uploaded
+    THEN: Ensure that the system responds with the appropriate error message and
+          status code
+    """
+    resp = client.post(
+        "/upload",
+        data={
+            "filetype": "average",
+            "qc_text_file": uploadable_file_object("non_tsv.zip")},
+        follow_redirects=True)
+    assert len(resp.history) == 1
+    redirect = resp.history[0]
+    assert redirect.status_code == 302
+    assert redirect.location == "/upload"
+
+    assert resp.status_code == 200
+    assert (b"Expected the member text file in the uploaded zip file to "
+            b"be a tab-separated file.") in resp.data