author     Frederick Muriuki Muriithi  2024-01-24 12:29:10 +0300
committer  Frederick Muriuki Muriithi  2024-01-24 12:29:10 +0300
commit     07deef46a3f3ba53cc632a9381fb25c55e1017b1 (patch)
tree       a2f4fd1c9f9f69ed774ceff7d0cc5549f630bf9e /tests
parent     96c600723726c3391532d86d17183bea960ece57 (diff)
download   gn-uploader-07deef46a3f3ba53cc632a9381fb25c55e1017b1.tar.gz
Checks: Update code and tests to ensure all checks pass.
Diffstat (limited to 'tests')
-rw-r--r--  tests/conftest.py                            52
-rw-r--r--  tests/qc_app/test_entry.py                    3
-rw-r--r--  tests/qc_app/test_parse.py                   28
-rw-r--r--  tests/qc_app/test_uploads_with_zip_files.py   3
4 files changed, 60 insertions, 26 deletions
diff --git a/tests/conftest.py b/tests/conftest.py
index b7d3f8a..013c30d 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -10,7 +10,10 @@ import jsonpickle
from redis import Redis
from functional_tools import take
+
from qc_app import jobs, create_app
+from qc_app.jobs import JOBS_PREFIX
+
from quality_control.errors import InvalidValue, DuplicateHeading
@pytest.fixture(scope="session")
@@ -56,11 +59,26 @@ def client():
cleanup_redis(app.config["REDIS_URL"], test_prefix)
@pytest.fixture(scope="module")
+def db_url(client):#pylint: disable=[redefined-outer-name]
+ """Return the database URI"""
+ return client.application.config["SQL_URI"]
+
+@pytest.fixture(scope="module")
def redis_url(client):#pylint: disable=[redefined-outer-name]
"""Return the redis URI"""
return client.application.config["REDIS_URL"]
@pytest.fixture(scope="module")
+def redis_prefix(client):#pylint: disable=[redefined-outer-name]
+ """Return the redis prefix"""
+ return client.application.config["GNQC_REDIS_PREFIX"]
+
+@pytest.fixture(scope="module")
+def jobs_prefix(redis_prefix):#pylint: disable=[redefined-outer-name]
+ """Return the redis prefix for jobs."""
+ return f"{redis_prefix}:{JOBS_PREFIX}"
+
+@pytest.fixture(scope="module")
def redis_ttl(client):#pylint: disable=[redefined-outer-name]
"""Return the redis URI"""
return client.application.config["JOBS_TTL_SECONDS"]
@@ -81,11 +99,11 @@ def cleanup_job(rconn, jobid, thejob):
rconn.delete(jobs.job_key(jobs.jobsnamespace(), jobid))
@pytest.fixture(scope="function")
-def redis_conn_with_fresh_job(redis_url, redis_ttl, job_id):#pylint: disable=[redefined-outer-name]
+def redis_conn_with_fresh_job(redis_url, redis_ttl, jobs_prefix, job_id):#pylint: disable=[redefined-outer-name]
"redis connection with fresh, unprocessed job"
thejob = {
- "job_id": job_id, "command": "some_test_command", "job_type": "testjob",
- "ttl_seconds": redis_ttl, "extra_meta": {
+ "jobid": job_id, "command": "some_test_command", "job_type": "testjob",
+ "ttl_seconds": redis_ttl, "rprefix": jobs_prefix, "extra_meta": {
"filename": "/path/to/some/file.tsv", "percent": 0,
"status": "pending"}}
with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
@@ -94,11 +112,12 @@ def redis_conn_with_fresh_job(redis_url, redis_ttl, job_id):#pylint: disable=[re
cleanup_job(rconn, job_id, thejob)
@pytest.fixture(scope="function")
-def redis_conn_with_in_progress_job_no_errors(redis_url, redis_ttl, job_id):#pylint: disable=[redefined-outer-name]
+def redis_conn_with_in_progress_job_no_errors(redis_url, redis_ttl, jobs_prefix, job_id):#pylint: disable=[redefined-outer-name]
"redis connection with partially processed job, with no errors"
thejob = {
- "job_id": job_id, "command": "some_test_command",
- "ttl_seconds": redis_ttl, "job_type": "testjob", "extra_meta": {
+ "jobid": job_id, "command": "some_test_command",
+ "ttl_seconds": redis_ttl, "rprefix": jobs_prefix, "job_type": "testjob",
+ "extra_meta": {
"status": "Processing", "filename": "/path/to/some/file.tsv",
"percent": 32.242342}}
with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
@@ -107,11 +126,12 @@ def redis_conn_with_in_progress_job_no_errors(redis_url, redis_ttl, job_id):#pyl
cleanup_job(rconn, job_id, thejob)
@pytest.fixture(scope="function")
-def redis_conn_with_in_progress_job_some_errors(redis_url, redis_ttl, job_id): # pylint: disable=[redefined-outer-name]
+def redis_conn_with_in_progress_job_some_errors(redis_url, redis_ttl, jobs_prefix, job_id): # pylint: disable=[redefined-outer-name]
"redis connection with partially processed job, with some errors"
the_job = {
- "job_id": job_id, "command": "some_test_command",
- "ttl_seconds": redis_ttl, "job_type": "testjob", "extra_meta": {
+ "jobid": job_id, "command": "some_test_command",
+ "ttl_seconds": redis_ttl, "rprefix": jobs_prefix, "job_type": "testjob",
+ "extra_meta": {
"status": "Processing", "filename": "/path/to/some/file.tsv",
"percent": 45.34245, "errors": jsonpickle.encode((
DuplicateHeading(
@@ -124,11 +144,12 @@ def redis_conn_with_in_progress_job_some_errors(redis_url, redis_ttl, job_id): #
cleanup_job(rconn, job_id, the_job)
@pytest.fixture(scope="function")
-def redis_conn_with_completed_job_no_errors(redis_url, redis_ttl, job_id): # pylint: disable=[redefined-outer-name]
+def redis_conn_with_completed_job_no_errors(redis_url, redis_ttl, jobs_prefix, job_id): # pylint: disable=[redefined-outer-name]
"redis connection with completely processed job, with no errors"
the_job = {
- "job_id": job_id, "command": "some_test_command",
- "ttl_seconds": redis_ttl, "job_type": "testjob", "extra_meta": {
+ "jobid": job_id, "command": ["complete", "--woerror", "test-command"],
+ "ttl_seconds": redis_ttl, "rprefix": jobs_prefix, "job_type": "testjob",
+ "extra_meta": {
"status": "success", "filename": "/path/to/some/file.tsv",
"percent": 100, "errors": jsonpickle.encode(tuple())}}
with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
@@ -137,11 +158,12 @@ def redis_conn_with_completed_job_no_errors(redis_url, redis_ttl, job_id): # pyl
cleanup_job(rconn, job_id, the_job)
@pytest.fixture(scope="function")
-def redis_conn_with_completed_job_some_errors(redis_url, redis_ttl, job_id): # pylint: disable=[redefined-outer-name]
+def redis_conn_with_completed_job_some_errors(redis_url, redis_ttl, jobs_prefix, job_id): # pylint: disable=[redefined-outer-name]
"redis connection with completely processed job, with some errors"
the_job = {
- "job_id": job_id, "command": "some_test_command",
- "ttl_seconds": redis_ttl, "job_type": "testjob", "extra_meta": {
+ "jobid": job_id, "command": ["complete", "--werror", "test-command"],
+ "ttl_seconds": redis_ttl, "rprefix": jobs_prefix, "job_type": "testjob",
+ "extra_meta": {
"status": "success", "filename": "/path/to/some/file.tsv",
"percent": 100, "errors": jsonpickle.encode((
DuplicateHeading(
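
The net effect of the conftest.py changes is easier to see in one place. The sketch below is illustrative only and not part of the commit: it shows how the new jobs_prefix fixture composes the Redis namespace and what the reshaped job dictionary looks like (the key is now "jobid", and every job records an "rprefix"). The literal prefix and TTL values are placeholders; the real ones come from the app config (GNQC_REDIS_PREFIX, JOBS_TTL_SECONDS) and from qc_app.jobs.JOBS_PREFIX.

# Illustrative sketch, not part of this commit.
redis_prefix = "GNQC"          # stand-in for config["GNQC_REDIS_PREFIX"]
JOBS_PREFIX = "jobs"           # stand-in for qc_app.jobs.JOBS_PREFIX
jobs_prefix = f"{redis_prefix}:{JOBS_PREFIX}"

thejob = {
    "jobid": "934c55d8-396e-4959-90e1-2698e9205758",  # renamed from "job_id"
    "rprefix": jobs_prefix,    # new: the job carries its own Redis prefix
    "command": "some_test_command",
    "job_type": "testjob",
    "ttl_seconds": 300,        # stand-in for config["JOBS_TTL_SECONDS"]
    "extra_meta": {"filename": "/path/to/some/file.tsv",
                   "percent": 0,
                   "status": "pending"},
}
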
diff --git a/tests/qc_app/test_entry.py b/tests/qc_app/test_entry.py
index c0be26c..efc72a5 100644
--- a/tests/qc_app/test_entry.py
+++ b/tests/qc_app/test_entry.py
@@ -54,6 +54,7 @@ def test_post_with_correct_data(client):
"""
response = client.post(
"/", data={
+ "speciesid": 1,
"filetype": "average",
"qc_text_file": uploadable_file_object("no_data_errors.tsv")
})
@@ -61,7 +62,7 @@ def test_post_with_correct_data(client):
assert response.status_code == 302
assert b'Redirecting...' in response.data
assert (
- b'/parse/parse?filename=no_data_errors.tsv&filetype=average'
+ b'/parse/parse?speciesid=1&filename=no_data_errors.tsv&filetype=average'
in response.data)
@pytest.mark.parametrize(
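
As a quick illustration (placeholder values, not part of the commit): the upload form now carries a species selection, and the redirect to the parse endpoint echoes it back as a query parameter.

# Illustrative sketch of the new form payload and expected redirect target.
data = {
    "speciesid": 1,                        # new required form field
    "filetype": "average",
    "qc_text_file": "no_data_errors.tsv",  # the tests pass a file object here
}
expected_redirect = (
    "/parse/parse?speciesid=1&filename=no_data_errors.tsv&filetype=average")
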
diff --git a/tests/qc_app/test_parse.py b/tests/qc_app/test_parse.py
index f173e0a..5e55688 100644
--- a/tests/qc_app/test_parse.py
+++ b/tests/qc_app/test_parse.py
@@ -4,11 +4,18 @@ import sys
import redis
import pytest
-from qc_app.jobs import job
+from qc_app.jobs import job, jobsnamespace
+
from tests.conftest import uploadable_file_object
-def test_parse_with_existing_uploaded_file(
- client, redis_url, job_id, monkeypatch):
+def test_parse_with_existing_uploaded_file(#pylint: disable=[too-many-arguments]
+ client,
+ db_url,
+ redis_url,
+ redis_ttl,
+ jobs_prefix,
+ job_id,
+ monkeypatch):
"""
GIVEN: 1. A flask application testing client
2. A valid file, and filetype
@@ -19,27 +26,30 @@ def test_parse_with_existing_uploaded_file(
"""
monkeypatch.setattr("qc_app.jobs.uuid4", lambda : job_id)
# Upload a file
+ speciesid = 1
filename = "no_data_errors.tsv"
filetype = "average"
client.post(
"/", data={
+ "speciesid": speciesid,
"filetype": filetype,
"qc_text_file": uploadable_file_object(filename)})
# Checks
- resp = client.get(f"/parse/parse?filename={filename}&filetype={filetype}")
+ resp = client.get(f"/parse/parse?speciesid={speciesid}&filename={filename}"
+ f"&filetype={filetype}")
assert resp.status_code == 302
assert b'Redirecting...' in resp.data
assert b'/parse/status/934c55d8-396e-4959-90e1-2698e9205758' in resp.data
with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
- the_job = job(rconn, job_id)
+ the_job = job(rconn, jobsnamespace(), job_id)
- assert the_job["job_id"] == job_id
+ assert the_job["jobid"] == job_id
assert the_job["filename"] == filename
assert the_job["command"] == " ".join([
- sys.executable, "-m", "scripts.validate_file", filetype,
- f"{client.application.config['UPLOAD_FOLDER']}/{filename}", redis_url,
- job_id])
+ sys.executable, "-m", "scripts.validate_file", db_url, redis_url,
+ jobs_prefix, job_id, "--redisexpiry", str(redis_ttl), str(speciesid),
+ filetype, f"{client.application.config['UPLOAD_FOLDER']}/{filename}"])
@pytest.mark.parametrize(
"filename,uri,error_msgs",
diff --git a/tests/qc_app/test_uploads_with_zip_files.py b/tests/qc_app/test_uploads_with_zip_files.py
index 2a43a2d..fb101ad 100644
--- a/tests/qc_app/test_uploads_with_zip_files.py
+++ b/tests/qc_app/test_uploads_with_zip_files.py
@@ -36,12 +36,13 @@ def test_upload_zipfile_with_one_tsv_file(client):
THEN: Ensure that the system redirects to the correct next URL
"""
resp = client.post("/", data={
+ "speciesid": 1,
"filetype": "average",
"qc_text_file": uploadable_file_object("average.tsv.zip")})
assert resp.status_code == 302
assert b"Redirecting..." in resp.data
assert (
- b"/parse/parse?filename=average.tsv.zip&filetype=average"
+ b"/parse/parse?speciesid=1&filename=average.tsv.zip&filetype=average"
in resp.data)
def test_upload_zipfile_with_one_non_tsv_file(client):