Diffstat (limited to 'tests/conftest.py')
 tests/conftest.py | 52 +++++++++++++++++++++++++++++++++++---------------
 1 file changed, 37 insertions(+), 15 deletions(-)
diff --git a/tests/conftest.py b/tests/conftest.py
index b7d3f8a..013c30d 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -10,7 +10,10 @@ import jsonpickle
from redis import Redis
from functional_tools import take
+
from qc_app import jobs, create_app
+from qc_app.jobs import JOBS_PREFIX
+
from quality_control.errors import InvalidValue, DuplicateHeading
@pytest.fixture(scope="session")
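The JOBS_PREFIX constant imported above is combined with the application's GNQC_REDIS_PREFIX (see the jobs_prefix fixture further down) to namespace job keys in Redis. A minimal sketch of the assumed key layout; the exact behaviour of jobs.jobsnamespace() and jobs.job_key() is not shown in this diff:

# Sketch only: assumes the jobs namespace is "<GNQC_REDIS_PREFIX>:<JOBS_PREFIX>"
# and that job_key() appends the job id with a ":" separator.
example_redis_prefix = "GNQC"  # placeholder for app.config["GNQC_REDIS_PREFIX"]
jobs_namespace = f"{example_redis_prefix}:{JOBS_PREFIX}"
example_job_key = f"{jobs_namespace}:some-job-id"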
@@ -56,11 +59,26 @@ def client():
cleanup_redis(app.config["REDIS_URL"], test_prefix)
@pytest.fixture(scope="module")
+def db_url(client):#pylint: disable=[redefined-outer-name]
+ """Return the database URI"""
+ return client.application.config["SQL_URI"]
+
+@pytest.fixture(scope="module")
def redis_url(client):#pylint: disable=[redefined-outer-name]
"""Return the redis URI"""
return client.application.config["REDIS_URL"]
@pytest.fixture(scope="module")
+def redis_prefix(client):#pylint: disable=[redefined-outer-name]
+ """Return the redis prefix"""
+ return client.application.config["GNQC_REDIS_PREFIX"]
+
+@pytest.fixture(scope="module")
+def jobs_prefix(redis_prefix):#pylint: disable=[redefined-outer-name]
+ """Return the redis prefix for jobs."""
+ return f"{redis_prefix}:{JOBS_PREFIX}"
+
+@pytest.fixture(scope="module")
def redis_ttl(client):#pylint: disable=[redefined-outer-name]
"""Return the redis URI"""
return client.application.config["JOBS_TTL_SECONDS"]
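The new db_url, redis_prefix and jobs_prefix fixtures simply expose configuration values to the tests. A hypothetical consumer, illustrative only (these tests are not part of the diff):

def test_jobs_prefix_extends_redis_prefix(jobs_prefix, redis_prefix):
    """Illustrative only: jobs_prefix is the configured redis prefix plus JOBS_PREFIX."""
    assert jobs_prefix.startswith(f"{redis_prefix}:")

def test_db_url_is_configured(db_url):
    """Illustrative only: db_url exposes the application's SQL_URI setting."""
    assert isinstance(db_url, str) and db_url != ""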
@@ -81,11 +99,11 @@ def cleanup_job(rconn, jobid, thejob):
rconn.delete(jobs.job_key(jobs.jobsnamespace(), jobid))
@pytest.fixture(scope="function")
-def redis_conn_with_fresh_job(redis_url, redis_ttl, job_id):#pylint: disable=[redefined-outer-name]
+def redis_conn_with_fresh_job(redis_url, redis_ttl, jobs_prefix, job_id):#pylint: disable=[redefined-outer-name]
"redis connection with fresh, unprocessed job"
thejob = {
- "job_id": job_id, "command": "some_test_command", "job_type": "testjob",
- "ttl_seconds": redis_ttl, "extra_meta": {
+ "jobid": job_id, "command": "some_test_command", "job_type": "testjob",
+ "ttl_seconds": redis_ttl, "rprefix": jobs_prefix, "extra_meta": {
"filename": "/path/to/some/file.tsv", "percent": 0,
"status": "pending"}}
with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
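The fixture body that follows (elided by the hunk boundary) presumably writes thejob into Redis under the prefixed job key before yielding the connection. A rough sketch of that pattern; the hset call and the per-field serialisation are assumptions, not the actual implementation:

# Assumed pattern only; the real fixture body is outside this hunk.
def _store_job(rconn, thejob):
    key = jobs.job_key(jobs.jobsnamespace(), thejob["jobid"])
    rconn.hset(key, mapping={
        field: (value if isinstance(value, (str, int, float))
                else jsonpickle.encode(value))
        for field, value in thejob.items()})
    rconn.expire(key, thejob["ttl_seconds"])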
@@ -94,11 +112,12 @@ def redis_conn_with_fresh_job(redis_url, redis_ttl, job_id):#pylint: disable=[re
cleanup_job(rconn, job_id, thejob)
@pytest.fixture(scope="function")
-def redis_conn_with_in_progress_job_no_errors(redis_url, redis_ttl, job_id):#pylint: disable=[redefined-outer-name]
+def redis_conn_with_in_progress_job_no_errors(redis_url, redis_ttl, jobs_prefix, job_id):#pylint: disable=[redefined-outer-name]
"redis connection with partially processed job, with no errors"
thejob = {
- "job_id": job_id, "command": "some_test_command",
- "ttl_seconds": redis_ttl, "job_type": "testjob", "extra_meta": {
+ "jobid": job_id, "command": "some_test_command",
+ "ttl_seconds": redis_ttl, "rprefix": jobs_prefix, "job_type": "testjob",
+ "extra_meta": {
"status": "Processing", "filename": "/path/to/some/file.tsv",
"percent": 32.242342}}
with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
@@ -107,11 +126,12 @@ def redis_conn_with_in_progress_job_no_errors(redis_url, redis_ttl, job_id):#pyl
cleanup_job(rconn, job_id, thejob)
@pytest.fixture(scope="function")
-def redis_conn_with_in_progress_job_some_errors(redis_url, redis_ttl, job_id): # pylint: disable=[redefined-outer-name]
+def redis_conn_with_in_progress_job_some_errors(redis_url, redis_ttl, jobs_prefix, job_id): # pylint: disable=[redefined-outer-name]
"redis connection with partially processed job, with some errors"
the_job = {
- "job_id": job_id, "command": "some_test_command",
- "ttl_seconds": redis_ttl, "job_type": "testjob", "extra_meta": {
+ "jobid": job_id, "command": "some_test_command",
+ "ttl_seconds": redis_ttl, "rprefix": jobs_prefix, "job_type": "testjob",
+ "extra_meta": {
"status": "Processing", "filename": "/path/to/some/file.tsv",
"percent": 45.34245, "errors": jsonpickle.encode((
DuplicateHeading(
@@ -124,11 +144,12 @@ def redis_conn_with_in_progress_job_some_errors(redis_url, redis_ttl, job_id): #
cleanup_job(rconn, job_id, the_job)
@pytest.fixture(scope="function")
-def redis_conn_with_completed_job_no_errors(redis_url, redis_ttl, job_id): # pylint: disable=[redefined-outer-name]
+def redis_conn_with_completed_job_no_errors(redis_url, redis_ttl, jobs_prefix, job_id): # pylint: disable=[redefined-outer-name]
"redis connection with completely processed job, with no errors"
the_job = {
- "job_id": job_id, "command": "some_test_command",
- "ttl_seconds": redis_ttl, "job_type": "testjob", "extra_meta": {
+ "jobid": job_id, "command": ["complete", "--woerror", "test-command"],
+ "ttl_seconds": redis_ttl, "rprefix": jobs_prefix, "job_type": "testjob",
+ "extra_meta": {
"status": "success", "filename": "/path/to/some/file.tsv",
"percent": 100, "errors": jsonpickle.encode(tuple())}}
with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
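Because the "errors" field is stored jsonpickle-encoded, a test reading one of these job dictionaries back can round-trip it. An illustrative check (not an existing test):

errors = jsonpickle.decode(the_job["extra_meta"]["errors"])
assert isinstance(errors, tuple)
assert all(isinstance(err, (InvalidValue, DuplicateHeading)) for err in errors)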
@@ -137,11 +158,12 @@ def redis_conn_with_completed_job_no_errors(redis_url, redis_ttl, job_id): # pyl
cleanup_job(rconn, job_id, the_job)
@pytest.fixture(scope="function")
-def redis_conn_with_completed_job_some_errors(redis_url, redis_ttl, job_id): # pylint: disable=[redefined-outer-name]
+def redis_conn_with_completed_job_some_errors(redis_url, redis_ttl, jobs_prefix, job_id): # pylint: disable=[redefined-outer-name]
"redis connection with completely processed job, with some errors"
the_job = {
- "job_id": job_id, "command": "some_test_command",
- "ttl_seconds": redis_ttl, "job_type": "testjob", "extra_meta": {
+ "jobid": job_id, "command": ["complete", "--werror", "test-command"],
+ "ttl_seconds": redis_ttl, "rprefix": jobs_prefix, "job_type": "testjob",
+ "extra_meta": {
"status": "success", "filename": "/path/to/some/file.tsv",
"percent": 100, "errors": jsonpickle.encode((
DuplicateHeading(