author      Frederick Muriuki Muriithi    2024-01-23 12:08:53 +0300
committer   Frederick Muriuki Muriithi    2024-01-23 12:11:12 +0300
commit      340376f2e9abba95b3c34495d6fd1c0b38235956 (patch)
tree        a3f2de9ec6dd6ad833592aa4e773b4de09652ee7 /tests
parent      105934ca5cf253c0f002a007ee57460a7632e2b8 (diff)
download    gn-uploader-340376f2e9abba95b3c34495d6fd1c0b38235956.tar.gz
Tests: Use prefixes in tests rather than starting a redis server
It's easier and more robust to put the data under a prefix within a
redis database than to try to run a redis server, especially within
the test environment. This commit updates the tests to use a redis
prefix instead.
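The pattern the diff below adopts: instead of booting a throwaway redis-server and hunting for a free port, each test run writes all of its keys under a unique prefix in an already-running redis database, then deletes that prefix on teardown. A minimal standalone sketch of the idea (the function names and the batch size of 500 are illustrative, not this repository's helpers):

    import uuid
    from hashlib import sha256

    from redis import Redis

    def unique_test_prefix() -> str:
        """Build a prefix that cannot collide across concurrent test runs."""
        return sha256(f"test:{uuid.uuid4()}".encode("utf8")).hexdigest()

    def cleanup_prefix(redisuri: str, prefix: str, batchsize: int = 500):
        """Delete every key written under `prefix`, in batches."""
        with Redis.from_url(redisuri, decode_responses=True) as rconn:
            # scan_iter walks the keyspace incrementally, so it does not
            # block the server the way a single `KEYS prefix:*` call would.
            batch = []
            for key in rconn.scan_iter(f"{prefix}:*"):
                batch.append(key)
                if len(batch) >= batchsize:
                    rconn.delete(*batch)
                    batch = []
            if batch:
                rconn.delete(*batch)

Teardown then becomes plain key deletion rather than subprocess management, and parallel test sessions sharing one database cannot step on each other's keys.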
Diffstat (limited to 'tests')
-rw-r--r--    tests/conftest.py    142
1 file changed, 77 insertions, 65 deletions
diff --git a/tests/conftest.py b/tests/conftest.py
index d441cb9..b7d3f8a 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,16 +1,16 @@
 """Set up fixtures for tests"""
 import io
 import os
-import shutil
-import socket
-import subprocess
-from contextlib import closing
+import uuid
+from hashlib import sha256
 
 import redis
 import pytest
 import jsonpickle
+from redis import Redis
 
-from qc_app import create_app
+from functional_tools import take
+from qc_app import jobs, create_app
 from quality_control.errors import InvalidValue, DuplicateHeading
 
 @pytest.fixture(scope="session")
@@ -30,30 +30,40 @@ def strains():
     return tuple(stainnames)
 
-def is_port_in_use(port: int) -> bool:
-    "Check whether `port` is in use"
-    with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sckt:
-        return sckt.connect_ex(("localhost", port)) == 0
-
-@pytest.fixture(scope="session")
-def redis_url():
-    "Fixture to launch a new redis instance and return appropriate URI"
-    port = next(# pylint: disable=[stop-iteration-return]
-        port for port in range(6379,65535) if not is_port_in_use(port))
-    command = [shutil.which("redis-server"), "--port", str(port)]
-    process = subprocess.Popen(command) # pylint: disable=[consider-using-with]
-    yield f"redis://localhost:{port}"
-    process.kill()
+def cleanup_redis(redisuri: str, prefix: str):
+    """Delete all keys with given prefix"""
+    with Redis.from_url(redisuri, decode_responses=True) as rconn:
+        cur = rconn.scan_iter(f"{prefix}:*")
+        while True:
+            batch = take(cur, 500)
+            if len(batch) <= 0:
+                break
+            rconn.delete(*batch)
 
 @pytest.fixture(scope="module")
-def client(redis_url): # pylint: disable=[redefined-outer-name]
+def client():
     "Fixture for test client"
     app = create_app(os.environ.get("QCAPP_INSTANCE_PATH"))
+    test_prefix = sha256(f"test:{uuid.uuid4()}".encode("utf8")).hexdigest()
     app.config.update({
-        "REDIS_URL": redis_url,
-        "TESTING": True
+        "TESTING": True,
+        "GNQC_REDIS_PREFIX": f"{test_prefix}:GNQC",
+        "JOBS_TTL_SECONDS": 2 * 60 * 60  # 2 hours
     })
-    yield app.test_client()
+    with app.app_context():
+        yield app.test_client()
+
+    cleanup_redis(app.config["REDIS_URL"], test_prefix)
+
+@pytest.fixture(scope="module")
+def redis_url(client):  # pylint: disable=[redefined-outer-name]
+    """Return the redis URI"""
+    return client.application.config["REDIS_URL"]
+
+@pytest.fixture(scope="module")
+def redis_ttl(client):  # pylint: disable=[redefined-outer-name]
+    """Return the jobs TTL in seconds"""
+    return client.application.config["JOBS_TTL_SECONDS"]
 
 def uploadable_file_object(filename):
     "Return an 'object' representing the file to be uploaded."
@@ -65,80 +75,82 @@ def job_id():
     "Return a default UUID4 string as the 'job_id' for test purposes"
     return "934c55d8-396e-4959-90e1-2698e9205758"
 
+def cleanup_job(rconn, jobid, thejob):
+    """Delete job from redis."""
+    rconn.hdel(jobs.job_key(jobs.jobsnamespace(), jobid), *thejob.keys())
+    rconn.delete(jobs.job_key(jobs.jobsnamespace(), jobid))
+
 @pytest.fixture(scope="function")
-def redis_conn_with_fresh_job(redis_url, job_id): # pylint: disable=[redefined-outer-name]
+def redis_conn_with_fresh_job(redis_url, redis_ttl, job_id):  # pylint: disable=[redefined-outer-name]
     "redis connection with fresh, unprocessed job"
-    the_job = {
-        "job_id": job_id, "command": "some_test_command", "status": "pending",
-        "filename": "/path/to/some/file.tsv", "percent": 0
-    }
+    thejob = {
+        "job_id": job_id, "command": "some_test_command", "job_type": "testjob",
+        "ttl_seconds": redis_ttl, "extra_meta": {
+            "filename": "/path/to/some/file.tsv", "percent": 0,
+            "status": "pending"}}
     with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
-        rconn.hset(name=job_id, mapping=the_job)
+        jobs.initialise_job(rconn, **thejob)
         yield rconn
-        rconn.hdel(job_id, *the_job.keys())
-        rconn.delete(job_id)
+        cleanup_job(rconn, job_id, thejob)
 
 @pytest.fixture(scope="function")
-def redis_conn_with_in_progress_job_no_errors(redis_url, job_id): # pylint: disable=[redefined-outer-name]
+def redis_conn_with_in_progress_job_no_errors(redis_url, redis_ttl, job_id):  # pylint: disable=[redefined-outer-name]
     "redis connection with partially processed job, with no errors"
-    the_job = {
+    thejob = {
         "job_id": job_id, "command": "some_test_command",
-        "status": "Processing", "filename": "/path/to/some/file.tsv",
-        "percent": 32.242342
-    }
+        "ttl_seconds": redis_ttl, "job_type": "testjob", "extra_meta": {
+            "status": "Processing", "filename": "/path/to/some/file.tsv",
+            "percent": 32.242342}}
     with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
-        rconn.hset(name=job_id, mapping=the_job)
+        jobs.initialise_job(rconn, **thejob)
         yield rconn
-        rconn.hdel(job_id, *the_job.keys())
-        rconn.delete(job_id)
+        cleanup_job(rconn, job_id, thejob)
 
 @pytest.fixture(scope="function")
-def redis_conn_with_in_progress_job_some_errors(redis_url, job_id): # pylint: disable=[redefined-outer-name]
+def redis_conn_with_in_progress_job_some_errors(redis_url, redis_ttl, job_id):  # pylint: disable=[redefined-outer-name]
     "redis connection with partially processed job, with some errors"
     the_job = {
         "job_id": job_id, "command": "some_test_command",
-        "status": "Processing", "filename": "/path/to/some/file.tsv",
-        "percent": 45.34245, "errors": jsonpickle.encode((
-            DuplicateHeading(
-                1, (5,13,19), "DupHead", "Heading 'DupHead' is repeated"),
-            InvalidValue(45, 2, "ohMy", "Invalid value 'ohMy'")))
+        "ttl_seconds": redis_ttl, "job_type": "testjob", "extra_meta": {
+            "status": "Processing", "filename": "/path/to/some/file.tsv",
+            "percent": 45.34245, "errors": jsonpickle.encode((
+                DuplicateHeading(
+                    1, (5,13,19), "DupHead", "Heading 'DupHead' is repeated"),
+                InvalidValue(45, 2, "ohMy", "Invalid value 'ohMy'")))}
     }
     with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
-        rconn.hset(name=job_id, mapping=the_job)
+        jobs.initialise_job(rconn, **the_job)
        yield rconn
-        rconn.hdel(job_id, *the_job.keys())
-        rconn.delete(job_id)
+        cleanup_job(rconn, job_id, the_job)
 
 @pytest.fixture(scope="function")
-def redis_conn_with_completed_job_no_errors(redis_url, job_id): # pylint: disable=[redefined-outer-name]
+def redis_conn_with_completed_job_no_errors(redis_url, redis_ttl, job_id):  # pylint: disable=[redefined-outer-name]
     "redis connection with completely processed job, with no errors"
     the_job = {
         "job_id": job_id, "command": "some_test_command",
-        "status": "success", "filename": "/path/to/some/file.tsv",
-        "percent": 100, "errors": jsonpickle.encode(tuple())
-    }
+        "ttl_seconds": redis_ttl, "job_type": "testjob", "extra_meta": {
+            "status": "success", "filename": "/path/to/some/file.tsv",
+            "percent": 100, "errors": jsonpickle.encode(tuple())}}
     with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
-        rconn.hset(name=job_id, mapping=the_job)
+        jobs.initialise_job(rconn, **the_job)
         yield rconn
-        rconn.hdel(job_id, *the_job.keys())
-        rconn.delete(job_id)
+        cleanup_job(rconn, job_id, the_job)
 
 @pytest.fixture(scope="function")
-def redis_conn_with_completed_job_some_errors(redis_url, job_id): # pylint: disable=[redefined-outer-name]
+def redis_conn_with_completed_job_some_errors(redis_url, redis_ttl, job_id):  # pylint: disable=[redefined-outer-name]
     "redis connection with completely processed job, with some errors"
     the_job = {
         "job_id": job_id, "command": "some_test_command",
-        "status": "success", "filename": "/path/to/some/file.tsv",
-        "percent": 100, "errors": jsonpickle.encode((
-            DuplicateHeading(
-                1, (5,13,19), "DupHead", "Heading 'DupHead' is repeated"),
-            InvalidValue(45, 2, "ohMy", "Invalid value 'ohMy'")))
-    }
+        "ttl_seconds": redis_ttl, "job_type": "testjob", "extra_meta": {
+            "status": "success", "filename": "/path/to/some/file.tsv",
+            "percent": 100, "errors": jsonpickle.encode((
+                DuplicateHeading(
+                    1, (5,13,19), "DupHead", "Heading 'DupHead' is repeated"),
+                InvalidValue(45, 2, "ohMy", "Invalid value 'ohMy'")))}}
     with redis.Redis.from_url(redis_url, decode_responses=True) as rconn:
-        rconn.hset(name=job_id, mapping=the_job)
+        jobs.initialise_job(rconn, **the_job)
         yield rconn
-        rconn.hdel(job_id, *the_job.keys())
-        rconn.delete(job_id)
+        cleanup_job(rconn, job_id, the_job)
 
 @pytest.fixture(scope="function")
 def uploads_dir(client): # pylint: disable=[redefined-outer-name]
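For orientation, a test consuming the rewritten fixtures receives a live redis connection whose job data already sits under the test prefix. A hypothetical example, assuming (as the cleanup_job helper above implies) that jobs.initialise_job stores the mapping as a redis hash at jobs.job_key(jobs.jobsnamespace(), job_id):

    from qc_app import jobs

    def test_fresh_job_is_stored(redis_conn_with_fresh_job, job_id):
        """Hypothetical test: the fixture should have stored the job hash."""
        rconn = redis_conn_with_fresh_job
        stored = rconn.hgetall(jobs.job_key(jobs.jobsnamespace(), job_id))
        # "command" is one of the fields the fixture passes to initialise_job.
        assert stored["command"] == "some_test_command"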