-rw-r--r--  gn3/jobs.py                    13
-rw-r--r--  gn3/json_encoders_decoders.py  31
-rw-r--r--  scripts/search_phenotypes.py   12
3 files changed, 47 insertions, 9 deletions
diff --git a/gn3/jobs.py b/gn3/jobs.py
index 8854a61..1af63f7 100644
--- a/gn3/jobs.py
+++ b/gn3/jobs.py
@@ -5,9 +5,10 @@ from uuid import UUID, uuid4
 from datetime import datetime
 from redis import Redis
-
 from pymonad.either import Left, Right, Either
+from gn3 import json_encoders_decoders as jed
+
 JOBS_NAMESPACE = "GN3::JOBS"
 class InvalidCommand(Exception):
@@ -22,8 +23,8 @@ def job(redisconn: Redis, job_id: UUID) -> Either:
     the_job = redisconn.hgetall(job_key(job_id))
     if the_job:
         return Right({
-            **the_job,
-            "search_results": json.loads(the_job["search_results"])
+            key: json.loads(value, object_hook=jed.custom_json_decoder)
+            for key, value in the_job.items()
         })
     return Left({
         "error": "NotFound",
@@ -48,8 +49,10 @@ def create_job(redisconn: Redis, job_details: dict[str, Any]) -> UUID:
     def __create__(_job_command):
         job_id = job_details.get("job_id", uuid4())
         redisconn.hset(job_key(job_id), mapping={
-            **job_details, "job_id": job_id, "created": datetime.now(), "stdout": "",
-            "stderr": "", "status": "queued"
+            key: json.dumps(value, cls=jed.CustomJSONEncoder) for key, value in {
+                **job_details, "job_id": job_id, "created": datetime.now(),
+                "status": "queued"
+            }.items()
         })
         return job_id
     def __raise__(err):
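
With this change every field in the job's Redis hash is JSON-encoded on write and decoded again on read, instead of only decoding "search_results". A minimal sketch of that symmetry, with a plain dict standing in for the Redis hash and illustrative field names:

import json
from uuid import uuid4
from datetime import datetime

from gn3 import json_encoders_decoders as jed

details = {"job_id": uuid4(), "created": datetime.now(), "status": "queued"}
# What create_job() hands to hset(): every value becomes a JSON string.
stored = {key: json.dumps(value, cls=jed.CustomJSONEncoder)
          for key, value in details.items()}
# What job() rebuilds from hgetall(): the original Python types come back.
loaded = {key: json.loads(value, object_hook=jed.custom_json_decoder)
          for key, value in stored.items()}
assert loaded == details
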
diff --git a/gn3/json_encoders_decoders.py b/gn3/json_encoders_decoders.py
new file mode 100644
index 0000000..be15b34
--- /dev/null
+++ b/gn3/json_encoders_decoders.py
@@ -0,0 +1,31 @@
+"""Custom json encoders for various purposes."""
+import json
+from uuid import UUID
+from datetime import datetime
+
+__ENCODERS__ = {
+    UUID: lambda obj: {"__type": "UUID", "__value": str(obj)},
+    datetime: lambda obj: {"__type": "DATETIME", "__value": obj.isoformat()}
+}
+
+class CustomJSONEncoder(json.JSONEncoder):
+ """
+ A custom JSON encoder to handle cases where the default encoder fails.
+ """
+ def default(self, obj):# pylint: disable=[arguments-renamed]
+ """Return a serializable object for `obj`."""
+ if type(obj) in __ENCODERS__:
+ return __ENCODERS__[type(obj)](obj)
+ return json.JSONEncoder.default(self, obj)
+
+
+__DECODERS__ = {
+ "UUID": UUID,
+ "DATETIME": datetime.fromisoformat
+}
+
+def custom_json_decoder(obj_dict):
+ """Decode custom types"""
+ if "__type" in obj_dict:
+ return __DECODERS__[obj_dict["__type"]](obj_dict["__value"])
+ return obj_dict
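
A short round-trip sketch of the tagged JSON these helpers produce; the UUID value below is illustrative:

import json
from uuid import UUID

from gn3 import json_encoders_decoders as jed

job_id = UUID("12345678-1234-5678-1234-567812345678")  # illustrative value
text = json.dumps(job_id, cls=jed.CustomJSONEncoder)
# text == '{"__type": "UUID", "__value": "12345678-1234-5678-1234-567812345678"}'
assert json.loads(text, object_hook=jed.custom_json_decoder) == job_id
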
diff --git a/scripts/search_phenotypes.py b/scripts/search_phenotypes.py
index 4533cce..7049834 100644
--- a/scripts/search_phenotypes.py
+++ b/scripts/search_phenotypes.py
@@ -44,6 +44,10 @@ def remove_linked(search_results, linked: tuple):
"""Remove any item that has been already linked to a user group."""
return (item for item in search_results if __filter_object__(item) not in linked)
+def update_status(redisconn: redis.Redis, redisname, status: str):
+ """Update the status of the search."""
+ redisconn.hset(redisname, "status", json.dumps(status))
+
def update_search_results(redisconn: redis.Redis, redisname: str,
results: tuple[dict[str, Any], ...]):
"""Save the results to redis db."""
@@ -83,7 +87,7 @@ def search(# pylint: disable=[too-many-arguments, too-many-locals]
     with (authdb.connection(auth_db_uri) as authconn,
           gn3db.database_connection(gn3_db_uri) as gn3conn,
           redis.Redis.from_url(redis_uri, decode_responses=True) as redisconn):
-        redisconn.hset(redisname, "status", "started")
+        update_status(redisconn, redisname, "started")
         update_search_results(redisconn, redisname, tuple()) # init search results
         try:
             search_query = f"species:{species}" + (
@@ -109,11 +113,11 @@ def search(# pylint: disable=[too-many-arguments, too-many-locals]
         except NoSearchResults as _nsr:
             pass
         except Exception as _exc: # pylint: disable=[broad-except]
-            redisconn.hset(redisname, "status", "failed")
-            redisconn.hset(redisname, "exception", traceback.format_exc())
+            update_status(redisconn, redisname, "failed")
+            redisconn.hset(redisname, "exception", json.dumps(traceback.format_exc()))
             expire_redis_results(redisconn, redisname)
             return 1
 
-        redisconn.hset(redisname, "status", "completed")
+        update_status(redisconn, redisname, "completed")
         expire_redis_results(redisconn, redisname)
         return 0
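
Since every field the script writes is now a JSON string ("status", "exception", and the search results), anything reading the hash back must json.loads each field; gn3.jobs.job already does this for a whole job hash. A small sketch of that read side; the Redis URL and hash key below are illustrative:

import json
import redis

REDIS_URI = "redis://localhost:6379"        # illustrative
REDISNAME = "sample:phenotype:search:job"   # illustrative; use the key the search job was given

with redis.Redis.from_url(REDIS_URI, decode_responses=True) as conn:
    status = json.loads(conn.hget(REDISNAME, "status"))  # e.g. "started", "failed" or "completed"
    if status == "failed":
        # the traceback was json.dumps()-ed too, so decode it the same way
        print(json.loads(conn.hget(REDISNAME, "exception")))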