Diffstat (limited to 'qc_app')
-rw-r--r--  qc_app/dbinsert.py  |  2
-rw-r--r--  qc_app/jobs.py      | 19
-rw-r--r--  qc_app/parse.py     | 70
-rw-r--r--  qc_app/samples.py   |  2
4 files changed, 47 insertions, 46 deletions
diff --git a/qc_app/dbinsert.py b/qc_app/dbinsert.py
index 31e4cea..f36651f 100644
--- a/qc_app/dbinsert.py
+++ b/qc_app/dbinsert.py
@@ -374,7 +374,7 @@ def insert_data():
                         app.config["JOBS_TTL_SECONDS"]),
                     redisurl, f"{app.config['UPLOAD_FOLDER']}/job_errors")
 
-            return redirect(url_for("dbinsert.insert_status", job_id=job["job_id"]))
+            return redirect(url_for("dbinsert.insert_status", job_id=job["jobid"]))
         return render_error(f"File '{filename}' no longer exists.")
     except AssertionError as aserr:
         return render_error(f"Missing data: {aserr.args[0]}")
diff --git a/qc_app/jobs.py b/qc_app/jobs.py
index 1491015..21889da 100644
--- a/qc_app/jobs.py
+++ b/qc_app/jobs.py
@@ -29,20 +29,20 @@ def job_key(namespaceprefix: str, jobid: Union[str, UUID]) -> str:
     """Build the key by appending it to the namespace prefix."""
     return f"{namespaceprefix}:{jobid}"
 
-def raise_jobnotfound(jobid: Union[str,UUID]):
+def raise_jobnotfound(rprefix:str, jobid: Union[str,UUID]):
     """Utility to raise a `NoSuchJobError`"""
-    raise JobNotFound(f"Could not retrieve job '{jobid}'.")
+    raise JobNotFound(f"Could not retrieve job '{jobid}' from '{rprefix}'.")
 
-def error_filename(job_id, error_dir):
+def error_filename(jobid, error_dir):
     "Compute the path of the file where errors will be dumped."
-    return f"{error_dir}/job_{job_id}.error"
+    return f"{error_dir}/job_{jobid}.error"
 
 def initialise_job(# pylint: disable=[too-many-arguments]
         rconn: Redis, rprefix: str, jobid: str, command: list, job_type: str,
         ttl_seconds: int = 86400, extra_meta: Optional[dict] = None) -> dict:
     "Initialise a job 'object' and put in on redis"
     the_job = {
-        "job_id": jobid, "command": shlex.join(command), "status": "pending",
+        "jobid": jobid, "command": shlex.join(command), "status": "pending",
         "percent": 0, "job-type": job_type, **(extra_meta or {})
     }
     rconn.hset(job_key(rprefix, jobid), mapping=the_job)
@@ -95,13 +95,13 @@ def launch_job(the_job: dict, redisurl: str, error_dir):
     if not os.path.exists(error_dir):
         os.mkdir(error_dir)
 
-    job_id = the_job["job_id"]
-    with open(error_filename(job_id, error_dir),
+    jobid = the_job["jobid"]
+    with open(error_filename(jobid, error_dir),
               "w",
               encoding="utf-8") as errorfile:
         subprocess.Popen( # pylint: disable=[consider-using-with]
             [sys.executable, "-m", "scripts.worker", redisurl, jobsnamespace(),
-             job_id],
+             jobid],
             stderr=errorfile,
             env={"PYTHONPATH": ":".join(sys.path)})
 
@@ -109,7 +109,8 @@ def launch_job(the_job: dict, redisurl: str, error_dir):
 
 def job(rconn: Redis, rprefix: str, jobid: Union[str,UUID]):
     "Retrieve the job"
-    thejob = rconn.hgetall(job_key(rprefix, jobid)) or raise_jobnotfound(jobid)
+    thejob = (rconn.hgetall(job_key(rprefix, jobid)) or
+              raise_jobnotfound(rprefix, jobid))
     return thejob
 
 def update_status(
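
With this change `jobs.job()` no longer returns a falsy value for a missing hash; it raises `jobs.JobNotFound`, so callers must catch the exception. A minimal sketch of the expected calling pattern, mirroring the `parse_status` rewrite below; it is illustrative only and not part of this commit, and the view name, import path and template arguments are assumptions:

    from flask import render_template
    from redis import Redis

    from qc_app import jobs  # assumed import path for the module changed above


    def show_job_status(rconn: Redis, job_id: str):
        """Illustrative only: fetch a job, falling back to an error page."""
        try:
            the_job = jobs.job(rconn, jobs.jobsnamespace(), job_id)
        except jobs.JobNotFound as _exc:
            # jobs.job() now raises instead of returning an empty mapping
            return render_template("no_such_job.html", job_id=job_id), 400
        # the job hash key was renamed from "job_id" to "jobid" in this commit
        return render_template("job_progress.html",
                               job_id=the_job["jobid"],
                               job_status=the_job["status"],
                               progress=float(the_job["percent"]))
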
diff --git a/qc_app/parse.py b/qc_app/parse.py
index d9be993..f0c53d1 100644
--- a/qc_app/parse.py
+++ b/qc_app/parse.py
@@ -76,47 +76,47 @@ def parse():
             redisurl,
             f"{app.config['UPLOAD_FOLDER']}/job_errors")
 
-    return redirect(url_for("parse.parse_status", job_id=job["job_id"]))
+    return redirect(url_for("parse.parse_status", job_id=job["jobid"]))
 
 @parsebp.route("/status/<job_id>", methods=["GET"])
 def parse_status(job_id: str):
     "Retrieve the status of the job"
     with Redis.from_url(app.config["REDIS_URL"], decode_responses=True) as rconn:
-        job = jobs.job(rconn, jobs.jobsnamespace(), job_id)
-
-    if job:
-        error_filename = jobs.error_filename(
-            job_id, f"{app.config['UPLOAD_FOLDER']}/job_errors")
-        if os.path.exists(error_filename):
-            stat = os.stat(error_filename)
-            if stat.st_size > 0:
-                return redirect(url_for("parse.fail", job_id=job_id))
-
-        job_id = job["job_id"]
-        progress = float(job["percent"])
-        status = job["status"]
-        filename = job.get("filename", "uploaded file")
-        errors = jsonpickle.decode(
-            job.get("errors", jsonpickle.encode(tuple())))
-        if status in ("success", "aborted"):
-            return redirect(url_for("parse.results", job_id=job_id))
-
-        if status == "parse-error":
+        try:
+            job = jobs.job(rconn, jobs.jobsnamespace(), job_id)
+        except jobs.JobNotFound as _exc:
+            return render_template("no_such_job.html", job_id=job_id), 400
+
+    error_filename = jobs.error_filename(
+        job_id, f"{app.config['UPLOAD_FOLDER']}/job_errors")
+    if os.path.exists(error_filename):
+        stat = os.stat(error_filename)
+        if stat.st_size > 0:
             return redirect(url_for("parse.fail", job_id=job_id))
 
-        app.jinja_env.globals.update(
-            isinvalidvalue=isinvalidvalue,
-            isduplicateheading=isduplicateheading)
-        return render_template(
-            "job_progress.html",
-            job_id = job_id,
-            job_status = status,
-            progress = progress,
-            message = job.get("message", ""),
-            job_name = f"Parsing '{filename}'",
-            errors=errors)
-
-    return render_template("no_such_job.html", job_id=job_id), 400
+    job_id = job["jobid"]
+    progress = float(job["percent"])
+    status = job["status"]
+    filename = job.get("filename", "uploaded file")
+    errors = jsonpickle.decode(
+        job.get("errors", jsonpickle.encode(tuple())))
+    if status in ("success", "aborted"):
+        return redirect(url_for("parse.results", job_id=job_id))
+
+    if status == "parse-error":
+        return redirect(url_for("parse.fail", job_id=job_id))
+
+    app.jinja_env.globals.update(
+        isinvalidvalue=isinvalidvalue,
+        isduplicateheading=isduplicateheading)
+    return render_template(
+        "job_progress.html",
+        job_id = job_id,
+        job_status = status,
+        progress = progress,
+        message = job.get("message", ""),
+        job_name = f"Parsing '{filename}'",
+        errors=errors)
 
 @parsebp.route("/results/<job_id>", methods=["GET"])
 def results(job_id: str):
@@ -135,7 +135,7 @@ def results(job_id: str):
             errors=errors,
             job_name = f"Parsing '{filename}'",
             user_aborted = job.get("user_aborted"),
-            job_id=job["job_id"])
+            job_id=job["jobid"])
 
     return render_template("no_such_job.html", job_id=job_id)
 
diff --git a/qc_app/samples.py b/qc_app/samples.py
index cf97b8d..55e94ab 100644
--- a/qc_app/samples.py
+++ b/qc_app/samples.py
@@ -255,7 +255,7 @@ def upload_samples():
                 redisuri,
                 f"{app.config['UPLOAD_FOLDER']}/job_errors")
             return redirect(url_for(
-                "samples.upload_status", job_id=the_job["job_id"]))
+                "samples.upload_status", job_id=the_job["jobid"]))
 
 @samples.route("/upload/status/<uuid:job_id>", methods=["GET"])
 def upload_status(job_id: uuid.UUID):
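
Since the job hash key changed from "job_id" to "jobid", every endpoint that initialises and launches a job must read the new key when building its redirect, as the dbinsert, parse and samples hunks above do. A rough sketch of that flow inside a view function; it is not part of the commit, and the Redis URL, error directory, command and job type are placeholder values:

    import uuid

    from flask import redirect, url_for
    from redis import Redis

    from qc_app import jobs  # assumed import path

    REDIS_URL = "redis://localhost:6379/0"    # placeholder
    ERROR_DIR = "/tmp/uploads/job_errors"     # placeholder


    def start_parse_job(command: list):
        """Illustrative only: initialise, launch, then redirect via 'jobid'."""
        with Redis.from_url(REDIS_URL, decode_responses=True) as rconn:
            the_job = jobs.initialise_job(
                rconn, jobs.jobsnamespace(), str(uuid.uuid4()), command,
                "parse", ttl_seconds=86400)
            jobs.launch_job(the_job, REDIS_URL, ERROR_DIR)
        # read "jobid" (not the old "job_id") off the job hash
        return redirect(url_for("parse.parse_status", job_id=the_job["jobid"]))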