about summary refs log tree commit diff
path: root/qc_app/parse.py
diff options
context:
space:
mode:
author    Frederick Muriuki Muriithi  2022-04-26 09:43:18 +0300
committer Frederick Muriuki Muriithi  2022-04-26 09:43:18 +0300
commit    e6895f5bac672d2e1d2a04fe8118fa55c3a40b91 (patch)
tree      3597796b13b3b321c8670aa71b080eabf3357b60 /qc_app/parse.py
parent    a5477c59452cdb01ab536f11eb5ed6fab015f3af (diff)
download  gn-uploader-e6895f5bac672d2e1d2a04fe8118fa55c3a40b91.tar.gz
Queue file parsing jobs
Enable the queuing of file parsing jobs, since the files could be
really large and take a long time to parse and present results.

* etc/default_config.py: Add default config for redis server
* manifest.scm: Add redis, and rq as dependencies
* qc_app/__init__.py
* qc_app/jobs.py: module to hold utilities for management of the jobs
* qc_app/parse.py: Enqueue the job - extract file-parsing code to
  callable function
* qc_app/templates/base.html: Enable addition of extra meta tags
* qc_app/templates/job_progress.html: template to display job progress
* qc_app/templates/no_such_job.html: template to indicate when a job
  id is invalid
* quality_control/parsing.py: Add the total size parsed so far
Diffstat (limited to 'qc_app/parse.py')
-rw-r--r--qc_app/parse.py84
1 file changed, 66 insertions, 18 deletions
diff --git a/qc_app/parse.py b/qc_app/parse.py
index aa88260..1ebe637 100644
--- a/qc_app/parse.py
+++ b/qc_app/parse.py
@@ -1,8 +1,16 @@
 """File parsing module"""
 import os
 
-from flask import request, url_for, redirect, Blueprint, current_app as app
+from rq import get_current_job
+from flask import (
+    request,
+    url_for,
+    redirect,
+    Blueprint,
+    render_template,
+    current_app as app)
 
+from . import jobs
 from quality_control.errors import ParseError
 from quality_control.parsing import (
     FileType,
@@ -12,11 +20,37 @@ from quality_control.parsing import (
 
 parsebp = Blueprint("parse", __name__)
 
def queued_parse(filepath, filetype):
    """Parse the file at `filepath` as a background (rq) worker job.

    Progress and final status are reported through the current job's
    `meta` dict so the web front-end can poll them:
      - "filename":  base name of the file being parsed
      - "status":    "in-progress" -> "Parsed N bytes" -> "success" | "parse-error"
      - "progress":  percentage of bytes consumed so far
      - "results":   success message, or error details on ParseError

    :param filepath: path to the uploaded file to parse (deleted on success)
    :param filetype: quality_control.parsing.FileType value for the file
    """
    job = get_current_job()
    job.meta["filename"] = os.path.basename(filepath)
    job.meta["status"] = "in-progress"
    job.save_meta()
    # Total size is used to turn "bytes parsed so far" into a percentage.
    filesize = os.stat(filepath).st_size
    try:
        parsed = parse_file(
            filepath, filetype, strain_names(parse_strains("strains.csv")))
        for _line, curr_size in parsed:
            job.meta["progress"] = (curr_size/filesize) * 100
            job.meta["status"] = f"Parsed {curr_size} bytes"
            job.save_meta()

        # Parsing completed without errors: the upload is no longer needed.
        os.remove(filepath)
        job.meta["progress"] = 100
        job.meta["status"] = "success"
        job.meta["results"] = {"message": "no errors found"}
        job.save_meta()
    except ParseError as pe:
        pe_dict = pe.args[0]
        job.meta["status"] = "parse-error"
        job.meta["results"] = {
            # BUG FIX: `filename` was an undefined name here (NameError the
            # moment a parse error occurred); derive it from `filepath` as
            # was done when setting job.meta["filename"] above.
            "filename": os.path.basename(filepath), "filetype": filetype,
            "position": pe_dict["position"]
        }
        job.save_meta()
+
 @parsebp.route("/parse", methods=["GET"])
 def parse():
     """Trigger file parsing"""
-    # TODO: figure out how to redirect with post
-    # TODO: figure out how to stat file and get: total number of lines
     # TODO: Maybe implement external process to parse the files
     errors = False
     filename = request.args.get("filename")
@@ -39,22 +73,36 @@ def parse():
 
     filetype = (
         FileType.AVERAGE if filetype == "average" else FileType.STANDARD_ERROR)
-    try:
-        parsed = parse_file(
-            filepath, filetype, strain_names(parse_strains("strains.csv")))
-        for line in parsed:
-            pass
-        os.remove(filepath)
-        return redirect(url_for(
-            "parse.success", filename=filename, filetype=filetype))
-    except ParseError as pe:
-        pe_dict = pe.args[0]
-        return redirect(url_for(
-            "parse.fail", filename = filename, filetype = filetype,
-            position = pe_dict["position"]))
+    job = jobs.enqueue_job("qc_app.parse.queued_parse", filepath, filetype)
+    job.meta["filename"] = filename
+    job.save_meta()
+    return redirect(url_for("parse.parse_status", job_id=job.get_id()))
+
@parsebp.route("/status/<job_id>", methods=["GET"])
def parse_status(job_id):
    """Show progress for the parse job `job_id`.

    Redirects to the results page when the job succeeded, to the failure
    page on a parse error, and renders the progress template while the
    job is still running. Unknown job ids render the "no such job" page.
    """
    job = jobs.job(job_id)
    if job:
        job_id = job.get_id()
        progress = job.meta.get("progress", 0)
        status = job.meta["status"]
        filename = job.meta.get("filename", "uploaded file")
        if status == "success":
            return redirect(url_for("parse.results", job_id=job_id))

        if status == "parse-error":
            return redirect(url_for("parse.fail", job_id=job_id))

        return render_template(
            "job_progress.html",
            job_id = job_id,
            job_status = status,
            progress = progress,
            # BUG FIX: the f-string had no placeholder, so every job was
            # shown as "Parsing '(unknown)'" and the `filename` computed
            # above was never used. Interpolate it as intended.
            job_name = f"Parsing '{filename}'")

    return render_template("no_such_job.html", job_id=job_id)
 
-@parsebp.route("/success", methods=["GET"])
-def success():
@parsebp.route("/results/<job_id>", methods=["GET"])
def results(job_id):
    """Placeholder results page shown after a successful parse job."""
    # job_id is required by the URL rule; the stub does not use it yet.
    message = "STUB: Parse success!!!"
    return message