Diffstat (limited to 'qc_app/parse.py')
-rw-r--r--  qc_app/parse.py  90
1 file changed, 66 insertions(+), 24 deletions(-)
diff --git a/qc_app/parse.py b/qc_app/parse.py
index 1ebe637..ffdd123 100644
--- a/qc_app/parse.py
+++ b/qc_app/parse.py
@@ -16,37 +16,37 @@ from quality_control.parsing import (
     FileType,
     parse_file,
     strain_names,
+    parse_errors,
     parse_strains)
 
 parsebp = Blueprint("parse", __name__)
 
 def queued_parse(filepath, filetype):
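+    # Runs inside an RQ worker process: build the Flask app and push an
+    # application context so application-dependent code works here.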
+    from qc_app import create_app, instance_path
+    app = create_app(instance_path())
+    app.app_context().push()
+
     job = get_current_job()
-    job.meta["filename"] = os.path.basename(filepath)
-    job.meta["status"] = "in-progress"
+    jobs.update_meta(job, status = "in-progress", progress = 0)
     job.save_meta()
-    filesize = os.stat(filepath).st_size
     try:
         parsed = parse_file(
             filepath, filetype, strain_names(parse_strains("strains.csv")))
         for line, curr_size in parsed:
-            job.meta["progress"] = (curr_size/filesize) * 100
-            job.meta["status"] = f"Parsed {curr_size} bytes"
-            job.save_meta()
+            jobs.update_meta(
+                job, progress = (curr_size/job.meta["filesize"]) * 100,
+                message = f"Parsed {curr_size} bytes")
 
         os.remove(filepath)
-        job.meta["progress"] = 100
-        job.meta["status"] = "success"
-        job.meta["results"] = {"message": "no errors found"}
-        job.save_meta()
+        jobs.update_meta(
+            job, progress = 100, status = "success", message = "no errors found")
     except ParseError as pe:
         pe_dict = pe.args[0]
-        job.meta["status"] = "parse-error"
-        job.meta["results"] = {
-            "filename": filename, "filetype": filetype,
-            "position": pe_dict["position"]
-        }
-        job.save_meta()
+        jobs.update_meta(
+            job, status = "parse-error", results = {
+                "filename": os.path.basename(filepath), "filetype": filetype,
+                "position": pe_dict["position"]
+            })
 
 @parsebp.route("/parse", methods=["GET"])
 def parse():
@@ -73,9 +73,10 @@ def parse():
 
     filetype = (
         FileType.AVERAGE if filetype == "average" else FileType.STANDARD_ERROR)
-    job = jobs.enqueue_job("qc_app.parse.queued_parse", filepath, filetype)
-    job.meta["filename"] = filename
-    job.save_meta()
+    job = jobs.enqueue_job(
+        "qc_app.parse.queued_parse", filepath, filetype,
+        additional_jobs_meta = {
+            "filename": filename, "filesize": os.stat(filepath).st_size})
     return redirect(url_for("parse.parse_status", job_id=job.get_id()))
 
 @parsebp.route("/status/<job_id>", methods=["GET"])
@@ -83,7 +84,7 @@ def parse_status(job_id):
     job = jobs.job(job_id)
     if job:
         job_id = job.get_id()
-        progress = job.meta.get("progress", 0)
+        progress = job.meta["progress"]
         status = job.meta["status"]
         filename = job.meta.get("filename", "uploaded file")
         if status == "success":
@@ -97,6 +98,7 @@ def parse_status(job_id):
             job_id = job_id,
             job_status = status,
             progress = progress,
+            message = job.meta.get("message", ""),
             job_name = f"Parsing '{filename}'")
 
     return render_template("no_such_job.html", job_id=job_id)
@@ -106,7 +108,47 @@ def results(job_id):
     """Indicates success if parsing the file is successful"""
     return "STUB: Parse success!!!"
 
-@parsebp.route("/fail", methods=["GET"])
-def fail():
-    """Indicates success if parsing the file is successful"""
-    return "STUB: Parse Failure!!!"
+def queued_collect_errors(filepath, filetype, seek_pos=0):
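+    """Collect the parse errors in `filepath`, resuming from `seek_pos`."""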
+    from qc_app import create_app, instance_path
+    app = create_app(instance_path())
+    app.app_context().push()
+
+    job = get_current_job()
+    errors = []
+    count = 0
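+
+    # Walk through the remaining errors, recording progress in the job's metadata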
+
+    for error in parse_errors(
+            filepath, filetype, strain_names(parse_strains("strains.csv")),
+            seek_pos):
+        count = count + 1
+        jobs.update_meta(
+            job, message = f"Collected {count} errors", progress = (
+                (error["position"] / job.meta["filesize"]) * 100))
+        errors.append(error)
+
+    jobs.update_meta(job, errors = errors, progress = 100, status = "success")
+
+@parsebp.route("/fail/<job_id>", methods=["GET"])
+def fail(job_id):
+    """Handle parsing failure"""
+    old_job = jobs.job(job_id)
+    if old_job:
+        old_job.refresh()
+        job_id = old_job.get_id()
+        progress = old_job.meta.get("progress", 0)
+        status = old_job.meta["status"]
+        results = old_job.meta["results"]
+        filename = old_job.meta.get("filename", "uploaded file")
+
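+        # Re-enqueue a job that resumes parsing at the recorded error
+        # position and collects the remaining errors.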
+        new_job = jobs.enqueue_job(
+            "qc_app.parse.queued_collect_errors",
+            os.path.join(
+                app.config["UPLOAD_FOLDER"], old_job.meta["filename"]),
+            results["filetype"], results["position"],
+            additional_jobs_meta = {
+                "status": "Collecting Errors",
+                "filename": old_job.meta["filename"],
+                "filesize": old_job.meta["filesize"]})
+        return redirect(url_for("parse.parse_status", job_id=new_job.get_id()))
+
+    return render_template("no_such_job.html", job_id=job_id)