author     Frederick Muriuki Muriithi   2022-06-02 12:57:18 +0300
committer  Frederick Muriuki Muriithi   2022-06-02 12:57:18 +0300
commit     2f84b2eb1fecab4c381ad2dfd31277717cc9df2d (patch)
tree       c8a9080401ba40869454f55c88cd359bdbc393dd
parent     219248568252e7291f20105ce589c87c5a85f433 (diff)
download   gn-uploader-2f84b2eb1fecab4c381ad2dfd31277717cc9df2d.tar.gz
Enable the user to abort file parsing
Enable the user to abort the background parsing of the file.
-rw-r--r--  qc_app/parse.py                       17
-rw-r--r--  qc_app/static/css/styles.css           7
-rw-r--r--  qc_app/templates/job_progress.html     7
-rw-r--r--  qc_app/templates/parse_results.html    4
-rw-r--r--  quality_control/parsing.py             8
-rw-r--r--  scripts/worker.py                     13
6 files changed, 51 insertions(+), 5 deletions(-)
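
The change wires an abort flag through Redis: the web process records a "user_aborted" field on the job's hash, and the worker polls that field between input lines. A minimal, standalone sketch of that handshake (hypothetical Redis URL and job id, not part of the diff below):

    from redis import Redis

    REDIS_URL = "redis://localhost:6379"   # hypothetical; the app reads REDIS_URL from its config
    job_id = "example-job-id"              # hypothetical job hash name

    with Redis.from_url(REDIS_URL, decode_responses=True) as rconn:
        # Web side: the /abort handler sets the flag on the job's hash.
        rconn.hset(name=job_id, key="user_aborted", value=int(True))

        # Worker side: poll the flag; treat a missing field as "not aborted".
        aborted = bool(int(rconn.hget(name=job_id, key="user_aborted") or "0"))
        print("aborted?", aborted)
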
diff --git a/qc_app/parse.py b/qc_app/parse.py
index 1a1b686..b2a0156 100644
--- a/qc_app/parse.py
+++ b/qc_app/parse.py
@@ -76,7 +76,7 @@ def parse_status(job_id: str):
filename = job.get("filename", "uploaded file")
errors = jsonpickle.decode(
job.get("errors", jsonpickle.encode(tuple())))
- if status == "success":
+ if status == "success" or status == "aborted":
return redirect(url_for("parse.results", job_id=job_id))
if status == "parse-error":
@@ -107,7 +107,8 @@ def results(job_id: str):
return render_template(
"parse_results.html",
errors=errors,
- job_name = f"Parsing '{filename}'")
+ job_name = f"Parsing '{filename}'",
+ user_aborted = job.get("user_aborted"))
return render_template("no_such_job.html", job_id=job_id)
@@ -129,3 +130,15 @@ def fail(job_id: str):
return render_template("parse_failure.html", job=job)
return render_template("no_such_job.html", job_id=job_id)
+
+@parsebp.route("/abort", methods=["POST"])
+def abort():
+ job_id = request.form["job_id"]
+
+ with Redis.from_url(app.config["REDIS_URL"], decode_responses=True) as rconn:
+ job = jobs.job(rconn, job_id)
+
+ if job:
+ rconn.hset(name=job_id, key="user_aborted", value=int(True))
+
+ return redirect(url_for("parse.parse_status", job_id=job_id))
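
For a quick manual check of the new endpoint, something along these lines should work; the host, port, and URL prefix are guesses, since they depend on how the parsebp blueprint is registered (assumes the requests package):

    import requests

    # The handler expects a form-encoded job_id and answers with a redirect
    # back to the job's status page.
    response = requests.post(
        "http://localhost:5000/parse/abort",   # hypothetical host, port, and prefix
        data={"job_id": "example-job-id"},
        allow_redirects=False)
    print(response.status_code, response.headers.get("Location"))
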
diff --git a/qc_app/static/css/styles.css b/qc_app/static/css/styles.css
index a5f0e01..aa6b049 100644
--- a/qc_app/static/css/styles.css
+++ b/qc_app/static/css/styles.css
@@ -30,6 +30,13 @@ fieldset {
font-weight: bold;
}
+.btn-danger {
+ color: #FEFEFE;
+ border-color: #FF3434;
+ background-color: #FF4545;
+ font-weight: bold;
+}
+
.alert {
display: block;
border-style: solid;
diff --git a/qc_app/templates/job_progress.html b/qc_app/templates/job_progress.html
index accdc1c..acced9b 100644
--- a/qc_app/templates/job_progress.html
+++ b/qc_app/templates/job_progress.html
@@ -18,6 +18,13 @@
<span>{{"%.2f" | format(progress)}}%</span>
<div>
+ <form action="{{url_for('parse.abort')}}" method="POST">
+ <input type="hidden" name="job_id" value="{{job_id}}" />
+ <input type="submit" value="Abort" class="btn btn-danger" />
+ </form>
+</div>
+
+<div>
{{errors_display(errors, "No errors found so far", "We have found the following errors so far")}}
</div>
diff --git a/qc_app/templates/parse_results.html b/qc_app/templates/parse_results.html
index 8d39359..358c5e8 100644
--- a/qc_app/templates/parse_results.html
+++ b/qc_app/templates/parse_results.html
@@ -6,6 +6,10 @@
{%block contents%}
<h1 class="heading">{{job_name}}: parse results</h2>
+{%if user_aborted%}
+<span class="alert-warning">Job aborted by the user</span>
+{%endif%}
+
{{errors_display(errors, "No errors found in the file", "We found the following errors")}}
{%endblock%}
diff --git a/quality_control/parsing.py b/quality_control/parsing.py
index 5b1809b..9f8e8ee 100644
--- a/quality_control/parsing.py
+++ b/quality_control/parsing.py
@@ -4,7 +4,7 @@ import os
import collections
from enum import Enum
from functools import partial
-from typing import Union, Iterable, Generator, Callable
+from typing import Iterable, Generator, Callable, Optional
import quality_control.average as avg
import quality_control.standard_error as se
@@ -65,7 +65,8 @@ def se_errors(line_number, fields):
def collect_errors(
filepath: str, filetype: FileType, strains: list,
- update_progress: Union[Callable, None] = None) -> Generator:
+ update_progress: Optional[Callable] = None,
+ user_aborted: Optional[Callable] = lambda: False) -> Generator:
"""Run checks against file and collect all the errors"""
errors = tuple()
def __process_errors__(line_number, line, error_checker_fn, errors = tuple()):
@@ -80,6 +81,9 @@ def collect_errors(
with open(filepath, encoding="utf-8") as input_file:
for line_number, line in enumerate(input_file, start=1):
+ if user_aborted():
+ break
+
if line_number == 1:
for error in __process_errors__(
line_number, line, partial(header_errors, strains=strains),
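
collect_errors now accepts an optional zero-argument user_aborted callable and checks it once per input line, breaking out of the loop as soon as it returns True. A self-contained sketch of that cooperative-abort pattern (hypothetical names, not the project's code):

    from typing import Callable, Iterable, Iterator

    def process_lines(lines: Iterable[str],
                      user_aborted: Callable[[], bool] = lambda: False) -> Iterator[str]:
        """Yield stripped lines, stopping early once the abort callable fires."""
        for line in lines:
            if user_aborted():
                break
            yield line.strip()

    flag = {"abort": False}
    for item in process_lines(["alpha ", "beta ", "gamma "],
                              user_aborted=lambda: flag["abort"]):
        print(item)
        flag["abort"] = True   # flips after the first line, so only "alpha" is printed
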
diff --git a/scripts/worker.py b/scripts/worker.py
index 6fab9f9..0ef5ae5 100644
--- a/scripts/worker.py
+++ b/scripts/worker.py
@@ -54,6 +54,16 @@ def stream_error(redis_conn, job_id, error):
redis_conn.hset(
job_id, key="errors", value=jsonpickle.encode(errors + (error,)))
+def make_user_aborted(redis_conn, job_id):
+ def __aborted__():
+ user_aborted = bool(int(
+ redis_conn.hget(name=job_id, key="user_aborted") or "0"))
+ if user_aborted:
+ redis_conn.hset(name=job_id, key="status", value="aborted")
+
+ return user_aborted
+ return __aborted__
+
def main():
args = process_cli_arguments()
if args is None:
@@ -79,7 +89,8 @@ def main():
error_count = 0
for error in collect_errors(
- filepath, filetype, strains, progress_indicator):
+ filepath, filetype, strains, progress_indicator,
+ make_user_aborted(redis_conn, args.job_id)):
stream_error(redis_conn, args.job_id, error)
if count > 0:
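
make_user_aborted returns a closure the worker calls between lines: it reads the flag from the job's Redis hash and, as a side effect, marks the job status "aborted" the moment the flag is seen. A rough in-memory illustration of that behaviour, with a plain dict standing in for the Redis hash (hypothetical helper name, not the commit's code):

    def make_user_aborted_sketch(store: dict):
        """In-memory stand-in for make_user_aborted; `store` mimics the job's Redis hash."""
        def __aborted__() -> bool:
            user_aborted = bool(int(store.get("user_aborted", "0")))
            if user_aborted:
                store["status"] = "aborted"   # record the terminal state, as the worker does
            return user_aborted
        return __aborted__

    job = {"status": "running"}
    aborted = make_user_aborted_sketch(job)
    assert aborted() is False          # flag not set yet
    job["user_aborted"] = "1"          # what the /abort handler writes via hset
    assert aborted() is True
    assert job["status"] == "aborted"
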