-rw-r--r--  uploader/phenotypes/views.py  127
1 file changed, 64 insertions(+), 63 deletions(-)
diff --git a/uploader/phenotypes/views.py b/uploader/phenotypes/views.py
index 8ae3ae4..400baa6 100644
--- a/uploader/phenotypes/views.py
+++ b/uploader/phenotypes/views.py
@@ -4,6 +4,7 @@ import uuid
import json
import datetime
from pathlib import Path
+from zipfile import ZipFile
from functools import wraps
from logging import INFO, ERROR, DEBUG, FATAL, CRITICAL, WARNING
@@ -14,6 +15,7 @@ from gn_libs.mysqldb import database_connection
from flask import (flash,
request,
url_for,
+ jsonify,
redirect,
Blueprint,
current_app as app)
@@ -307,8 +309,7 @@ def create_dataset(species: dict, population: dict, **kwargs):# pylint: disable=
population_id=population["Id"]))
-def process_phenotypes_rqtl2_bundle(
- rconn: Redis, species: dict, population: dict, dataset: dict):
+def process_phenotypes_rqtl2_bundle(error_uri):
"""Process phenotypes from the uploaded R/qtl2 bundle."""
_redisuri = app.config["REDIS_URL"]
_sqluri = app.config["SQL_URI"]
@@ -317,59 +318,20 @@ def process_phenotypes_rqtl2_bundle(
phenobundle = save_file(request.files["phenotypes-bundle"],
Path(app.config["UPLOAD_FOLDER"]))
rqc.validate_bundle(phenobundle)
+ return phenobundle
except AssertionError as _aerr:
app.logger.debug("File upload error!", exc_info=True)
flash("Expected a zipped bundle of files with phenotypes' "
"information.",
"alert-danger")
- return add_phenos_uri
+ return error_uri
except rqe.RQTLError as rqtlerr:
app.logger.debug("Bundle validation error!", exc_info=True)
flash("R/qtl2 Error: " + " ".join(rqtlerr.args), "alert-danger")
- return add_phenos_uri
-
- _jobid = uuid.uuid4()
- _namespace = jobs.jobsnamespace()
- _ttl_seconds = app.config["JOBS_TTL_SECONDS"]
- _job = jobs.launch_job(
- jobs.initialise_job(
- rconn,
- _namespace,
- str(_jobid),
- [sys.executable, "-m", "scripts.rqtl2.phenotypes_qc", _sqluri,
- _redisuri, _namespace, str(_jobid), str(species["SpeciesId"]),
- str(population["Id"]),
- # str(dataset["Id"]),
- str(phenobundle),
- "--loglevel",
- {
- INFO: "INFO",
- ERROR: "ERROR",
- DEBUG: "DEBUG",
- FATAL: "FATAL",
- CRITICAL: "CRITICAL",
- WARNING: "WARNING"
- }[app.logger.getEffectiveLevel()],
- "--redisexpiry",
- str(_ttl_seconds)], "phenotype_qc", _ttl_seconds,
- {"job-metadata": json.dumps({
- "speciesid": species["SpeciesId"],
- "populationid": population["Id"],
- "datasetid": dataset["Id"],
- "bundle": str(phenobundle.absolute())})}),
- _redisuri,
- f"{app.config['UPLOAD_FOLDER']}/job_errors")
-
- app.logger.debug("JOB DETAILS: %s", _job)
-
- return redirect(url_for("species.populations.phenotypes.job_status",
- species_id=species["SpeciesId"],
- population_id=population["Id"],
- dataset_id=dataset["Id"],
- job_id=str(_job["jobid"])))
-
-
-def process_phenotypes_individual_files(rconn, species, population, dataset, error_uri):
+ return error_uri
+
+
+def process_phenotypes_individual_files(error_uri):
"""Process the uploaded individual files."""
form = request.form
cdata = {
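Note: after this hunk both upload helpers take only an `error_uri` argument and share one return contract: a `Path` to a zipped R/qtl2 bundle under `UPLOAD_FOLDER` on success, or the supplied `error_uri` (after queueing a flash() message) on failure. The `add_phenotypes` hunk further below assigns the result directly; a caller that wants to guard the error path explicitly could do something like the following sketch. The function name and the guard are illustrative only, not part of this patch; only the two helper names and `add_phenos_uri`/`use_bundle` come from the diff.

    from pathlib import Path

    def bundle_or_bail(use_bundle, add_phenos_uri):
        """Hypothetical caller illustrating the helpers' shared contract."""
        result = (process_phenotypes_rqtl2_bundle(add_phenos_uri)
                  if use_bundle else
                  process_phenotypes_individual_files(add_phenos_uri))
        if not isinstance(result, Path):
            # Validation failed: the helper has already flash()ed a message
            # and handed back the error URI it was given.
            return result
        # Otherwise `result` points at a validated .zip bundle ready for QC.
        return result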
@@ -377,11 +339,9 @@ def process_phenotypes_individual_files(rconn, species, population, dataset, err
"comment.char": form["file-comment-character"],
"na.strings": form["file-na"].split(" "),
}
- with ZipFile(
- Path(app.config["UPLOAD_FOLDER"],
- f"{str(uuid.uuid4()).replace('-', '')}.zip"),
- mode="w"
- ) as zfile:
+ bundlepath = Path(app.config["UPLOAD_FOLDER"],
+ f"{str(uuid.uuid4()).replace('-', '')}.zip")
+    with ZipFile(bundlepath, mode="w") as zfile:
for rqtlkey, formkey in (("phenocovar", "phenotype-descriptions"),
("pheno", "phenotype-data"),
("phenose", "phenotype-se"),
@@ -410,12 +370,7 @@ def process_phenotypes_individual_files(rconn, species, population, dataset, err
zfile.writestr("control_data.json", data=json.dumps(cdata, indent=2))
- ## Convert files and settings to R/qtl2 bundle
- ## Use same processing as R/qtl2 bundle (after some refactoring)
- print(f"FORM: {request.form}")
- print(f"FILES: {request.files}")
- print(f"CDATA: {cdata}")
- return "Would process individual files…"
+ return bundlepath
@phenotypesbp.route(
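For reference, the archive that `process_phenotypes_individual_files` now returns mimics an R/qtl2 control-file bundle: the uploaded per-type files become members of the zip, and a `control_data.json` member records the parsing options plus which member plays which R/qtl2 role. A rough, self-contained sketch of that pattern follows; the member names, column contents and the list-valued `phenocovar` entry are made up for illustration, only the overall shape comes from the diff.

    import json, tempfile, uuid
    from pathlib import Path
    from zipfile import ZipFile

    # Parsing options captured from the form (values here are made up).
    cdata = {"sep": ",", "comment.char": "#", "na.strings": ["NA", "-"]}

    bundlepath = Path(tempfile.gettempdir(), f"{uuid.uuid4().hex}.zip")
    with ZipFile(bundlepath, mode="w") as zfile:
        # Each uploaded file becomes a member of the archive...
        zfile.writestr("descriptions.csv", "id,description\npheno1,Body weight\n")
        # ...and is referenced from the control data under its R/qtl2 key.
        cdata["phenocovar"] = ["descriptions.csv"]
        # The control data itself is written last, as in the patch.
        zfile.writestr("control_data.json", json.dumps(cdata, indent=2))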
@@ -461,11 +416,57 @@ def add_phenotypes(species: dict, population: dict, dataset: dict, **kwargs):# p
use_bundle=use_bundle,
activelink="add-phenotypes")
- if use_bundle:
- return process_phenotypes_rqtl2_bundle(
- rconn, species, population, dataset)
- return process_phenotypes_individual_files(
- rconn, species, population, dataset)
+ phenobundle = (process_phenotypes_rqtl2_bundle(add_phenos_uri)
+ if use_bundle else
+ process_phenotypes_individual_files(add_phenos_uri))
+
+ _jobid = uuid.uuid4()
+ _namespace = jobs.jobsnamespace()
+ _ttl_seconds = app.config["JOBS_TTL_SECONDS"]
+ _job = jobs.launch_job(
+ jobs.initialise_job(
+ rconn,
+ _namespace,
+ str(_jobid),
+ [sys.executable, "-m", "scripts.rqtl2.phenotypes_qc", _sqluri,
+ _redisuri, _namespace, str(_jobid), str(species["SpeciesId"]),
+ str(population["Id"]),
+ # str(dataset["Id"]),
+ str(phenobundle),
+ "--loglevel",
+ {
+ INFO: "INFO",
+ ERROR: "ERROR",
+ DEBUG: "DEBUG",
+ FATAL: "FATAL",
+ CRITICAL: "CRITICAL",
+ WARNING: "WARNING"
+ }[app.logger.getEffectiveLevel()],
+ "--redisexpiry",
+ str(_ttl_seconds)], "phenotype_qc", _ttl_seconds,
+ {"job-metadata": json.dumps({
+ "speciesid": species["SpeciesId"],
+ "populationid": population["Id"],
+ "datasetid": dataset["Id"],
+ "bundle": str(phenobundle.absolute())})}),
+ _redisuri,
+ f"{app.config['UPLOAD_FOLDER']}/job_errors")
+
+ app.logger.debug("JOB DETAILS: %s", _job)
+ jobstatusuri = url_for("species.populations.phenotypes.job_status",
+ species_id=species["SpeciesId"],
+ population_id=population["Id"],
+ dataset_id=dataset["Id"],
+ job_id=str(_job["jobid"]))
+ return ((jsonify({
+ "redirect-to": jobstatusuri,
+ "statuscode": 200,
+ "message": ("Follow the 'redirect-to' URI to see the state "
+ "of the quality-control job started for your "
+ "uploaded files.")
+ }), 200)
+ if request.form.get("resumable-upload", False) else
+ redirect(jobstatusuri))
@phenotypesbp.route(
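The tail of `add_phenotypes` now answers resumable uploads (form field `resumable-upload`) with a JSON payload instead of an HTTP redirect. A hedged sketch of how a client might consume either response, assuming a hypothetical endpoint URL (the real route is defined by the truncated `@phenotypesbp.route(...)` decorator above) and using only the form/file fields visible in this diff:

    import requests

    # URL is hypothetical; substitute the real add-phenotypes route.
    url = "https://uploader.example.org/species/1/populations/1/phenotypes/datasets/1/add-phenotypes"

    with open("bundle.zip", "rb") as bundle:
        resp = requests.post(
            url,
            data={"resumable-upload": "true"},
            files={"phenotypes-bundle": bundle},
            allow_redirects=False)

    if resp.status_code == 200:
        # Resumable path: JSON body with the QC job-status URI.
        payload = resp.json()
        print(payload["message"])
        status_uri = payload["redirect-to"]
    else:
        # Classic form post: a plain HTTP redirect to the job-status page.
        status_uri = resp.headers["Location"]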