author      Frederick Muriuki Muriithi  2024-12-13 15:53:41 -0600
committer   Frederick Muriuki Muriithi  2024-12-13 15:53:41 -0600
commit      b8d987f87a37e557a6e20a1b68022e6efe10be76 (patch)
tree        bf6e07c1ceeb0e1af4d5a2e12ca85339e7b04b26 /uploader
parent      dea29486883b5b13369a6d2c5392ec01a24555ee (diff)
download    gn-uploader-b8d987f87a37e557a6e20a1b68022e6efe10be76.tar.gz
Separate bundle and individual files processing
Move the code that processes the R/qtl2 bundle into a separate function and call it. Create a new (currently failing) function to process the individual files.
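
In outline, this commit turns add_phenotypes() into a dispatcher over two helpers. A minimal, self-contained sketch of that shape (the names mirror the diff below, but the Flask and Redis plumbing is replaced with stand-ins, so this is an illustration rather than the actual gn-uploader code):

    def process_phenotypes_rqtl2_bundle(rconn, species, population, dataset):
        """Stand-in for the helper that validates the R/qtl2 bundle and queues the QC job."""
        return f"queued phenotype QC job for dataset {dataset['Id']}"


    def process_phenotypes_individual_files(rconn, species, population, dataset):
        """Stand-in for the helper this commit leaves unimplemented."""
        raise NotImplementedError("Implement this!")


    def add_phenotypes(rconn, species, population, dataset, query_args):
        """Dispatch on the 'use_bundle' query argument, as the refactored view does."""
        use_bundle = query_args.get("use_bundle", "").lower() == "true"
        if use_bundle:
            return process_phenotypes_rqtl2_bundle(rconn, species, population, dataset)
        return process_phenotypes_individual_files(rconn, species, population, dataset)


    if __name__ == "__main__":
        # Hypothetical values, purely for demonstration.
        print(add_phenotypes(None, {}, {}, {"Id": 42}, {"use_bundle": "True"}))
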
Diffstat (limited to 'uploader')
-rw-r--r--  uploader/phenotypes/views.py                              135
-rw-r--r--  uploader/templates/phenotypes/add-phenotypes-base.html      3
2 files changed, 80 insertions(+), 58 deletions(-)
diff --git a/uploader/phenotypes/views.py b/uploader/phenotypes/views.py
index c4aa67a..f10ba09 100644
--- a/uploader/phenotypes/views.py
+++ b/uploader/phenotypes/views.py
@@ -307,6 +307,76 @@ def create_dataset(species: dict, population: dict, **kwargs):# pylint: disable=
                             population_id=population["Id"]))
 
 
+def process_phenotypes_rqtl2_bundle(
+        rconn: Redis, species: dict, population: dict, dataset: dict):
+    """Process phenotypes from the uploaded R/qtl2 bundle."""
+    _redisuri = app.config["REDIS_URL"]
+    _sqluri = app.config["SQL_URI"]
+    try:
+        ## Handle huge files here...
+        phenobundle = save_file(request.files["phenotypes-bundle"],
+                                Path(app.config["UPLOAD_FOLDER"]))
+        rqc.validate_bundle(phenobundle)
+    except AssertionError as _aerr:
+        app.logger.debug("File upload error!", exc_info=True)
+        flash("Expected a zipped bundle of files with phenotypes' "
+              "information.",
+              "alert-danger")
+        return add_phenos_uri
+    except rqe.RQTLError as rqtlerr:
+        app.logger.debug("Bundle validation error!", exc_info=True)
+        flash("R/qtl2 Error: " + " ".join(rqtlerr.args), "alert-danger")
+        return add_phenos_uri
+
+    _jobid = uuid.uuid4()
+    _namespace = jobs.jobsnamespace()
+    _ttl_seconds = app.config["JOBS_TTL_SECONDS"]
+    _job = jobs.launch_job(
+        jobs.initialise_job(
+            rconn,
+            _namespace,
+            str(_jobid),
+            [sys.executable, "-m", "scripts.rqtl2.phenotypes_qc", _sqluri,
+             _redisuri, _namespace, str(_jobid), str(species["SpeciesId"]),
+             str(population["Id"]),
+             # str(dataset["Id"]),
+             str(phenobundle),
+             "--loglevel",
+             {
+                 INFO: "INFO",
+                 ERROR: "ERROR",
+                 DEBUG: "DEBUG",
+                 FATAL: "FATAL",
+                 CRITICAL: "CRITICAL",
+                 WARNING: "WARNING"
+             }[app.logger.getEffectiveLevel()],
+             "--redisexpiry",
+             str(_ttl_seconds)], "phenotype_qc", _ttl_seconds,
+            {"job-metadata": json.dumps({
+                "speciesid": species["SpeciesId"],
+                "populationid": population["Id"],
+                "datasetid": dataset["Id"],
+                "bundle": str(phenobundle.absolute())})}),
+        _redisuri,
+        f"{app.config['UPLOAD_FOLDER']}/job_errors")
+
+    app.logger.debug("JOB DETAILS: %s", _job)
+
+    return redirect(url_for("species.populations.phenotypes.job_status",
+                            species_id=species["SpeciesId"],
+                            population_id=population["Id"],
+                            dataset_id=dataset["Id"],
+                            job_id=str(_job["jobid"])))
+
+
+def process_phenotypes_individual_files(rconn, species, population, dataset):
+    """Process the uploaded individual files."""
+    ## Handle huge file uploads here...
+    ## Convert files and settings to R/qtl2 bundle
+    ## Use same processing as R/qtl2 bundle (after some refactoring)
+    raise NotImplementedError("Implement this!")
+
+
 @phenotypesbp.route(
     "<int:species_id>/populations/<int:population_id>/phenotypes/datasets"
     "/<int:dataset_id>/add-phenotypes",
@@ -318,6 +388,7 @@ def create_dataset(species: dict, population: dict, **kwargs):# pylint: disable=
     redirect_uri="species.populations.phenotypes.list_datasets")
 def add_phenotypes(species: dict, population: dict, dataset: dict, **kwargs):# pylint: disable=[unused-argument, too-many-locals]
     """Add one or more phenotypes to the dataset."""
+    use_bundle = request.args.get("use_bundle", "").lower() == "true"
     add_phenos_uri = redirect(url_for(
         "species.populations.phenotypes.add_phenotypes",
         species_id=species["SpeciesId"],
@@ -333,8 +404,7 @@ def add_phenotypes(species: dict, population: dict, dataset: dict, **kwargs):# p
         today = datetime.date.today()
         return render_template(
             ("phenotypes/add-phenotypes-with-rqtl2-bundle.html"
-             if request.args.get("use_bundle", "").lower() == "true"
-             else "phenotypes/add-phenotypes-raw-files.html"),
+             if use_bundle else "phenotypes/add-phenotypes-raw-files.html"),
             species=species,
             population=population,
             dataset=dataset,
@@ -347,63 +417,14 @@ def add_phenotypes(species: dict, population: dict, dataset: dict, **kwargs):# p
             current_year=int(today.strftime("%Y")),
             families_with_se_and_n=(
                 "Reference Populations (replicate average, SE, N)",),
+            use_bundle=use_bundle,
             activelink="add-phenotypes")
-    try:
-        ## Handle huge files here...
-        phenobundle = save_file(request.files["phenotypes-bundle"],
-                                Path(app.config["UPLOAD_FOLDER"]))
-        rqc.validate_bundle(phenobundle)
-    except AssertionError as _aerr:
-        app.logger.debug("File upload error!", exc_info=True)
-        flash("Expected a zipped bundle of files with phenotypes' "
-              "information.",
-              "alert-danger")
-        return add_phenos_uri
-    except rqe.RQTLError as rqtlerr:
-        app.logger.debug("Bundle validation error!", exc_info=True)
-        flash("R/qtl2 Error: " + " ".join(rqtlerr.args), "alert-danger")
-        return add_phenos_uri
-
-    _jobid = uuid.uuid4()
-    _namespace = jobs.jobsnamespace()
-    _ttl_seconds = app.config["JOBS_TTL_SECONDS"]
-    _job = jobs.launch_job(
-        jobs.initialise_job(
-            rconn,
-            _namespace,
-            str(_jobid),
-            [sys.executable, "-m", "scripts.rqtl2.phenotypes_qc", _sqluri,
-             _redisuri, _namespace, str(_jobid), str(species["SpeciesId"]),
-             str(population["Id"]),
-             # str(dataset["Id"]),
-             str(phenobundle),
-             "--loglevel",
-             {
-                 INFO: "INFO",
-                 ERROR: "ERROR",
-                 DEBUG: "DEBUG",
-                 FATAL: "FATAL",
-                 CRITICAL: "CRITICAL",
-                 WARNING: "WARNING"
-             }[app.logger.getEffectiveLevel()],
-             "--redisexpiry",
-             str(_ttl_seconds)], "phenotype_qc", _ttl_seconds,
-            {"job-metadata": json.dumps({
-                "speciesid": species["SpeciesId"],
-                "populationid": population["Id"],
-                "datasetid": dataset["Id"],
-                "bundle": str(phenobundle.absolute())})}),
-        _redisuri,
-        f"{app.config['UPLOAD_FOLDER']}/job_errors")
-
-    app.logger.debug("JOB DETAILS: %s", _job)
-
-    return redirect(url_for("species.populations.phenotypes.job_status",
-                            species_id=species["SpeciesId"],
-                            population_id=population["Id"],
-                            dataset_id=dataset["Id"],
-                            job_id=str(_job["jobid"])))
+    if use_bundle:
+        return process_phenotypes_rqtl2_bundle(
+            rconn, species, population, dataset)
+    return process_phenotypes_individual_files(
+        rconn, species, population, dataset)
 
 
 @phenotypesbp.route(
diff --git a/uploader/templates/phenotypes/add-phenotypes-base.html b/uploader/templates/phenotypes/add-phenotypes-base.html
index b3a53b0..7ad1e09 100644
--- a/uploader/templates/phenotypes/add-phenotypes-base.html
+++ b/uploader/templates/phenotypes/add-phenotypes-base.html
@@ -30,7 +30,8 @@
           action="{{url_for('species.populations.phenotypes.add_phenotypes',
                    species_id=species.SpeciesId,
                    population_id=population.Id,
-                   dataset_id=dataset.Id)}}">
+                   dataset_id=dataset.Id,
+                   use_bundle=use_bundle)}}">
       <legend>Add New Phenotypes</legend>
 
       <div class="form-text help-block">
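
A note on the template change: Flask's url_for() appends keyword arguments that are not route variables to the query string, so passing use_bundle through the form's action URL carries the chosen mode across the POST, where the view re-reads it case-insensitively. With hypothetical values (the exact path prefix depends on how the blueprint is registered):

    url_for("species.populations.phenotypes.add_phenotypes",
            species_id=1, population_id=2, dataset_id=3, use_bundle=True)
    # yields something like
    # ".../1/populations/2/phenotypes/datasets/3/add-phenotypes?use_bundle=True"
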