diff options
Diffstat (limited to 'uploader/phenotypes/views.py')
-rw-r--r-- | uploader/phenotypes/views.py | 135 |
1 file changed, 78 insertions, 57 deletions
diff --git a/uploader/phenotypes/views.py b/uploader/phenotypes/views.py index c4aa67a..f10ba09 100644 --- a/uploader/phenotypes/views.py +++ b/uploader/phenotypes/views.py @@ -307,6 +307,76 @@ def create_dataset(species: dict, population: dict, **kwargs):# pylint: disable= population_id=population["Id"])) +def process_phenotypes_rqtl2_bundle( + rconn: Redis, species: dict, population: dict, dataset: dict): + """Process phenotypes from the uploaded R/qtl2 bundle.""" + _redisuri = app.config["REDIS_URL"] + _sqluri = app.config["SQL_URI"] + try: + ## Handle huge files here... + phenobundle = save_file(request.files["phenotypes-bundle"], + Path(app.config["UPLOAD_FOLDER"])) + rqc.validate_bundle(phenobundle) + except AssertionError as _aerr: + app.logger.debug("File upload error!", exc_info=True) + flash("Expected a zipped bundle of files with phenotypes' " + "information.", + "alert-danger") + return add_phenos_uri + except rqe.RQTLError as rqtlerr: + app.logger.debug("Bundle validation error!", exc_info=True) + flash("R/qtl2 Error: " + " ".join(rqtlerr.args), "alert-danger") + return add_phenos_uri + + _jobid = uuid.uuid4() + _namespace = jobs.jobsnamespace() + _ttl_seconds = app.config["JOBS_TTL_SECONDS"] + _job = jobs.launch_job( + jobs.initialise_job( + rconn, + _namespace, + str(_jobid), + [sys.executable, "-m", "scripts.rqtl2.phenotypes_qc", _sqluri, + _redisuri, _namespace, str(_jobid), str(species["SpeciesId"]), + str(population["Id"]), + # str(dataset["Id"]), + str(phenobundle), + "--loglevel", + { + INFO: "INFO", + ERROR: "ERROR", + DEBUG: "DEBUG", + FATAL: "FATAL", + CRITICAL: "CRITICAL", + WARNING: "WARNING" + }[app.logger.getEffectiveLevel()], + "--redisexpiry", + str(_ttl_seconds)], "phenotype_qc", _ttl_seconds, + {"job-metadata": json.dumps({ + "speciesid": species["SpeciesId"], + "populationid": population["Id"], + "datasetid": dataset["Id"], + "bundle": str(phenobundle.absolute())})}), + _redisuri, + f"{app.config['UPLOAD_FOLDER']}/job_errors") + + 
app.logger.debug("JOB DETAILS: %s", _job) + + return redirect(url_for("species.populations.phenotypes.job_status", + species_id=species["SpeciesId"], + population_id=population["Id"], + dataset_id=dataset["Id"], + job_id=str(_job["jobid"]))) + + +def process_phenotypes_individual_files(rconn, species, population, dataset): + """Process the uploaded individual files.""" + ## Handle huge file uploads here... + ## Convert files and settings to R/qtl2 bundle + ## Use same processing as R/qtl2 bundle (after some refactoring) + raise NotImplementedError("Implement this!") + + @phenotypesbp.route( "<int:species_id>/populations/<int:population_id>/phenotypes/datasets" "/<int:dataset_id>/add-phenotypes", @@ -318,6 +388,7 @@ def create_dataset(species: dict, population: dict, **kwargs):# pylint: disable= redirect_uri="species.populations.phenotypes.list_datasets") def add_phenotypes(species: dict, population: dict, dataset: dict, **kwargs):# pylint: disable=[unused-argument, too-many-locals] """Add one or more phenotypes to the dataset.""" + use_bundle = request.args.get("use_bundle", "").lower() == "true" add_phenos_uri = redirect(url_for( "species.populations.phenotypes.add_phenotypes", species_id=species["SpeciesId"], @@ -333,8 +404,7 @@ def add_phenotypes(species: dict, population: dict, dataset: dict, **kwargs):# p today = datetime.date.today() return render_template( ("phenotypes/add-phenotypes-with-rqtl2-bundle.html" - if request.args.get("use_bundle", "").lower() == "true" - else "phenotypes/add-phenotypes-raw-files.html"), + if use_bundle else "phenotypes/add-phenotypes-raw-files.html"), species=species, population=population, dataset=dataset, @@ -347,63 +417,14 @@ def add_phenotypes(species: dict, population: dict, dataset: dict, **kwargs):# p current_year=int(today.strftime("%Y")), families_with_se_and_n=( "Reference Populations (replicate average, SE, N)",), + use_bundle=use_bundle, activelink="add-phenotypes") - try: - ## Handle huge files here... 
- phenobundle = save_file(request.files["phenotypes-bundle"], - Path(app.config["UPLOAD_FOLDER"])) - rqc.validate_bundle(phenobundle) - except AssertionError as _aerr: - app.logger.debug("File upload error!", exc_info=True) - flash("Expected a zipped bundle of files with phenotypes' " - "information.", - "alert-danger") - return add_phenos_uri - except rqe.RQTLError as rqtlerr: - app.logger.debug("Bundle validation error!", exc_info=True) - flash("R/qtl2 Error: " + " ".join(rqtlerr.args), "alert-danger") - return add_phenos_uri - - _jobid = uuid.uuid4() - _namespace = jobs.jobsnamespace() - _ttl_seconds = app.config["JOBS_TTL_SECONDS"] - _job = jobs.launch_job( - jobs.initialise_job( - rconn, - _namespace, - str(_jobid), - [sys.executable, "-m", "scripts.rqtl2.phenotypes_qc", _sqluri, - _redisuri, _namespace, str(_jobid), str(species["SpeciesId"]), - str(population["Id"]), - # str(dataset["Id"]), - str(phenobundle), - "--loglevel", - { - INFO: "INFO", - ERROR: "ERROR", - DEBUG: "DEBUG", - FATAL: "FATAL", - CRITICAL: "CRITICAL", - WARNING: "WARNING" - }[app.logger.getEffectiveLevel()], - "--redisexpiry", - str(_ttl_seconds)], "phenotype_qc", _ttl_seconds, - {"job-metadata": json.dumps({ - "speciesid": species["SpeciesId"], - "populationid": population["Id"], - "datasetid": dataset["Id"], - "bundle": str(phenobundle.absolute())})}), - _redisuri, - f"{app.config['UPLOAD_FOLDER']}/job_errors") - - app.logger.debug("JOB DETAILS: %s", _job) - - return redirect(url_for("species.populations.phenotypes.job_status", - species_id=species["SpeciesId"], - population_id=population["Id"], - dataset_id=dataset["Id"], - job_id=str(_job["jobid"]))) + if use_bundle: + return process_phenotypes_rqtl2_bundle( + rconn, species, population, dataset) + return process_phenotypes_individual_files( + rconn, species, population, dataset) @phenotypesbp.route( |