33 files changed, 908 insertions, 181 deletions
diff --git a/.guix-channel b/.guix-channel
index 54206b2..f1a8fa6 100644
--- a/.guix-channel
+++ b/.guix-channel
@@ -35,11 +35,12 @@
  (channel
   (name guix-bioinformatics)
   (url "https://git.genenetwork.org/guix-bioinformatics")
-  (commit "903465c85c9b2ae28480b236c3364da873ca8f51"))
+  (commit "9b0955f14ec725990abb1f6af3b9f171e4943f77"))
  (channel
   (name guix-past)
   (url "https://codeberg.org/guix-science/guix-past")
   (branch "master")
+  (commit "473c942b509ab3ead35159d27dfbf2031a36cd4d")
   (introduction
    (channel-introduction
     (version 0)
@@ -50,6 +51,7 @@
   (name guix-rust-past-crates)
   (url "https://codeberg.org/guix/guix-rust-past-crates.git")
   (branch "trunk")
+  (commit "b8b7ffbd1cec9f56f93fae4da3a74163bbc9c570")
   (introduction
    (channel-introduction
     (version 0)
diff --git a/qc_app/default_settings.py b/qc_app/default_settings.py
index 7a9da0f..7bb0bf8 100644
--- a/qc_app/default_settings.py
+++ b/qc_app/default_settings.py
@@ -7,7 +7,7 @@ import os
 
 LOG_LEVEL = os.getenv("LOG_LEVEL", "WARNING")
 SECRET_KEY = b"<Please! Please! Please! Change This!>"
-UPLOAD_FOLDER = "/tmp/qc_app_files"
+UPLOADS_DIRECTORY = "/tmp/qc_app_files"
 REDIS_URL = "redis://"
 JOBS_TTL_SECONDS = 1209600 # 14 days
 GNQC_REDIS_PREFIX="GNQC"
diff --git a/scripts/phenotypes/__init__.py b/scripts/phenotypes/__init__.py
new file mode 100644
index 0000000..73ad839
--- /dev/null
+++ b/scripts/phenotypes/__init__.py
@@ -0,0 +1 @@
+"Scripts for dealing with phenotypes."
diff --git a/scripts/phenotypes/delete_phenotypes.py b/scripts/phenotypes/delete_phenotypes.py
new file mode 100644
index 0000000..461f3ec
--- /dev/null
+++ b/scripts/phenotypes/delete_phenotypes.py
@@ -0,0 +1,173 @@
+"""Delete phenotypes."""
+import sys
+import logging
+from pathlib import Path
+from typing import Optional
+from urllib.parse import urljoin
+from argparse import Namespace, ArgumentParser
+
+import requests
+from MySQLdb.cursors import DictCursor, BaseCursor
+
+from gn_libs.mysqldb import database_connection
+
+from uploader.phenotypes.models import delete_phenotypes
+from scripts.cli.logging import setup_logging
+from scripts.cli.options import (add_logging,
+                                 add_mariadb_uri,
+                                 add_population_id)
+
+logger = logging.getLogger(__name__)
+
+
+def read_xref_ids_file(filepath: Optional[Path]) -> tuple[int, ...]:
+    """Read the phenotypes' cross-reference IDs from file."""
+    if filepath is None:
+        return tuple()
+
+    logger.debug("Using file '%s' to retrieve XREF IDs for deletion.",
+                 filepath.name)
+    _ids: tuple[int, ...] = tuple()
+    with filepath.open(mode="r") as infile:
+        for line in infile.readlines():
+            try:
+                _ids += (int(line.strip()),)
+            except ValueError:# skip lines that do not hold a numeric ID
+                pass
+
+    return _ids
+
+
+def fetch_all_xref_ids(
+        cursor: BaseCursor, population_id: int) -> tuple[int, ...]:
+    """Fetch all cross-reference IDs."""
+    cursor.execute("SELECT Id FROM PublishXRef WHERE InbredSetId=%s",
+                   (population_id,))
+    return tuple(int(row["Id"]) for row in cursor.fetchall())
+
+
+def update_auth(
+        auth_details: tuple[str, str],
+        species_id: int,
+        population_id: int,
+        dataset_id: int,
+        xref_ids: tuple[int, ...] = tuple()
+):
+    """Update the authorisation server: remove items to delete."""
+    authserver, token = auth_details
+    resp = requests.post(
+        urljoin(authserver,
+                (f"/auth/data/phenotypes/{species_id}/{population_id}"
+                 f"/{dataset_id}/delete")),
+        timeout=(9.13, 20),
+        headers={
+            "Authorization": f"Bearer {token}",
+            "Content-Type": "application/json"
+        },
+        json={"xref_ids": xref_ids})
+    resp.raise_for_status()
+
+
+def delete_the_phenotypes(
+        cursor: BaseCursor,
+        population_id: int,
+        xref_ids: tuple[int, ...] = tuple()) -> int:
+    """Process and delete the phenotypes."""
+    delete_phenotypes(cursor, population_id, xref_ids)
+
+    return 0
+
+
+if __name__ == "__main__":
+    def parse_args() -> Namespace:
+        """Parse CLI arguments."""
+        parser = add_logging(
+            add_population_id(
+                add_mariadb_uri(
+                    ArgumentParser(
+                        prog="delete-phenotypes",
+                        description=(
+                            "Script to delete phenotypes from the database.")))))
+        parser.add_argument(
+            "dataset_id",
+            metavar="DATASET-ID",
+            type=int,
+            help="The dataset identifier for phenotypes to delete.")
+        parser.add_argument(
+            "auth_server_uri",
+            metavar="AUTH-SERVER-URI",
+            type=str,
+            help="URI to the authorisation server.")
+        parser.add_argument(
+            "auth_token",
+            metavar="AUTH-TOKEN",
+            type=str,
+            help=("Token to use to update the authorisation system with the "
+                  "deletions done."))
+        parser.add_argument(
+            "--xref_ids_file",
+            metavar="XREF-IDS-FILE",
+            type=Path,
+            help=("Path to a file with phenotypes cross-reference IDs to "
+                  "delete."))
+        parser.add_argument(
+            "--delete-all",
+            action="store_true",
+            help=("If no 'XREF-IDS-FILE' is provided, this flag determines "
+                  "whether or not all the phenotypes for the given population "
+                  "will be deleted."))
+        return parser.parse_args()
+
+
+    def main():
+        """The `delete-phenotypes` script's entry point."""
+        args = parse_args()
+        setup_logging(logger, args.log_level.upper(), tuple())
+        with (database_connection(args.db_uri) as conn,
+              conn.cursor(cursorclass=DictCursor) as cursor):
+            xref_ids = read_xref_ids_file(args.xref_ids_file)
+            try:
+                assert not (len(xref_ids) > 0 and args.delete_all)
+                xref_ids = (fetch_all_xref_ids(cursor, args.population_id)
+                            if args.delete_all else xref_ids)
+                logger.debug("Will delete %s phenotypes and related data",
+                             len(xref_ids))
+                if len(xref_ids) == 0:
+                    print("No cross-reference IDs were provided. Aborting.")
+                    return 0
+
+                print("Updating authorisations: ", end="")
+                update_auth((args.auth_server_uri, args.auth_token),
+                            args.species_id,
+                            args.population_id,
+                            args.dataset_id,
+                            xref_ids)
+                print("OK.")
+                print("Deleting the data: ", end="")
+                delete_phenotypes(cursor, args.population_id, xref_ids=xref_ids)
+                print("OK.")
+                if args.xref_ids_file is not None:
+                    print("Deleting temporary file: ", end="")
+                    args.xref_ids_file.unlink()
+                    print("OK.")
+
+                return 0
" + "If you specify the list of XREF-IDS (in a file) to delete " + "and also specify to 'DELETE-ALL' phenotypes in the " + "population, we have no way of knowing what it is you want.") + return 1 + except requests.exceptions.HTTPError as _exc: + resp = _exc.response + resp_data = resp.json() + logger.debug("%s: %s", + resp_data["error"], + resp_data["error_description"], + exc_info=True) + return 1 + except Exception as _exc:# pylint: disable=[broad-exception-caught] + logger.debug("Failed while attempting to delete phenotypes.", + exc_info=True) + return 1 + + sys.exit(main()) diff --git a/scripts/run_qtlreaper.py b/scripts/run_qtlreaper.py index 7d58402..54e5d45 100644 --- a/scripts/run_qtlreaper.py +++ b/scripts/run_qtlreaper.py @@ -169,7 +169,7 @@ def dispatch(args: Namespace) -> int: logger.info("Successfully computed p values for %s traits.", len(_traitsdata)) return 0 except FileNotFoundError as fnf: - logger.error(", ".join(fnf.args), exc_info=False) + logger.error(", ".join(str(arg) for arg in fnf.args), exc_info=False) except AssertionError as aserr: logger.error(", ".join(aserr.args), exc_info=False) except Exception as _exc:# pylint: disable=[broad-exception-caught] diff --git a/tests/conftest.py b/tests/conftest.py index a716c52..2009aab 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -183,7 +183,7 @@ def redis_conn_with_completed_job_some_errors(redis_url, redis_ttl, jobs_prefix, def uploads_dir(client): # pylint: disable=[redefined-outer-name] """Returns the configured, uploads directory, creating it if it does not exist.""" - the_dir = client.application.config["UPLOAD_FOLDER"] + the_dir = client.application.config["UPLOADS_DIRECTORY"] if not os.path.exists(the_dir): os.mkdir(the_dir) diff --git a/tests/test_instance_dir/config.py b/tests/test_instance_dir/config.py index 2ee569b..f04b3df 100644 --- a/tests/test_instance_dir/config.py +++ b/tests/test_instance_dir/config.py @@ -6,6 +6,6 @@ import os LOG_LEVEL = os.getenv("LOG_LEVEL", "WARNING") SECRET_KEY = b"<Please! Please! Please! 
Change This!>" -UPLOAD_FOLDER = "/tmp/qc_app_files" +UPLOADS_DIRECTORY = "/tmp/qc_app_files" REDIS_URL = "redis://" JOBS_TTL_SECONDS = 600 # 10 minutes diff --git a/tests/uploader/test_parse.py b/tests/uploader/test_parse.py index 20c75b7..56e1b41 100644 --- a/tests/uploader/test_parse.py +++ b/tests/uploader/test_parse.py @@ -50,7 +50,7 @@ def test_parse_with_existing_uploaded_file( assert the_job["command"] == " ".join([ sys.executable, "-m", "scripts.validate_file", db_url, redis_url, jobs_prefix, job_id, "--redisexpiry", str(redis_ttl), str(speciesid), - filetype, f"{client.application.config['UPLOAD_FOLDER']}/{filename}"]) + filetype, f"{client.application.config['UPLOADS_DIRECTORY']}/{filename}"]) @pytest.mark.parametrize( "filename,uri,error_msgs", diff --git a/uploader/__init__.py b/uploader/__init__.py index 0ba1f81..46689c5 100644 --- a/uploader/__init__.py +++ b/uploader/__init__.py @@ -73,6 +73,28 @@ def setup_modules_logging(app_logger, modules): _logger.setLevel(loglevel) +def __setup_scratch_directory__(app: Flask) -> Flask: + app.config["SCRATCH_DIRECTORY"] = Path( + app.config["SCRATCH_DIRECTORY"]).absolute() + return app + +def __setup_upload_directory__(app: Flask) -> Flask: + if app.config.get("UPLOADS_DIRECTORY", "").strip() == "": + app.config["UPLOADS_DIRECTORY"] = app.config[ + "SCRATCH_DIRECTORY"].joinpath("uploads") + else: + app.config["UPLOADS_DIRECTORY"] = Path( + app.config["UPLOADS_DIRECTORY"].strip()).absolute() + + return app + + +def update_unspecified_defaults(app: Flask) -> Flask: + """Setup the defaults for necessary configurations that do not have values + specified for them.""" + return __setup_upload_directory__(__setup_scratch_directory__(app)) + + def create_app(config: Optional[dict] = None): """The application factory. @@ -100,6 +122,7 @@ def create_app(config: Optional[dict] = None): # Silently ignore secrets if the file does not exist. app.config.from_pyfile(secretsfile) app.config.update(config) # Override everything with passed in config + update_unspecified_defaults(app) ### END: Application configuration app.config["SESSION_CACHELIB"] = FileSystemCache( diff --git a/uploader/configutils.py b/uploader/configutils.py new file mode 100644 index 0000000..c5db50b --- /dev/null +++ b/uploader/configutils.py @@ -0,0 +1,13 @@ +"""Functions to fetch settings.""" +from pathlib import Path + +def fetch_setting(app, setting): + """Fetch a specified configuration `setting` from the `app` object.""" + return app.config[setting] + +def uploads_dir(app) -> Path: + """Fetch the uploads directory""" + _dir = Path(fetch_setting(app, "UPLOADS_DIRECTORY")).absolute() + assert _dir.exists() and _dir.is_dir(), ( + f"'{_dir}' needs to be an existing directory.") + return _dir diff --git a/uploader/default_settings.py b/uploader/default_settings.py index 52cdad5..6381a67 100644 --- a/uploader/default_settings.py +++ b/uploader/default_settings.py @@ -5,8 +5,14 @@ actual configuration file used for the production and staging systems. LOG_LEVEL = "WARNING" SECRET_KEY = b"<Please! Please! Please! Change This!>" -UPLOAD_FOLDER = "/tmp/qc_app_files" -TEMPORARY_DIRECTORY = "/tmp/gn-uploader-tmpdir" + +# Scratch directory and uploads: +# *** The scratch directory *** +# We avoid `/tmp` entirely for the scratch directory to avoid shared global +# mutable state with other users/applications/processes. +SCRATCH_DIRECTORY = "~/tmp/gn-uploader-scratchdir" +UPLOADS_DIRECTORY = ""# If not set, will be under scratch directory. 
diff --git a/uploader/default_settings.py b/uploader/default_settings.py
index 52cdad5..6381a67 100644
--- a/uploader/default_settings.py
+++ b/uploader/default_settings.py
@@ -5,8 +5,14 @@ actual configuration file used for the production and staging systems.
 
 LOG_LEVEL = "WARNING"
 SECRET_KEY = b"<Please! Please! Please! Change This!>"
-UPLOAD_FOLDER = "/tmp/qc_app_files"
-TEMPORARY_DIRECTORY = "/tmp/gn-uploader-tmpdir"
+
+# Scratch directory and uploads:
+# *** The scratch directory ***
+# We avoid `/tmp` entirely for the scratch directory to avoid shared global
+# mutable state with other users/applications/processes.
+SCRATCH_DIRECTORY = "~/tmp/gn-uploader-scratchdir"
+UPLOADS_DIRECTORY = ""# If not set, will be under scratch directory.
+
 REDIS_URL = "redis://"
 JOBS_TTL_SECONDS = 1209600 # 14 days
 GNQC_REDIS_PREFIX="gn-uploader"
diff --git a/uploader/expression_data/dbinsert.py b/uploader/expression_data/dbinsert.py
index 6d8ce80..7040698 100644
--- a/uploader/expression_data/dbinsert.py
+++ b/uploader/expression_data/dbinsert.py
@@ -94,7 +94,7 @@ def select_platform():
         job = jobs.job(rconn, jobs.jobsnamespace(), job_id)
         if job:
             filename = job["filename"]
-            filepath = f"{app.config['UPLOAD_FOLDER']}/{filename}"
+            filepath = f"{app.config['UPLOADS_DIRECTORY']}/{filename}"
             if os.path.exists(filepath):
                 default_species = 1
                 gchips = genechips()
@@ -367,7 +367,7 @@ def insert_data():
         assert form.get("datasetid"), "dataset"
         filename = form["filename"]
-        filepath = f"{app.config['UPLOAD_FOLDER']}/{filename}"
+        filepath = f"{app.config['UPLOADS_DIRECTORY']}/{filename}"
         redisurl = app.config["REDIS_URL"]
         if os.path.exists(filepath):
             with Redis.from_url(redisurl, decode_responses=True) as rconn:
@@ -377,7 +377,7 @@ def insert_data():
                     form["species"], form["genechipid"], form["datasetid"],
                     app.config["SQL_URI"], redisurl,
                     app.config["JOBS_TTL_SECONDS"]),
-                    redisurl, f"{app.config['UPLOAD_FOLDER']}/job_errors")
+                    redisurl, f"{app.config['UPLOADS_DIRECTORY']}/job_errors")
                 return redirect(url_for("dbinsert.insert_status", job_id=job["jobid"]))
             return render_error(f"File '{filename}' no longer exists.")
diff --git a/uploader/expression_data/views.py b/uploader/expression_data/views.py
index 0b318b7..0e9b072 100644
--- a/uploader/expression_data/views.py
+++ b/uploader/expression_data/views.py
@@ -162,7 +162,7 @@ def upload_file(species_id: int, population_id: int):
                                species=species,
                                population=population)
 
-    upload_dir = app.config["UPLOAD_FOLDER"]
+    upload_dir = app.config["UPLOADS_DIRECTORY"]
     request_errors = errors(request)
     if request_errors:
         for error in request_errors:
@@ -225,7 +225,7 @@ def parse_file(species_id: int, population_id: int):
         _errors = True
 
     if filename:
-        filepath = os.path.join(app.config["UPLOAD_FOLDER"], filename)
+        filepath = os.path.join(app.config["UPLOADS_DIRECTORY"], filename)
         if not os.path.exists(filepath):
             flash("Selected file does not exist (any longer)", "alert-danger")
             _errors = True
@@ -241,7 +241,7 @@ def parse_file(species_id: int, population_id: int):
             species_id, filepath, filetype,# type: ignore[arg-type]
             app.config["JOBS_TTL_SECONDS"]),
         redisurl,
-        f"{app.config['UPLOAD_FOLDER']}/job_errors")
+        f"{app.config['UPLOADS_DIRECTORY']}/job_errors")
 
     return redirect(url_for("species.populations.expression-data.parse_status",
                             species_id=species_id,
@@ -263,7 +263,7 @@ def parse_status(species_id: int, population_id: int, job_id: str):
         return render_template("no_such_job.html", job_id=job_id), 400
 
     error_filename = jobs.error_filename(
-        job_id, f"{app.config['UPLOAD_FOLDER']}/job_errors")
+        job_id, f"{app.config['UPLOADS_DIRECTORY']}/job_errors")
     if os.path.exists(error_filename):
         stat = os.stat(error_filename)
         if stat.st_size > 0:
@@ -345,7 +345,7 @@ def fail(species_id: int, population_id: int, job_id: str):
     if job:
         error_filename = jobs.error_filename(
-            job_id, f"{app.config['UPLOAD_FOLDER']}/job_errors")
+            job_id, f"{app.config['UPLOADS_DIRECTORY']}/job_errors")
         if os.path.exists(error_filename):
             stat = os.stat(error_filename)
             if stat.st_size > 0:
diff --git a/uploader/files/chunks.py b/uploader/files/chunks.py
index c4360b5..f63f32f 100644
--- a/uploader/files/chunks.py
+++ b/uploader/files/chunks.py
@@ -5,6 +5,8 @@ from typing import Iterator
 
 from flask import current_app as app
 from werkzeug.utils import secure_filename
 
+from uploader.configutils import uploads_dir
+
 
 def chunked_binary_read(filepath: Path, chunksize: int = 2048) -> Iterator:
     """Read a file in binary mode in chunks."""
@@ -29,4 +31,4 @@ def chunks_directory(uniqueidentifier: str) -> Path:
     """Compute the directory where chunks are temporarily stored."""
     if uniqueidentifier == "":
         raise ValueError("Unique identifier cannot be empty!")
-    return Path(app.config["UPLOAD_FOLDER"], f"tempdir_{uniqueidentifier}")
+    return Path(uploads_dir(app), f"tempdir_{uniqueidentifier}")
diff --git a/uploader/files/functions.py b/uploader/files/functions.py
index 7b9f06b..68f4e16 100644
--- a/uploader/files/functions.py
+++ b/uploader/files/functions.py
@@ -8,6 +8,8 @@ from flask import current_app
 from werkzeug.utils import secure_filename
 from werkzeug.datastructures import FileStorage
 
+from uploader.configutils import uploads_dir
+
 from .chunks import chunked_binary_read
 
 def save_file(fileobj: FileStorage, upload_dir: Path, hashed: bool = True) -> Path:
@@ -30,7 +32,7 @@ def save_file(fileobj: FileStorage, upload_dir: Path, hashed: bool = True) -> Pa
 
 def fullpath(filename: str):
     """Get a file's full path. This makes use of `flask.current_app`."""
-    return Path(current_app.config["UPLOAD_FOLDER"], filename).absolute()
+    return Path(uploads_dir(current_app), filename).absolute()
 
 
 def sha256_digest_over_file(filepath: Path) -> str:
diff --git a/uploader/files/views.py b/uploader/files/views.py
index 29059c7..ea0e827 100644
--- a/uploader/files/views.py
+++ b/uploader/files/views.py
@@ -6,13 +6,15 @@ from pathlib import Path
 
 from flask import request, jsonify, Blueprint, current_app as app
 
+from uploader.configutils import uploads_dir
+
 from .chunks import chunk_name, chunks_directory
 
 files = Blueprint("files", __name__)
 
 def target_file(fileid: str) -> Path:
     """Compute the full path for the target file."""
-    return Path(app.config["UPLOAD_FOLDER"], fileid)
+    return Path(uploads_dir(app), fileid)
 
 
 @files.route("/upload/resumable", methods=["GET"])
diff --git a/uploader/phenotypes/models.py b/uploader/phenotypes/models.py
index a22497c..06c417f 100644
--- a/uploader/phenotypes/models.py
+++ b/uploader/phenotypes/models.py
@@ -1,4 +1,6 @@
 """Database and utility functions for phenotypes."""
+import time
+import random
 import logging
 import tempfile
 from pathlib import Path
@@ -6,8 +8,8 @@ from functools import reduce
 from datetime import datetime
 from typing import Union, Optional, Iterable
 
-import MySQLdb as mdb
-from MySQLdb.cursors import Cursor, DictCursor
+from MySQLdb.connections import Connection
+from MySQLdb.cursors import Cursor, DictCursor, BaseCursor
 
 from gn_libs.mysqldb import debug_query
 
@@ -27,7 +29,7 @@ __PHENO_DATA_TABLES__ = {
 
 
 def datasets_by_population(
-        conn: mdb.Connection,
+        conn: Connection,
         species_id: int,
         population_id: int
 ) -> tuple[dict, ...]:
@@ -42,7 +44,7 @@ def datasets_by_population(
     return tuple(dict(row) for row in cursor.fetchall())
 
 
-def dataset_by_id(conn: mdb.Connection,
+def dataset_by_id(conn: Connection,
                   species_id: int,
                   population_id: int,
                   dataset_id: int) -> dict:
@@ -57,7 +59,7 @@ def dataset_by_id(
     return dict(cursor.fetchone())
 
 
-def phenotypes_count(conn: mdb.Connection,
+def phenotypes_count(conn: Connection,
                      population_id: int,
                      dataset_id: int) -> int:
     """Count the number of phenotypes in the dataset."""
@@ -85,11 +87,14 @@ def phenotype_publication_data(conn, phenotype_id) -> Optional[dict]:
     return dict(res)
 
 
-def dataset_phenotypes(conn: mdb.Connection,
-                       population_id: int,
-                       dataset_id: int,
-                       offset: int = 0,
-                       limit: Optional[int] = None) -> tuple[dict, ...]:
+def dataset_phenotypes(
+        conn: Connection,
+        population_id: int,
+        dataset_id: int,
+        offset: int = 0,
+        limit: Optional[int] = None,
+        xref_ids: tuple[int, ...] = tuple()
+) -> tuple[dict, ...]:
     """Fetch the actual phenotypes."""
     _query = (
         "SELECT pheno.*, pxr.Id AS xref_id, pxr.InbredSetId, ist.InbredSetCode "
@@ -98,14 +103,16 @@ def dataset_phenotypes(
         "INNER JOIN PublishFreeze AS pf ON pxr.InbredSetId=pf.InbredSetId "
         "INNER JOIN InbredSet AS ist ON pf.InbredSetId=ist.Id "
         "WHERE pxr.InbredSetId=%s AND pf.Id=%s") + (
+            f" AND pxr.Id IN ({', '.join(['%s'] * len(xref_ids))})"
+            if len(xref_ids) > 0 else "") + (
                 f" LIMIT {limit} OFFSET {offset}" if bool(limit) else "")
     with conn.cursor(cursorclass=DictCursor) as cursor:
-        cursor.execute(_query, (population_id, dataset_id))
+        cursor.execute(_query, (population_id, dataset_id) + xref_ids)
         debug_query(cursor, logger)
         return tuple(dict(row) for row in cursor.fetchall())
 
 
-def __phenotype_se__(cursor: Cursor, xref_id, dataids_and_strainids):
+def __phenotype_se__(cursor: BaseCursor, xref_id, dataids_and_strainids):
     """Fetch standard-error values (if they exist) for a phenotype."""
     paramstr = ", ".join(["(%s, %s)"] * len(dataids_and_strainids))
     flat = tuple(item for sublist in dataids_and_strainids for item in sublist)
@@ -187,7 +194,7 @@ def __merge_pheno_data_and_se__(data, sedata) -> dict:
 
 
 def phenotype_by_id(
-        conn: mdb.Connection,
+        conn: Connection,
         species_id: int,
         population_id: int,
         dataset_id: int,
@@ -225,7 +232,7 @@ def phenotype_by_id(
     return None
 
 
-def phenotypes_data(conn: mdb.Connection,
+def phenotypes_data(conn: Connection,
                     population_id: int,
                     dataset_id: int,
                     offset: int = 0,
@@ -249,7 +256,7 @@ def phenotypes_data(
 
 
 def phenotypes_vector_data(# pylint: disable=[too-many-arguments, too-many-positional-arguments]
-        conn: mdb.Connection,
+        conn: Connection,
         species_id: int,
         population_id: int,
         xref_ids: tuple[int, ...] = tuple(),
@@ -301,7 +308,7 @@ def phenotypes_vector_data(
     return reduce(__organise__, cursor.fetchall(), {})
 
 
-def save_new_dataset(cursor: Cursor,
+def save_new_dataset(cursor: BaseCursor,
                      population_id: int,
                      dataset_name: str,
                      dataset_fullname: str,
@@ -346,7 +353,7 @@ def __pre_process_phenotype_data__(row):
 
 
 def create_new_phenotypes(# pylint: disable=[too-many-locals]
-        conn: mdb.Connection,
+        conn: Connection,
         population_id: int,
         publication_id: int,
         phenotypes: Iterable[dict]
@@ -474,7 +481,7 @@ def create_new_phenotypes(
 
 
 def save_phenotypes_data(
-        conn: mdb.Connection,
+        conn: Connection,
         table: str,
         data: Iterable[dict]
 ) -> int:
@@ -504,7 +511,7 @@ def save_phenotypes_data(
 
 
 def quick_save_phenotypes_data(
-        conn: mdb.Connection,
+        conn: Connection,
         table: str,
         dataitems: Iterable[dict],
         tmpdir: Path
@@ -534,3 +541,134 @@ def quick_save_phenotypes_data(
                 ")")
             debug_query(cursor, logger)
         return _count
+
+
+def __sleep_random__():
+    """Sleep a random amount of time chosen from 0.05s to 1s in increments of 0.05s."""
+    time.sleep(random.choice(tuple(i / 20.0 for i in range(1, 21))))
+
+
+def delete_phenotypes_data(
+        cursor: BaseCursor,
+        data_ids: tuple[int, ...]
+) -> tuple[int, int, int]:
+    """Delete numeric data for phenotypes with the given data IDs."""
+    if len(data_ids) == 0:
+        return (0, 0, 0)
+
+    # Loop to handle big deletes, i.e. ≥ 10000 rows.
+    _dcount, _secount, _ncount = (0, 0, 0)# Count total rows deleted
+    while True:
+        _paramstr = ", ".join(["%s"] * len(data_ids))
+        cursor.execute(
+            "DELETE FROM PublishData "
+            f"WHERE Id IN ({_paramstr}) "
+            "ORDER BY Id ASC, StrainId ASC "# Make deletions deterministic
+            "LIMIT 1000",
+            data_ids)
+        _dcount_curr = cursor.rowcount
+        _dcount += _dcount_curr
+
+        cursor.execute(
+            "DELETE FROM PublishSE "
+            f"WHERE DataId IN ({_paramstr}) "
+            "ORDER BY DataId ASC, StrainId ASC "# Make deletions deterministic
+            "LIMIT 1000",
+            data_ids)
+        _secount_curr = cursor.rowcount
+        _secount += _secount_curr
+
+        cursor.execute(
+            "DELETE FROM NStrain "
+            f"WHERE DataId IN ({_paramstr}) "
+            "ORDER BY DataId ASC, StrainId ASC "# Make deletions deterministic
+            "LIMIT 1000",
+            data_ids)
+        _ncount_curr = cursor.rowcount
+        _ncount += _ncount_curr
+        __sleep_random__()
+
+        if all((_dcount_curr == 0, _secount_curr == 0, _ncount_curr == 0)):
+            # end loop if there are no more rows to delete.
+            break
+
+    return (_dcount, _secount, _ncount)
+
+
+def __linked_ids__(
+        cursor: BaseCursor,
+        population_id: int,
+        xref_ids: tuple[int, ...]
+) -> tuple[tuple[int, int, int], ...]:
+    """Retrieve `DataId` values from `PublishXRef` table."""
+    _paramstr = ", ".join(["%s"] * len(xref_ids))
+    cursor.execute("SELECT PhenotypeId, PublicationId, DataId "
+                   "FROM PublishXRef "
+                   f"WHERE InbredSetId=%s AND Id IN ({_paramstr})",
+                   (population_id,) + xref_ids)
+    return tuple(
+        (int(row["PhenotypeId"]), int(row["PublicationId"]), int(row["DataId"]))
+        for row in cursor.fetchall())
+
+
+def delete_phenotypes(
+        conn_or_cursor: Union[Connection, Cursor],
+        population_id: int,
+        xref_ids: tuple[int, ...]
+) -> tuple[int, int, int, int]:
+    """Delete phenotypes and all their data."""
+    def __delete_phenos__(cursor: BaseCursor, pheno_ids: tuple[int, ...]) -> int:
+        """Delete data from the `Phenotype` table."""
+        _paramstr = ", ".join(["%s"] * len(pheno_ids))
+
+        _pcount = 0
+        while True:
+            cursor.execute(
+                "DELETE FROM Phenotype "
+                f"WHERE Id IN ({_paramstr}) "
+                "ORDER BY Id "
+                "LIMIT 1000",
+                pheno_ids)
+            _pcount_curr = cursor.rowcount
+            _pcount += _pcount_curr
+            __sleep_random__()
+            if _pcount_curr == 0:
+                break
+
+        return _pcount
+
+    def __delete_xrefs__(cursor: BaseCursor) -> int:
+        _paramstr = ", ".join(["%s"] * len(xref_ids))
+
+        _xcount = 0
+        while True:
+            cursor.execute(
+                "DELETE FROM PublishXRef "
+                f"WHERE InbredSetId=%s AND Id IN ({_paramstr}) "
+                "ORDER BY Id "
+                "LIMIT 10000",
+                (population_id,) + xref_ids)
+            _xcount_curr = cursor.rowcount
+            _xcount += _xcount_curr
+            __sleep_random__()
+            if _xcount_curr == 0:
+                break
+
+        return _xcount
+
+    def __with_cursor__(cursor):
+        _phenoids, _pubids, _dataids = reduce(
+            lambda acc, curr: (acc[0] + (curr[0],),
+                               acc[1] + (curr[1],),
+                               acc[2] + (curr[2],)),
+            __linked_ids__(cursor, population_id, xref_ids),
+            (tuple(), tuple(), tuple()))
+        __delete_phenos__(cursor, _phenoids)
+        return (__delete_xrefs__(cursor),) + delete_phenotypes_data(
+            cursor, _dataids)
+
+    if isinstance(conn_or_cursor, BaseCursor):
+        return __with_cursor__(conn_or_cursor)
+
+    with conn_or_cursor.cursor(cursorclass=DictCursor) as cursor:
+        return __with_cursor__(cursor)
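The three deletion helpers above all follow the same batched-delete pattern; distilled, it looks like the sketch below (the table name is a placeholder, not code from this commit). Deleting in small, deterministically-ordered batches caps each statement's lock footprint, and the random sleep between batches gives concurrent transactions a chance to run during large deletes.

    # The batching pattern used above, distilled. "SomeTable" is a placeholder;
    # `__sleep_random__` is the module-level helper defined in this diff.
    def batched_delete(cursor, ids: tuple[int, ...]) -> int:
        paramstr = ", ".join(["%s"] * len(ids))
        total = 0
        while True:
            cursor.execute(
                "DELETE FROM SomeTable "
                f"WHERE Id IN ({paramstr}) "
                "ORDER BY Id ASC "  # deterministic batch contents
                "LIMIT 1000",       # bounded lock footprint per statement
                ids)
            if cursor.rowcount == 0:
                return total
            total += cursor.rowcount
            __sleep_random__()  # brief backoff between batches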
diff --git a/uploader/phenotypes/views.py b/uploader/phenotypes/views.py
index 60d5775..776fa52 100644
--- a/uploader/phenotypes/views.py
+++ b/uploader/phenotypes/views.py
@@ -34,6 +34,7 @@
 from r_qtl import exceptions as rqe
 
 from uploader import jobs
 from uploader import session
 from uploader.files import save_file
+from uploader.configutils import uploads_dir
 from uploader.flask_extensions import url_for
 from uploader.ui import make_template_renderer
 from uploader.oauth2.client import oauth2_post
@@ -329,7 +330,7 @@ def process_phenotypes_rqtl2_bundle(error_uri):
     try:
         ## Handle huge files here...
         phenobundle = save_file(request.files["phenotypes-bundle"],
-                                Path(app.config["UPLOAD_FOLDER"]))
+                                uploads_dir(app))
         rqc.validate_bundle(phenobundle)
         return phenobundle
     except AssertionError as _aerr:
@@ -352,7 +353,7 @@ def process_phenotypes_individual_files(error_uri):
         "comment.char": form["file-comment-character"],
         "na.strings": form["file-na"].split(" "),
     }
-    bundlepath = Path(app.config["UPLOAD_FOLDER"],
+    bundlepath = Path(uploads_dir(app),
                       f"{str(uuid.uuid4()).replace('-', '')}.zip")
     with ZipFile(bundlepath,mode="w") as zfile:
         for rqtlkey, formkey, _type in (
@@ -370,7 +371,7 @@ def process_phenotypes_individual_files(error_uri):
                 # Chunked upload of large files was used
                 filedata = json.loads(form[formkey])
                 zfile.write(
-                    Path(app.config["UPLOAD_FOLDER"], filedata["uploaded-file"]),
+                    Path(uploads_dir(app), filedata["uploaded-file"]),
                     arcname=filedata["original-name"])
                 cdata[rqtlkey] = cdata.get(rqtlkey, []) + [filedata["original-name"]]
             else:
@@ -382,9 +383,9 @@ def process_phenotypes_individual_files(error_uri):
                     return error_uri
 
                 filepath = save_file(
-                    _sentfile, Path(app.config["UPLOAD_FOLDER"]), hashed=False)
+                    _sentfile, uploads_dir(app), hashed=False)
                 zfile.write(
-                    Path(app.config["UPLOAD_FOLDER"], filepath),
+                    Path(uploads_dir(app), filepath),
                     arcname=filepath.name)
                 cdata[rqtlkey] = cdata.get(rqtlkey, []) + [filepath.name]
 
@@ -464,7 +465,7 @@ def add_phenotypes(species: dict, population: dict, dataset: dict, **kwargs):# p
                         **({"publicationid": request.form["publication-id"]}
                            if request.form.get("publication-id") else {})})}),
             _redisuri,
-            f"{app.config['UPLOAD_FOLDER']}/job_errors")
+            f"{uploads_dir(app)}/job_errors")
 
         app.logger.debug("JOB DETAILS: %s", _job)
         jobstatusuri = url_for("species.populations.phenotypes.job_status",
@@ -611,6 +612,11 @@ def load_phenotypes_success_handler(job):
         job_id=job["job_id"]))
 
 
+def proceed_to_job_status(job):
+    app.logger.debug("The new job: %s", job)
+    return redirect(url_for("background-jobs.job_status", job_id=job["job_id"]))
+
+
 @phenotypesbp.route(
     "<int:species_id>/populations/<int:population_id>/phenotypes/datasets"
     "/<int:dataset_id>/load-data-to-database",
@@ -653,11 +659,6 @@ def load_data_to_database(
     def __handle_error__(resp):
         return render_template("http-error.html", *resp.json())
 
-    def __handle_success__(load_job):
-        app.logger.debug("The phenotypes loading job: %s", load_job)
-        return redirect(url_for(
-            "background-jobs.job_status", job_id=load_job["job_id"]))
-
     return request_token(
         token_uri=urljoin(oauth2client.authserver_uri(),
                           "auth/token"),
@@ -685,10 +686,10 @@ def load_data_to_database(
         lambda job: gnlibs_jobs.launch_job(
             job,
             _jobs_db,
-            Path(f"{app.config['UPLOAD_FOLDER']}/job_errors"),
+            Path(f"{uploads_dir(app)}/job_errors"),
             worker_manager="gn_libs.jobs.launcher",
             loglevel=_loglevel)
-    ).either(__handle_error__, __handle_success__)
+    ).either(__handle_error__, proceed_to_job_status)
 
 
 def update_phenotype_metadata(conn, metadata: dict):
@@ -1063,7 +1064,7 @@ def recompute_means(# pylint: disable=[unused-argument]
             },
             external_id=session.logged_in_user_id()),
         _jobs_db,
-        Path(f"{app.config['UPLOAD_FOLDER']}/job_errors"),
+        Path(f"{uploads_dir(app)}/job_errors"),
         worker_manager="gn_libs.jobs.launcher",
         loglevel=_loglevel)
redirect(url_for("background-jobs.job_status", @@ -1105,7 +1106,7 @@ def rerun_qtlreaper(# pylint: disable=[unused-argument] _job_id = uuid.uuid4() _loglevel = logging.getLevelName(app.logger.getEffectiveLevel()).lower() - _workingdir = Path(app.config["TEMPORARY_DIRECTORY"]).joinpath("qtlreaper") + _workingdir = Path(app.config["SCRATCH_DIRECTORY"]).joinpath("qtlreaper") _workingdir.mkdir(exist_ok=True) command = [ sys.executable, @@ -1143,7 +1144,7 @@ def rerun_qtlreaper(# pylint: disable=[unused-argument] }, external_id=session.logged_in_user_id()), _jobs_db, - Path(f"{app.config['UPLOAD_FOLDER']}/job_errors"), + Path(f"{uploads_dir(app)}/job_errors"), worker_manager="gn_libs.jobs.launcher", loglevel=_loglevel) return redirect(url_for("background-jobs.job_status", @@ -1155,3 +1156,119 @@ def rerun_qtlreaper(# pylint: disable=[unused-argument] def rerun_qtlreaper_success_handler(job): """Handle success (re)running QTLReaper script.""" return return_to_dataset_view_handler(job, "QTLReaper ran successfully!") + + +def delete_phenotypes_success_handler(job): + """Handle success running the 'delete-phenotypes' script.""" + return return_to_dataset_view_handler( + job, "Phenotypes deleted successfully.") + + +@phenotypesbp.route( + "<int:species_id>/populations/<int:population_id>/phenotypes/datasets" + "/<int:dataset_id>/delete", + methods=["GET", "POST"]) +@require_login +@with_dataset( + species_redirect_uri="species.populations.phenotypes.index", + population_redirect_uri="species.populations.phenotypes.select_population", + redirect_uri="species.populations.phenotypes.list_datasets") +def delete_phenotypes(# pylint: disable=[unused-argument] + species: dict, + population: dict, + dataset: dict, + **kwargs +): + """Delete the specified phenotype data.""" + _dataset_page = redirect(url_for( + "species.populations.phenotypes.view_dataset", + species_id=species["SpeciesId"], + population_id=population["Id"], + dataset_id=dataset["Id"])) + + def __handle_error__(resp): + flash( + "Error retrieving authorisation token. Phenotype deletion " + "failed. 
+            "failed. Please try again later.",
+            "alert alert-danger")
+        return _dataset_page
+
+    _jobs_db = app.config["ASYNCHRONOUS_JOBS_SQLITE_DB"]
+    with (database_connection(app.config["SQL_URI"]) as conn,
+          sqlite3.connection(_jobs_db) as jobsconn):
+        form = request.form
+        xref_ids = tuple(int(item) for item in set(form.getlist("xref_ids")))
+
+        match form.get("action"):
+            case "cancel":
+                return redirect(url_for(
+                    "species.populations.phenotypes.view_dataset",
+                    species_id=species["SpeciesId"],
+                    population_id=population["Id"],
+                    dataset_id=dataset["Id"]))
+            case "delete":
+                _loglevel = logging.getLevelName(
+                    app.logger.getEffectiveLevel()).lower()
+                if form.get("confirm_delete_all_phenotypes", "") == "on":
+                    _cmd = ["--delete-all"]
+                else:
+                    # setup phenotypes xref_ids file
+                    _xref_ids_file = Path(
+                        app.config["SCRATCH_DIRECTORY"],
+                        f"delete-phenotypes-{uuid.uuid4()}.txt")
+                    with _xref_ids_file.open(mode="w", encoding="utf8") as ptr:
+                        ptr.write("\n".join(str(_id) for _id in xref_ids))
+
+                    _cmd = ["--xref_ids_file", str(_xref_ids_file)]
+
+                _job_id = uuid.uuid4()
+                return request_token(
+                    token_uri=urljoin(
+                        oauth2client.authserver_uri(), "auth/token"),
+                    user_id=session.user_details()["user_id"]
+                ).then(
+                    lambda token: gnlibs_jobs.initialise_job(
+                        jobsconn,
+                        _job_id,
+                        [
+                            sys.executable,
+                            "-u",
+                            "-m",
+                            "scripts.phenotypes.delete_phenotypes",
+                            "--log-level", _loglevel,
+                            app.config["SQL_URI"],
+                            str(species["SpeciesId"]),
+                            str(population["Id"]),
+                            str(dataset["Id"]),
+                            app.config["AUTH_SERVER_URL"],
+                            token["access_token"]] + _cmd,
+                        "delete-phenotypes",
+                        extra_meta={
+                            "species_id": species["SpeciesId"],
+                            "population_id": population["Id"],
+                            "dataset_id": dataset["Id"],
+                            "success_handler": (
+                                "uploader.phenotypes.views."
+                                "delete_phenotypes_success_handler")
+                        },
+                        external_id=session.logged_in_user_id())
+                ).then(
+                    lambda _job: gnlibs_jobs.launch_job(
+                        _job,
+                        _jobs_db,
+                        Path(f"{uploads_dir(app)}/job_errors"),
+                        worker_manager="gn_libs.jobs.launcher",
+                        loglevel=_loglevel)
+                ).either(__handle_error__, proceed_to_job_status)
+            case _:
+                _phenos = tuple()
+                if len(xref_ids) > 0:
+                    _phenos = dataset_phenotypes(
+                        conn, population["Id"], dataset["Id"], xref_ids=xref_ids)
+
+                return render_template(
+                    "phenotypes/confirm-delete-phenotypes.html",
+                    species=species,
+                    population=population,
+                    dataset=dataset,
+                    phenotypes=_phenos)
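For orientation, the command assembled in the "delete" branch above comes out roughly as follows. Every concrete value is a placeholder, and the exact positional-argument order depends on the `scripts.cli.options` helpers on the script side:

    # Sketch of the argv handed to gn_libs' job launcher (placeholders only).
    import sys

    command = [
        sys.executable, "-u", "-m", "scripts.phenotypes.delete_phenotypes",
        "--log-level", "debug",
        "mysql://user:pass@localhost/db",  # app.config["SQL_URI"]
        "1", "5", "20",                    # species, population and dataset IDs
        "https://auth.example.org/",       # app.config["AUTH_SERVER_URL"]
        "<access-token>",
        "--xref_ids_file", "/path/to/scratch/delete-phenotypes-<uuid>.txt",
    ]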
diff --git a/uploader/population/rqtl2.py b/uploader/population/rqtl2.py
index 97d4854..bb5066e 100644
--- a/uploader/population/rqtl2.py
+++ b/uploader/population/rqtl2.py
@@ -134,7 +134,7 @@ def upload_rqtl2_bundle(species_id: int, population_id: int):
         try:
             app.logger.debug("Files in the form: %s", request.files)
             the_file = save_file(request.files["rqtl2_bundle_file"],
-                                 Path(app.config["UPLOAD_FOLDER"]))
+                                 Path(app.config["UPLOADS_DIRECTORY"]))
         except AssertionError:
             app.logger.debug(traceback.format_exc())
             flash("Please provide a valid R/qtl2 zip bundle.",
@@ -185,7 +185,7 @@ def trigger_rqtl2_bundle_qc(
                 "rqtl2-bundle-file": str(rqtl2bundle.absolute()),
                 "original-filename": originalfilename})}),
         redisuri,
-        f"{app.config['UPLOAD_FOLDER']}/job_errors")
+        f"{app.config['UPLOADS_DIRECTORY']}/job_errors")
     return jobid
 
 
@@ -895,7 +895,7 @@ def confirm_bundle_details(species_id: int, population_id: int):
                 })
             }),
             redisuri,
-            f"{app.config['UPLOAD_FOLDER']}/job_errors")
+            f"{app.config['UPLOADS_DIRECTORY']}/job_errors")
         return redirect(url_for("expression-data.rqtl2.rqtl2_processing_status",
                                 jobid=jobid))
diff --git a/uploader/samples/views.py b/uploader/samples/views.py
index f318bf0..2a09f8e 100644
--- a/uploader/samples/views.py
+++ b/uploader/samples/views.py
@@ -1,5 +1,4 @@
 """Code regarding samples"""
-import os
 import sys
 import uuid
 import logging
@@ -11,8 +10,8 @@ from flask import (flash,
                    Blueprint,
                    current_app as app)
 
+from gn_libs import jobs
 from gn_libs import sqlite3
-from gn_libs import jobs as jobs
 
 from uploader import session
 from uploader.files import save_file
@@ -26,8 +25,7 @@ from uploader.datautils import safe_int, enumerate_sequence
 from uploader.species.models import all_species, species_by_id
 from uploader.request_checks import with_species, with_population
 from uploader.db_utils import (with_db_connection,
-                               database_connection,
-                               with_redis_connection)
+                               database_connection)
 
 from .models import samples_by_species_and_population
 
@@ -140,7 +138,7 @@ def upload_samples(species_id: int, population_id: int):#pylint: disable=[too-ma
     try:
         samples_file = save_file(request.files["samples_file"],
-                                 Path(app.config["UPLOAD_FOLDER"]))
+                                 Path(app.config["UPLOADS_DIRECTORY"]))
     except AssertionError:
         flash("You need to provide a file with the samples data.",
               "alert-error")
@@ -174,12 +172,33 @@ def upload_samples(species_id: int, population_id: int):#pylint: disable=[too-ma
                 ] + (["--firstlineheading"] if firstlineheading else []),
                 "samples_upload",
                 extra_meta={
-                    "job_name": f"Samples Upload: {samples_file.name}"
+                    "job_name": f"Samples Upload: {samples_file.name}",
+                    "species_id": species["SpeciesId"],
+                    "population_id": population["Id"],
+                    "success_handler": (
+                        "uploader.samples.views.samples_upload_success_handler")
                 },
                 external_id=session.logged_in_user_id()),
             _jobs_db,
-            Path(f"{app.config['UPLOAD_FOLDER']}/job_errors").absolute(),
+            Path(f"{app.config['UPLOADS_DIRECTORY']}/job_errors").absolute(),
             loglevel=logging.getLevelName(
                 app.logger.getEffectiveLevel()).lower())
     return redirect(
         url_for("background-jobs.job_status", job_id=job["job_id"]))
+
+
+def samples_upload_success_handler(job):
+    """Handler for background jobs: Successful upload of samples."""
+    return return_to_samples_list_view_handler(
+        job, "Samples uploaded successfully.")
+
+
+def return_to_samples_list_view_handler(job, msg):
+    """Handler for background jobs: Return to list_samples page."""
+    flash(msg, "alert alert-success")
+    return redirect(url_for(
+        "species.populations.samples."
+ "list_samples", + species_id=job["metadata"]["species_id"], + population_id=job["metadata"]["population_id"], + job_id=job["job_id"])) diff --git a/uploader/session.py b/uploader/session.py index 9cb305b..9872ceb 100644 --- a/uploader/session.py +++ b/uploader/session.py @@ -4,8 +4,8 @@ from uuid import UUID, uuid4 from datetime import datetime from typing import Any, Optional, TypedDict +from flask import session from authlib.jose import KeySet -from flask import request, session from pymonad.either import Left, Right, Either logger = logging.getLogger(__name__) @@ -25,8 +25,6 @@ class SessionInfo(TypedDict): session_id: UUID user: UserDetails anon_id: UUID - user_agent: str - ip_addr: str masquerade: Optional[UserDetails] auth_server_jwks: Optional[dict[str, Any]] @@ -69,9 +67,6 @@ def session_info() -> SessionInfo: "logged_in": False }, "anon_id": anon_id, - "user_agent": request.headers.get("User-Agent"), - "ip_addr": request.environ.get("HTTP_X_FORWARDED_FOR", - request.remote_addr), "masquerading": None })) diff --git a/uploader/static/css/layout-common.css b/uploader/static/css/layout-common.css index 88e580c..9c9d034 100644 --- a/uploader/static/css/layout-common.css +++ b/uploader/static/css/layout-common.css @@ -2,20 +2,20 @@ box-sizing: border-box; } - body { - display: grid; - grid-gap: 1em; - } +body { + display: grid; + grid-gap: 1em; +} - #header { - margin: -0.7em; /* Fill entire length of screen */ - /* Define layout for the children elements */ - display: grid; - } +#header { + margin: -0.7em; /* Fill entire length of screen */ + /* Define layout for the children elements */ + display: grid; +} - #header #header-nav { - /* Place it in the parent element */ - grid-column-start: 1; - grid-column-end: 2; - display: flex; - } +#header #header-nav { + /* Place it in the parent element */ + grid-column-start: 1; + grid-column-end: 2; + display: flex; +} diff --git a/uploader/static/css/layout-medium.css b/uploader/static/css/layout-medium.css index bf10563..50ceeb4 100644 --- a/uploader/static/css/layout-medium.css +++ b/uploader/static/css/layout-medium.css @@ -49,7 +49,6 @@ /* Place it in the parent element */ grid-column-start: 1; grid-column-end: 2; - grid-gap: 5px; /* Define layout for the children elements */ max-width: 100%; diff --git a/uploader/static/css/layout-small.css b/uploader/static/css/layout-small.css index 80a3759..2e47217 100644 --- a/uploader/static/css/layout-small.css +++ b/uploader/static/css/layout-small.css @@ -2,7 +2,7 @@ body { display: grid; grid-template-columns: 1fr; - grid-template-rows: 1fr 2fr 7fr; + grid-template-rows: 1fr 90fr; grid-gap: 1em; } @@ -31,6 +31,11 @@ grid-column-end: 2; } + #header #header-nav ul { + display: grid; + grid-template-columns: 1fr; + } + #main { /* Place it in the parent element */ grid-column-start: 1; @@ -38,7 +43,7 @@ display: grid; /* Define layout for the children elements */ - grid-template-rows: 1.5em 80% 20%; + grid-template-rows: 1fr 80fr 20fr; grid-template-columns: 1fr; } diff --git a/uploader/static/js/datatables.js b/uploader/static/js/datatables.js index 82fd696..bfcda2a 100644 --- a/uploader/static/js/datatables.js +++ b/uploader/static/js/datatables.js @@ -11,13 +11,36 @@ var addTableLength = (menuList, lengthToAdd, dataLength) => { var defaultLengthMenu = (data) => { menuList = [] - var lengths = [10, 25, 50, 100, 1000, data.length]; + var lengths = [10, 25, 50, 100, 1000]; + if(data.length > 1000) { + lengths.push(data.length) + } lengths.forEach((len) => { menuList = addTableLength(menuList, len, 
         menuList = addTableLength(menuList, len, data.length);
     });
     return menuList;
 };
 
+var setRowCheckableProperty = (node, state) => {
+    /**
+     * Set a row's (`node`) checkbox's or radio button's checked state to the
+     * boolean value `state`.
+     **/
+    if(typeof(state) == "boolean") {
+        var pseudoclass = state == false ? ":checked" : ":not(:checked)";
+        var checkable = (
+            $(node).find(`input[type="checkbox"]${pseudoclass}`)[0]
+            ||
+            $(node).find(`input[type="radio"]${pseudoclass}`)[0]);
+        $(checkable).prop("checked", state);
+    } else {
+        throw new Error("`state` *MUST* be a boolean value.")
+    }
+};
+
+var setRowChecked = (node) => {setRowCheckableProperty(node, true);};
+var setRowUnchecked = (node) => {setRowCheckableProperty(node, false);};
+
 var buildDataTable = (tableId, data = [], columns = [], userSettings = {}) => {
     var defaultSettings = {
         responsive: true,
@@ -35,35 +58,40 @@ var buildDataTable = (tableId, data = [], columns = [], userSettings = {}) => {
             lengthMenu: "",
             info: ""
         },
-        data: data,
-        columns: columns,
-        drawCallback: (settings) => {
-            $(this[0]).find("tbody tr").each((idx, row) => {
-                var arow = $(row);
-                var checkboxOrRadio = arow.find(".chk-row-select");
-                if (checkboxOrRadio) {
-                    if (arow.hasClass("selected")) {
-                        checkboxOrRadio.prop("checked", true);
-                    } else {
-                        checkboxOrRadio.prop("checked", false);
-                    }
-                }
-            });
+        drawCallback: function (settings) {
+            var api = this.api();
+            api.rows({selected: true}).nodes().each((node, index) => {
+                setRowChecked(node);
+            });
+            api.rows({selected: false}).nodes().each((node, index) => {
+                setRowUnchecked(node);
+            });
         }
     }
     var theDataTable = $(tableId).DataTable({
         ...defaultSettings,
-        ...userSettings
+        ...userSettings,
+        ...(data.length == 0 ? {} : {data: data}),
+        ...(columns.length == 0 ? {} : {columns: columns})
     });
-    theDataTable.on("select", (event, datatable, type, cell, originalEvent) => {
-        datatable.rows({selected: true}).nodes().each((node, index) => {
-            $(node).find(".chk-row-select").prop("checked", true)
-        });
+    theDataTable.on("select", (event, datatable, type, indexes) => {
+        datatable
+            .rows(indexes)
+            .nodes()
+            .each((node, index) => {
+                setRowChecked(node);
+            });
     });
-    theDataTable.on("deselect", (event, datatable, type, cell, originalEvent) => {
-        datatable.rows({selected: false}).nodes().each((node, index) => {
-            $(node).find(".chk-row-select").prop("checked", false)
-        });
+    theDataTable.on("deselect", (event, datatable, type, indexes) => {
+        datatable
+            .rows(indexes)
+            .nodes()
+            .each(function(node, index) {
+                setRowUnchecked(node);
+            });
     });
+
+    theDataTable.selectAll = () => {theDataTable.rows().select()};
+    theDataTable.deselectAll = () => {theDataTable.rows().deselect()};
     return theDataTable;
 };
diff --git a/uploader/static/js/utils.js b/uploader/static/js/utils.js
index 1b31661..62d3662 100644
--- a/uploader/static/js/utils.js
+++ b/uploader/static/js/utils.js
@@ -28,7 +28,8 @@ var remove_class = (element, classvalue) => {
 
 var add_class = (element, classvalue) => {
     remove_class(element, classvalue);
-    element.attr("class", (element.attr("class") || "") + " " + classvalue);
+    element.attr("class",
+                 ((element.attr("class") || "") + " " + classvalue).trim());
 };
 
 $(".not-implemented").click((event) => {
diff --git a/uploader/templates/background-jobs/job-status.html b/uploader/templates/background-jobs/job-status.html
index 50cf6e5..2e75c6d 100644
--- a/uploader/templates/background-jobs/job-status.html
+++ b/uploader/templates/background-jobs/job-status.html
@@ -30,12 +30,16 @@
 <div class="row">
   <h3 class="subheading">STDOUT</h3>
<pre>{{job["stdout"]}}</pre> + <div style="max-width: 40em; overflow: scroll"> + <pre>{{job["stdout"]}}</pre> + </div> </div> <div class="row"> <h3 class="subheading">STDERR</h3> - <pre>{{job["stderr"]}}</pre> + <div style="max-width: 40em; overflow: scroll"> + <pre>{{job["stderr"]}}</pre> + </div> </div> {%endblock%} diff --git a/uploader/templates/background-jobs/job-summary.html b/uploader/templates/background-jobs/job-summary.html index c2c2d6b..ef9ef6c 100644 --- a/uploader/templates/background-jobs/job-summary.html +++ b/uploader/templates/background-jobs/job-summary.html @@ -50,12 +50,16 @@ <div class="row"> <h3 class="subheading">Script Errors and Logging</h3> - <pre>{{job["stderr"]}}</pre> + <div style="max-width: 40em; overflow: scroll"> + <pre>{{job["stderr"]}}</pre> + </div> </div> <div class="row"> <h3 class="subheading">Script Output</h3> - <pre>{{job["stdout"]}}</pre> + <div style="max-width: 40em; overflow: scroll"> + <pre>{{job["stdout"]}}</pre> + </div> </div> {%endblock%} diff --git a/uploader/templates/phenotypes/add-phenotypes-base.html b/uploader/templates/phenotypes/add-phenotypes-base.html index 690c7e1..c74a0fa 100644 --- a/uploader/templates/phenotypes/add-phenotypes-base.html +++ b/uploader/templates/phenotypes/add-phenotypes-base.html @@ -29,8 +29,7 @@ {%block frm_add_phenotypes_elements%}{%endblock%} - <fieldset id="fldset-publication-info"> - <legend>Publication Information</legend> + <h4>Publication Information</h4> <input type="hidden" name="publication-id" id="txt-publication-id" /> <span class="form-text text-muted"> Select a publication for your data. <br /> @@ -53,7 +52,6 @@ <tbody></tbody> </table> - </fieldset> <div class="form-group"> <input type="submit" diff --git a/uploader/templates/phenotypes/add-phenotypes-raw-files.html b/uploader/templates/phenotypes/add-phenotypes-raw-files.html index a02fae7..b1322b2 100644 --- a/uploader/templates/phenotypes/add-phenotypes-raw-files.html +++ b/uploader/templates/phenotypes/add-phenotypes-raw-files.html @@ -21,8 +21,7 @@ {%endblock%} {%block frm_add_phenotypes_elements%} -<fieldset id="fldset-file-metadata"> - <legend>File(s) Metadata</legend> + <h4>File(s) Metadata</h4> <div class="form-group"> <label for="txt-file-separator" class="form-label">File Separator</label> <div class="input-group"> @@ -89,12 +88,9 @@ <a href="#docs-file-na" title="Documentation for no-value fields"> documentation for more information</a>.</span> </div> -</fieldset> -<fieldset id="fldset-files"> <legend>Data File(s)</legend> - <fieldset id="fldset-descriptions-file"> <div class="form-group"> <div class="form-check"> <input id="chk-phenotype-descriptions-transposed" @@ -145,10 +141,8 @@ {{display_preview_table( "tbl-preview-pheno-desc", "phenotype descriptions")}} </div> - </fieldset> - - <fieldset id="fldset-data-file"> + <div class="form-group"> <div class="form-check"> <input id="chk-phenotype-data-transposed" @@ -196,11 +190,9 @@ on the expected format for the file provided here.</p>')}} {{display_preview_table("tbl-preview-pheno-data", "phenotype data")}} </div> - </fieldset> {%if population.Family in families_with_se_and_n%} - <fieldset id="fldset-se-file"> <div class="form-group"> <div class="form-check"> <input id="chk-phenotype-se-transposed" @@ -247,10 +239,8 @@ {{display_preview_table("tbl-preview-pheno-se", "standard errors")}} </div> - </fieldset> - <fieldset id="fldset-n-file"> <div class="form-group"> <div class="form-check"> <input id="chk-phenotype-n-transposed" @@ -297,8 +287,6 @@ 
{{display_preview_table("tbl-preview-pheno-n", "number of samples/individuals")}} </div> - </fieldset> -</fieldset> {%endif%} {%endblock%} @@ -477,7 +465,7 @@ .map((field) => { var value = field.trim(); if(navalues.includes(value)) { - return "⋘NUL⋙"; + return "[NO-VALUE]"; } return value; }) diff --git a/uploader/templates/phenotypes/confirm-delete-phenotypes.html b/uploader/templates/phenotypes/confirm-delete-phenotypes.html new file mode 100644 index 0000000..e6d67c7 --- /dev/null +++ b/uploader/templates/phenotypes/confirm-delete-phenotypes.html @@ -0,0 +1,196 @@ +{%extends "phenotypes/base.html"%} +{%from "flash_messages.html" import flash_all_messages%} + +{%block title%}Phenotypes{%endblock%} + +{%block pagetitle%}Delete Phenotypes{%endblock%} + +{%block lvl4_breadcrumbs%} +<li {%if activelink=="view-dataset"%} + class="breadcrumb-item active" + {%else%} + class="breadcrumb-item" + {%endif%}> + <a href="{{url_for('species.populations.phenotypes.view_dataset', + species_id=species.SpeciesId, + population_id=population.Id, + dataset_id=dataset.Id)}}">View</a> +</li> +{%endblock%} + +{%block contents%} +{{flash_all_messages()}} + +<div class="row"><h2>Delete Phenotypes</h2></div> + +{%if phenotypes | length > 0%} +<div class="row"> + <p>You have requested to delete the following phenotypes:</p> +</div> + +<div class="row"> + <div class="col"> + <a id="btn-select-all-phenotypes" + href="#" + class="btn btn-info" + title="Select all phenotypes">select all</a> + </div> + <div class="col"> + <a id="btn-deselect-all-phenotypes" + href="#" + class="btn btn-warning" + title="Deselect all phenotypes">deselect all</a> + </div> +</div> + +<div class="row"> + <table id="tbl-delete-phenotypes" class="table"> + <thead> + <tr> + <th>#</th> + <th>Record ID</th> + <th>Description</th> + </tr> + </thead> + <tbody> + {%for phenotype in phenotypes%} + <tr> + <td> + <input id="chk-xref-id-{{phenotype.xref_id}}" + name="xref_ids" + type="checkbox" + value="{{phenotype.xref_id}}" + class="chk-row-select" /> + </td> + <td>{{phenotype.xref_id}}</td> + <td>{{phenotype.Post_publication_description or + phenotype.Pre_publication_description or + phenotype.original_description}}</td> + </tr> + {%endfor%} + </tbody> + </table> +</div> + +<div class="row"> + <form id="frm-delete-phenotypes-selected" + method="POST" + action="{{url_for('species.populations.phenotypes.delete_phenotypes', + species_id=species.SpeciesId, + population_id=population.Id, + dataset_id=dataset.Id)}}"> + <div class="row"> + <div class="col"> + <input class="btn btn-info" + type="submit" + title="Cancel delete and return to dataset page." + name="action" + value="cancel" /></div> + <div class="col"> + <input id="btn-delete-phenotypes-selected" + class="btn btn-danger" + type="submit" + title="Delete the selected phenotypes from this dataset." + name="action" + value="delete" /> + </div> + </div> + </form> +</div> +{%else%} +<div class="row"> + <p>You did not select any phenotypes to delete. 
+  <p>You did not select any phenotypes to delete. Delete everything?</p>
+</div>
+
+<div class="row">
+  <form id="frm-delete-phenotypes-all"
+        method="POST"
+        action="{{url_for('species.populations.phenotypes.delete_phenotypes',
+                species_id=species.SpeciesId,
+                population_id=population.Id,
+                dataset_id=dataset.Id)}}">
+    <div class="form-check">
+      <input class="form-check-input"
+             type="checkbox"
+             name="confirm_delete_all_phenotypes"
+             id="chk-confirm-delete-all-phenotypes" />
+      <label class="form-check-label"
+             for="chk-confirm-delete-all-phenotypes">
+        delete all phenotypes?</label>
+    </div>
+
+    <div class="row">
+      <div class="col">
+        <input class="btn btn-info"
+               type="submit"
+               title="Cancel delete and return to dataset page."
+               name="action"
+               value="cancel" /></div>
+      <div class="col">
+        <input class="btn btn-danger"
+               type="submit"
+               title="Delete all phenotypes in this dataset."
+               name="action"
+               value="delete" />
+      </div>
+    </div>
+  </form>
+</div>
+{%endif%}
+
+{%endblock%}
+
+{%block javascript%}
+<script type="text/javascript">
+  $(function() {
+      var dt = buildDataTable(
+          "#tbl-delete-phenotypes",
+          data=[],
+          columns=[],
+          userSettings={
+              responsive: true,
+              select: {
+                  style: "os",
+                  info: false
+              },
+              initComplete: function(setting, json) {
+                  var api = this.api();
+                  api.rows().select();
+                  api.rows({selected: true}).nodes().each((node, index) => {
+                      setRowChecked(node);
+                  });
+              }
+          });
+
+      $("#btn-select-all-phenotypes").on("click", function(event) {
+          dt.selectAll();
+      });
+
+      $("#btn-deselect-all-phenotypes").on("click", function(event) {
+          dt.deselectAll();
+      });
+
+      $("#btn-delete-phenotypes-selected").on("click", function(event) {
+          event.preventDefault();
+          form = $("#frm-delete-phenotypes-selected");
+          form.find(".dynamically-added-element").remove();
+          dt.rows({selected: true}).nodes().each(function(node, index) {
+              var xref_id = $(node)
+                  .find('input[type="checkbox"]:checked')
+                  .val();
+              var chk = $('<input type="checkbox">');
+              chk.attr("class", "dynamically-added-element");
+              chk.attr("value", xref_id);
+              chk.attr("name", "xref_ids");
+              chk.attr("style", "display: none");
+              chk.prop("checked", true);
+              form.append(chk);
+          });
+          form.append(
+              $('<input type="hidden" name="action" value="delete" />'));
+          form.submit();
+      })
+  });
+</script>
+{%endblock%}
diff --git a/uploader/templates/phenotypes/macro-display-preview-table.html b/uploader/templates/phenotypes/macro-display-preview-table.html
index 5a4c422..6dffe9f 100644
--- a/uploader/templates/phenotypes/macro-display-preview-table.html
+++ b/uploader/templates/phenotypes/macro-display-preview-table.html
@@ -1,19 +1,11 @@
 {%macro display_preview_table(tableid, filetype)%}
-<div class="card">
-  <div class="card-body">
-    <h5 class="card-title">{{filetype | title}}: File Preview</h5>
-    <div class="card-text" style="overflow: scroll;">
-      <table id="{{tableid}}" class="table table-condensed table-responsive">
-        <thead>
-          <tr>
-          </tr>
-        <tbody>
-          <tr>
-            <td class="data-row-template text-info"></td>
-          </tr>
-        </tbody>
-      </table>
-    </div>
-  </div>
+<div class="table-responsive"
+     style="max-width:39.2em;border-radius:5px;border: solid 1px;overflow-x: scroll;">
+  <h5>{{filetype | title}}: File Preview</h5>
+  <table id="{{tableid}}" class="table">
+    <thead><tr></tr></thead>
+
+    <tbody></tbody>
+  </table>
 </div>
 {%endmacro%}
diff --git a/uploader/templates/phenotypes/view-dataset.html b/uploader/templates/phenotypes/view-dataset.html
index 1fd15b2..3bb2586 100644
--- a/uploader/templates/phenotypes/view-dataset.html
+++ b/uploader/templates/phenotypes/view-dataset.html
@@ -23,25 +23,12 @@
 {{flash_all_messages()}}
 
 <div class="row">
-  <p>The basic dataset details are:</p>
-
-  <table class="table">
-    <thead>
-      <tr>
-        <th>Name</th>
-        <th>Full Name</th>
-        <th>Short Name</th>
-      </tr>
-    </thead>
+  <h2>Phenotype Data</h2>
 
-    <tbody>
-      <tr>
-        <td>{{dataset.Name}}</td>
-        <td>{{dataset.FullName}}</td>
-        <td>{{dataset.ShortName}}</td>
-      </tr>
-    </tbody>
-  </table>
+  <p>Click on any of the phenotypes in the table below to view and edit that
+    phenotype's data.</p>
+  <p>Use the search to filter through all the phenotypes and find specific
+    phenotypes of interest.</p>
 </div>
 
 <div class="row">
@@ -67,7 +54,7 @@
       <input type="submit"
              title="Compute/Recompute the means for all phenotypes."
             class="btn btn-info"
-             value="(rec/c)ompute means"
+             value="compute means"
             id="submit-frm-recompute-phenotype-means" />
     </form>
   </div>
@@ -85,24 +72,29 @@
       <input type="submit"
             title="Run/Rerun QTLReaper."
            class="btn btn-info"
-             value="(re)run QTLReaper"
+             value="run QTLReaper"
             id="submit-frm-rerun-qtlreaper" />
     </form>
   </div>
-</div>
-
-<div class="row">
-  <h2>Phenotype Data</h2>
-  <p>Click on any of the phenotypes in the table below to view and edit that
-    phenotype's data.</p>
-  <p>Use the search to filter through all the phenotypes and find specific
-    phenotypes of interest.</p>
+  <div class="col">
+    <form id="frm-delete-phenotypes"
+          method="POST"
+          action="{{url_for(
+                  'species.populations.phenotypes.delete_phenotypes',
+                  species_id=species['SpeciesId'],
+                  population_id=population['Id'],
+                  dataset_id=dataset['Id'])}}">
+      <input type="submit"
+             class="btn btn-danger"
+             id="btn-delete-phenotypes"
+             title="Delete phenotypes from this dataset. If no phenotypes are selected in the table, this will delete ALL the phenotypes."
+             value="delete phenotypes" />
+    </form>
+  </div>
 </div>
-
-<div class="row">
-
+<div class="row" style="margin-top: 0.5em;">
   <table id="tbl-phenotypes-list" class="table compact stripe cell-border">
     <thead>
       <tr>
@@ -202,6 +194,33 @@
           });
           form.submit();
       });
+
+      $("#btn-delete-phenotypes").on(
+          "click",
+          function(event) {
+              // Collect selected phenotypes for deletion, if any.
+              event.preventDefault();
+              form = $("#frm-delete-phenotypes");
+              form.find(".dynamically-added-element").remove();
+              $("#tbl-phenotypes-list")
+                  .DataTable()
+                  .rows({selected: true})
+                  .nodes().each(function(node, index) {
+                      var parts = $(node)
+                          .find(".chk-row-select")
+                          .val()
+                          .split("_");
+                      var xref_id = parts[parts.length - 1].trim();
+                      var chk = $('<input type="checkbox">');
+                      chk.attr("class", "dynamically-added-element");
+                      chk.attr("value", xref_id);
+                      chk.attr("name", "xref_ids");
+                      chk.attr("style", "display: none");
+                      chk.prop("checked", true);
+                      form.append(chk);
+                  });
+              form.submit();
+          });
   });
 </script>
 {%endblock%}
