23 files changed, 220 insertions, 104 deletions
diff --git a/.guix-channel b/.guix-channel
index 54206b2..f1a8fa6 100644
--- a/.guix-channel
+++ b/.guix-channel
@@ -35,11 +35,12 @@
 (channel
  (name guix-bioinformatics)
  (url "https://git.genenetwork.org/guix-bioinformatics")
- (commit "903465c85c9b2ae28480b236c3364da873ca8f51"))
+ (commit "9b0955f14ec725990abb1f6af3b9f171e4943f77"))
 (channel
  (name guix-past)
  (url "https://codeberg.org/guix-science/guix-past")
  (branch "master")
+ (commit "473c942b509ab3ead35159d27dfbf2031a36cd4d")
  (introduction
   (channel-introduction
    (version 0)
@@ -50,6 +51,7 @@
 (name guix-rust-past-crates)
 (url "https://codeberg.org/guix/guix-rust-past-crates.git")
 (branch "trunk")
+ (commit "b8b7ffbd1cec9f56f93fae4da3a74163bbc9c570")
 (introduction
  (channel-introduction
   (version 0)
diff --git a/qc_app/default_settings.py b/qc_app/default_settings.py
index 7a9da0f..7bb0bf8 100644
--- a/qc_app/default_settings.py
+++ b/qc_app/default_settings.py
@@ -7,7 +7,7 @@ import os
 LOG_LEVEL = os.getenv("LOG_LEVEL", "WARNING")
 SECRET_KEY = b"<Please! Please! Please! Change This!>"
-UPLOAD_FOLDER = "/tmp/qc_app_files"
+UPLOADS_DIRECTORY = "/tmp/qc_app_files"
 REDIS_URL = "redis://"
 JOBS_TTL_SECONDS = 1209600 # 14 days
 GNQC_REDIS_PREFIX="GNQC"
diff --git a/scripts/phenotypes/delete_phenotypes.py b/scripts/phenotypes/delete_phenotypes.py
index 028f061..461f3ec 100644
--- a/scripts/phenotypes/delete_phenotypes.py
+++ b/scripts/phenotypes/delete_phenotypes.py
@@ -24,12 +24,15 @@ def read_xref_ids_file(filepath: Optional[Path]) -> tuple[int, ...]:
     if filepath is None:
         return tuple()
 
+    logger.debug("Using file '%s' to retrieve XREF IDs for deletion.",
+                 filepath.name)
     _ids: tuple[int, ...] = tuple()
     with filepath.open(mode="r") as infile:
-        try:
-            _ids += (int(infile.readline().strip()),)
-        except TypeError:
-            pass
+        for line in infile.readlines():
+            try:
+                _ids += (int(line.strip()),)
+            except TypeError:
+                pass
 
     return _ids
 
@@ -125,16 +128,27 @@ if __name__ == "__main__":
             assert not (len(xref_ids) > 0 and args.delete_all)
             xref_ids = (fetch_all_xref_ids(cursor, args.population_id)
                         if args.delete_all else xref_ids)
+            logger.debug("Will delete %s phenotypes and related data",
+                         len(xref_ids))
             if len(xref_ids) == 0:
                 print("No cross-reference IDs were provided. Aborting.")
                 return 0
 
+            print("Updating authorisations: ", end="")
             update_auth((args.auth_server_uri, args.auth_token),
                         args.species_id,
                         args.population_id,
                         args.dataset_id,
                         xref_ids)
+            print("OK.")
+            print("Deleting the data: ", end="")
             delete_phenotypes(cursor, args.population_id, xref_ids=xref_ids)
+            print("OK.")
+            if args.xref_ids_file is not None:
+                print("Deleting temporary file: ", end="")
+                args.xref_ids_file.unlink()
+                print("OK.")
+
             return 0
         except AssertionError:
             logger.error(
@@ -143,6 +157,14 @@ if __name__ == "__main__":
                 "and also specify to 'DELETE-ALL' phenotypes in the "
                 "population, we have no way of knowing what it is you want.")
             return 1
+        except requests.exceptions.HTTPError as _exc:
+            resp = _exc.response
+            resp_data = resp.json()
+            logger.debug("%s: %s",
+                         resp_data["error"],
+                         resp_data["error_description"],
+                         exc_info=True)
+            return 1
         except Exception as _exc:# pylint: disable=[broad-exception-caught]
             logger.debug("Failed while attempting to delete phenotypes.",
                          exc_info=True)
diff --git a/scripts/run_qtlreaper.py b/scripts/run_qtlreaper.py
index 7d58402..54e5d45 100644
--- a/scripts/run_qtlreaper.py
+++ b/scripts/run_qtlreaper.py
@@ -169,7 +169,7 @@ def dispatch(args: Namespace) -> int:
         logger.info("Successfully computed p values for %s traits.", len(_traitsdata))
         return 0
     except FileNotFoundError as fnf:
-        logger.error(", ".join(fnf.args), exc_info=False)
+        logger.error(", ".join(str(arg) for arg in fnf.args), exc_info=False)
     except AssertionError as aserr:
         logger.error(", ".join(aserr.args), exc_info=False)
     except Exception as _exc:# pylint: disable=[broad-exception-caught]
diff --git a/tests/conftest.py b/tests/conftest.py
index a716c52..2009aab 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -183,7 +183,7 @@ def redis_conn_with_completed_job_some_errors(redis_url, redis_ttl, jobs_prefix,
 def uploads_dir(client): # pylint: disable=[redefined-outer-name]
     """Returns the configured, uploads directory, creating it if it does not exist."""
-    the_dir = client.application.config["UPLOAD_FOLDER"]
+    the_dir = client.application.config["UPLOADS_DIRECTORY"]
     if not os.path.exists(the_dir):
         os.mkdir(the_dir)
diff --git a/tests/test_instance_dir/config.py b/tests/test_instance_dir/config.py
index 2ee569b..f04b3df 100644
--- a/tests/test_instance_dir/config.py
+++ b/tests/test_instance_dir/config.py
@@ -6,6 +6,6 @@ import os
 LOG_LEVEL = os.getenv("LOG_LEVEL", "WARNING")
 SECRET_KEY = b"<Please! Please! Please! Change This!>"
-UPLOAD_FOLDER = "/tmp/qc_app_files"
+UPLOADS_DIRECTORY = "/tmp/qc_app_files"
 REDIS_URL = "redis://"
 JOBS_TTL_SECONDS = 600 # 10 minutes
diff --git a/tests/uploader/test_parse.py b/tests/uploader/test_parse.py
index 20c75b7..56e1b41 100644
--- a/tests/uploader/test_parse.py
+++ b/tests/uploader/test_parse.py
@@ -50,7 +50,7 @@ def test_parse_with_existing_uploaded_file(
     assert the_job["command"] == " ".join([
         sys.executable, "-m", "scripts.validate_file", db_url, redis_url,
         jobs_prefix, job_id, "--redisexpiry", str(redis_ttl), str(speciesid),
-        filetype, f"{client.application.config['UPLOAD_FOLDER']}/{filename}"])
+        filetype, f"{client.application.config['UPLOADS_DIRECTORY']}/{filename}"])
 
 @pytest.mark.parametrize(
     "filename,uri,error_msgs",
diff --git a/uploader/expression_data/dbinsert.py b/uploader/expression_data/dbinsert.py
index 6d8ce80..7040698 100644
--- a/uploader/expression_data/dbinsert.py
+++ b/uploader/expression_data/dbinsert.py
@@ -94,7 +94,7 @@ def select_platform():
         job = jobs.job(rconn, jobs.jobsnamespace(), job_id)
         if job:
             filename = job["filename"]
-            filepath = f"{app.config['UPLOAD_FOLDER']}/{filename}"
+            filepath = f"{app.config['UPLOADS_DIRECTORY']}/{filename}"
             if os.path.exists(filepath):
                 default_species = 1
                 gchips = genechips()
@@ -367,7 +367,7 @@ def insert_data():
         assert form.get("datasetid"), "dataset"
 
         filename = form["filename"]
-        filepath = f"{app.config['UPLOAD_FOLDER']}/{filename}"
+        filepath = f"{app.config['UPLOADS_DIRECTORY']}/{filename}"
         redisurl = app.config["REDIS_URL"]
         if os.path.exists(filepath):
             with Redis.from_url(redisurl, decode_responses=True) as rconn:
@@ -377,7 +377,7 @@ def insert_data():
                     form["species"], form["genechipid"], form["datasetid"],
                     app.config["SQL_URI"], redisurl,
                     app.config["JOBS_TTL_SECONDS"]),
-                    redisurl, f"{app.config['UPLOAD_FOLDER']}/job_errors")
+                    redisurl, f"{app.config['UPLOADS_DIRECTORY']}/job_errors")
                 return redirect(url_for("dbinsert.insert_status", job_id=job["jobid"]))
             return render_error(f"File '{filename}' no longer exists.")
diff --git a/uploader/expression_data/views.py b/uploader/expression_data/views.py
index 0b318b7..0e9b072 100644
--- a/uploader/expression_data/views.py
+++ b/uploader/expression_data/views.py
@@ -162,7 +162,7 @@ def upload_file(species_id: int, population_id: int):
             species=species,
             population=population)
 
-    upload_dir = app.config["UPLOAD_FOLDER"]
+    upload_dir = app.config["UPLOADS_DIRECTORY"]
     request_errors = errors(request)
     if request_errors:
         for error in request_errors:
@@ -225,7 +225,7 @@ def parse_file(species_id: int, population_id: int):
         _errors = True
 
     if filename:
-        filepath = os.path.join(app.config["UPLOAD_FOLDER"], filename)
+        filepath = os.path.join(app.config["UPLOADS_DIRECTORY"], filename)
         if not os.path.exists(filepath):
             flash("Selected file does not exist (any longer)", "alert-danger")
             _errors = True
@@ -241,7 +241,7 @@ def parse_file(species_id: int, population_id: int):
             species_id, filepath, filetype,# type: ignore[arg-type]
             app.config["JOBS_TTL_SECONDS"]),
         redisurl,
-        f"{app.config['UPLOAD_FOLDER']}/job_errors")
+        f"{app.config['UPLOADS_DIRECTORY']}/job_errors")
 
     return redirect(url_for("species.populations.expression-data.parse_status",
                             species_id=species_id,
@@ -263,7 +263,7 @@ def parse_status(species_id: int, population_id: int, job_id: str):
         return render_template("no_such_job.html", job_id=job_id), 400
 
     error_filename = jobs.error_filename(
-        job_id, f"{app.config['UPLOAD_FOLDER']}/job_errors")
+        job_id, f"{app.config['UPLOADS_DIRECTORY']}/job_errors")
     if os.path.exists(error_filename):
         stat = os.stat(error_filename)
         if stat.st_size > 0:
@@ -345,7 +345,7 @@ def fail(species_id: int, population_id: int, job_id: str):
     if job:
         error_filename = jobs.error_filename(
-            job_id, f"{app.config['UPLOAD_FOLDER']}/job_errors")
+            job_id, f"{app.config['UPLOADS_DIRECTORY']}/job_errors")
         if os.path.exists(error_filename):
             stat = os.stat(error_filename)
             if stat.st_size > 0:
diff --git a/uploader/phenotypes/models.py b/uploader/phenotypes/models.py
index b9841aa..06c417f 100644
--- a/uploader/phenotypes/models.py
+++ b/uploader/phenotypes/models.py
@@ -87,11 +87,14 @@ def phenotype_publication_data(conn, phenotype_id) -> Optional[dict]:
         return dict(res)
 
-def dataset_phenotypes(conn: Connection,
-                       population_id: int,
-                       dataset_id: int,
-                       offset: int = 0,
-                       limit: Optional[int] = None) -> tuple[dict, ...]:
+def dataset_phenotypes(
+        conn: Connection,
+        population_id: int,
+        dataset_id: int,
+        offset: int = 0,
+        limit: Optional[int] = None,
+        xref_ids: tuple[int, ...] = tuple()
+) -> tuple[dict, ...]:
     """Fetch the actual phenotypes."""
     _query = (
         "SELECT pheno.*, pxr.Id AS xref_id, pxr.InbredSetId, ist.InbredSetCode "
@@ -100,9 +103,11 @@ def dataset_phenotypes(conn: Connection,
         "INNER JOIN PublishFreeze AS pf ON pxr.InbredSetId=pf.InbredSetId "
         "INNER JOIN InbredSet AS ist ON pf.InbredSetId=ist.Id "
         "WHERE pxr.InbredSetId=%s AND pf.Id=%s") + (
+            f" AND pxr.Id IN ({', '.join(['%s'] * len(xref_ids))})"
+            if len(xref_ids) > 0 else "") + (
             f" LIMIT {limit} OFFSET {offset}" if bool(limit) else "")
     with conn.cursor(cursorclass=DictCursor) as cursor:
-        cursor.execute(_query, (population_id, dataset_id))
+        cursor.execute(_query, (population_id, dataset_id) + xref_ids)
         debug_query(cursor, logger)
         return tuple(dict(row) for row in cursor.fetchall())
diff --git a/uploader/phenotypes/views.py b/uploader/phenotypes/views.py
index 2cf0ca0..776fa52 100644
--- a/uploader/phenotypes/views.py
+++ b/uploader/phenotypes/views.py
@@ -612,6 +612,11 @@ def load_phenotypes_success_handler(job):
         job_id=job["job_id"]))
 
 
+def proceed_to_job_status(job):
+    app.logger.debug("The new job: %s", job)
+    return redirect(url_for("background-jobs.job_status", job_id=job["job_id"]))
+
+
 @phenotypesbp.route(
     "<int:species_id>/populations/<int:population_id>/phenotypes/datasets"
     "/<int:dataset_id>/load-data-to-database",
@@ -654,11 +659,6 @@ def load_data_to_database(
     def __handle_error__(resp):
         return render_template("http-error.html", *resp.json())
 
-    def __handle_success__(load_job):
-        app.logger.debug("The phenotypes loading job: %s", load_job)
-        return redirect(url_for(
-            "background-jobs.job_status", job_id=load_job["job_id"]))
-
     return request_token(
         token_uri=urljoin(oauth2client.authserver_uri(),
                           "auth/token"),
@@ -689,7 +689,7 @@ def load_data_to_database(
                 Path(f"{uploads_dir(app)}/job_errors"),
                 worker_manager="gn_libs.jobs.launcher",
                 loglevel=_loglevel)
-    ).either(__handle_error__, __handle_success__)
+    ).either(__handle_error__, proceed_to_job_status)
 
 
 def update_phenotype_metadata(conn, metadata: dict):
@@ -1158,6 +1158,12 @@ def rerun_qtlreaper_success_handler(job):
     return return_to_dataset_view_handler(job, "QTLReaper ran successfully!")
 
 
+def delete_phenotypes_success_handler(job):
+    """Handle success running the 'delete-phenotypes' script."""
+    return return_to_dataset_view_handler(
+        job, "Phenotypes deleted successfully.")
+
+
 @phenotypesbp.route(
     "<int:species_id>/populations/<int:population_id>/phenotypes/datasets"
     "/<int:dataset_id>/delete",
@@ -1174,7 +1180,22 @@ def delete_phenotypes(# pylint: disable=[unused-argument]
         **kwargs
 ):
     """Delete the specified phenotype data."""
-    with database_connection(app.config["SQL_URI"]) as conn:
+    _dataset_page = redirect(url_for(
+        "species.populations.phenotypes.view_dataset",
+        species_id=species["SpeciesId"],
+        population_id=population["Id"],
+        dataset_id=dataset["Id"]))
+
+    def __handle_error__(resp):
+        flash(
+            "Error retrieving authorisation token. Phenotype deletion "
+            "failed. Please try again later.",
+            "alert alert-danger")
+        return _dataset_page
+
+    _jobs_db = app.config["ASYNCHRONOUS_JOBS_SQLITE_DB"]
+    with (database_connection(app.config["SQL_URI"]) as conn,
+          sqlite3.connection(_jobs_db) as jobsconn):
         form = request.form
         xref_ids = tuple(int(item) for item in set(form.getlist("xref_ids")))
@@ -1186,16 +1207,68 @@ def delete_phenotypes(# pylint: disable=[unused-argument]
                     population_id=population["Id"],
                     dataset_id=dataset["Id"]))
             case "delete":
-                # delete everything
-                # python3 -m scripts.phenotypes.delete_phenotypes <mariadburi> <authdburi> <speciesid> <populationid>
-                #
-                # delete selected phenotypes
-                # python3 -m scripts.phenotypes.delete_phenotypes <mariadburi> <authdburi> <speciesid> <populationid> --xref-ids-file=/path/to/file.txt
-                return "Would actually delete the data!"
+                _loglevel = logging.getLevelName(
+                    app.logger.getEffectiveLevel()).lower()
+                if form.get("confirm_delete_all_phenotypes", "") == "on":
+                    _cmd = ["--delete-all"]
+                else:
+                    # setup phenotypes xref_ids file
+                    _xref_ids_file = Path(
+                        app.config["SCRATCH_DIRECTORY"],
+                        f"delete-phenotypes-{uuid.uuid4()}.txt")
+                    with _xref_ids_file.open(mode="w", encoding="utf8") as ptr:
+                        ptr.write("\n".join(str(_id) for _id in xref_ids))
+
+                    _cmd = ["--xref_ids_file", str(_xref_ids_file)]
+
+                _job_id = uuid.uuid4()
+                return request_token(
+                    token_uri=urljoin(
+                        oauth2client.authserver_uri(), "auth/token"),
+                    user_id=session.user_details()["user_id"]
+                ).then(
+                    lambda token: gnlibs_jobs.initialise_job(
+                        jobsconn,
+                        _job_id,
+                        [
+                            sys.executable,
+                            "-u",
+                            "-m",
+                            "scripts.phenotypes.delete_phenotypes",
+                            "--log-level", _loglevel,
+                            app.config["SQL_URI"],
+                            str(species["SpeciesId"]),
+                            str(population["Id"]),
+                            str(dataset["Id"]),
+                            app.config["AUTH_SERVER_URL"],
+                            token["access_token"]] + _cmd,
+                        "delete-phenotypes",
+                        extra_meta={
+                            "species_id": species["SpeciesId"],
+                            "population_id": population["Id"],
+                            "dataset_id": dataset["Id"],
+                            "success_handler": (
+                                "uploader.phenotypes.views."
+                                "delete_phenotypes_success_handler")
+                        },
+                        external_id=session.logged_in_user_id())
+                ).then(
+                    lambda _job: gnlibs_jobs.launch_job(
+                        _job,
+                        _jobs_db,
+                        Path(f"{uploads_dir(app)}/job_errors"),
+                        worker_manager="gn_libs.jobs.launcher",
+                        loglevel=_loglevel)
+                ).either(__handle_error__, proceed_to_job_status)
             case _:
+                _phenos = tuple()
+                if len(xref_ids) > 0:
+                    _phenos = dataset_phenotypes(
+                        conn, population["Id"], dataset["Id"], xref_ids=xref_ids)
+
                 return render_template(
                     "phenotypes/confirm-delete-phenotypes.html",
                     species=species,
                     population=population,
                     dataset=dataset,
-                    phenotypes=xref_ids)
+                    phenotypes=_phenos)
diff --git a/uploader/population/rqtl2.py b/uploader/population/rqtl2.py
index 97d4854..bb5066e 100644
--- a/uploader/population/rqtl2.py
+++ b/uploader/population/rqtl2.py
@@ -134,7 +134,7 @@ def upload_rqtl2_bundle(species_id: int, population_id: int):
     try:
         app.logger.debug("Files in the form: %s", request.files)
         the_file = save_file(request.files["rqtl2_bundle_file"],
-                             Path(app.config["UPLOAD_FOLDER"]))
+                             Path(app.config["UPLOADS_DIRECTORY"]))
     except AssertionError:
         app.logger.debug(traceback.format_exc())
         flash("Please provide a valid R/qtl2 zip bundle.",
@@ -185,7 +185,7 @@ def trigger_rqtl2_bundle_qc(
             "rqtl2-bundle-file": str(rqtl2bundle.absolute()),
             "original-filename": originalfilename})}),
         redisuri,
-        f"{app.config['UPLOAD_FOLDER']}/job_errors")
+        f"{app.config['UPLOADS_DIRECTORY']}/job_errors")
     return jobid
 
@@ -895,7 +895,7 @@ def confirm_bundle_details(species_id: int, population_id: int):
                 })
             }),
             redisuri,
-            f"{app.config['UPLOAD_FOLDER']}/job_errors")
+            f"{app.config['UPLOADS_DIRECTORY']}/job_errors")
 
     return redirect(url_for("expression-data.rqtl2.rqtl2_processing_status",
                             jobid=jobid))
diff --git a/uploader/samples/views.py b/uploader/samples/views.py
index ee002ba..1c0569d 100644
--- a/uploader/samples/views.py
+++ b/uploader/samples/views.py
@@ -138,7 +138,7 @@ def upload_samples(species_id: int, population_id: int):#pylint: disable=[too-ma
     try:
         samples_file = save_file(request.files["samples_file"],
-                                 Path(app.config["UPLOAD_FOLDER"]))
+                                 Path(app.config["UPLOADS_DIRECTORY"]))
     except AssertionError:
         flash("You need to provide a file with the samples data.",
               "alert-error")
@@ -176,7 +176,7 @@ def upload_samples(species_id: int, population_id: int):#pylint: disable=[too-ma
             },
             external_id=session.logged_in_user_id()),
         _jobs_db,
-        Path(f"{app.config['UPLOAD_FOLDER']}/job_errors").absolute(),
+        Path(f"{app.config['UPLOADS_DIRECTORY']}/job_errors").absolute(),
         loglevel=logging.getLevelName(
             app.logger.getEffectiveLevel()).lower())
     return redirect(
diff --git a/uploader/static/css/layout-common.css b/uploader/static/css/layout-common.css
index 88e580c..9c9d034 100644
--- a/uploader/static/css/layout-common.css
+++ b/uploader/static/css/layout-common.css
@@ -2,20 +2,20 @@
     box-sizing: border-box;
 }
 
- body {
-     display: grid;
-     grid-gap: 1em;
- }
+body {
+    display: grid;
+    grid-gap: 1em;
+}
 
- #header {
-     margin: -0.7em; /* Fill entire length of screen */
-     /* Define layout for the children elements */
-     display: grid;
- }
+#header {
+    margin: -0.7em; /* Fill entire length of screen */
+    /* Define layout for the children elements */
+    display: grid;
+}
 
- #header #header-nav {
-     /* Place it in the parent element */
-     grid-column-start: 1;
-     grid-column-end: 2;
-     display: flex;
- }
+#header #header-nav {
+    /* Place it in the parent element */
+    grid-column-start: 1;
+    grid-column-end: 2;
+    display: flex;
+}
diff --git a/uploader/static/css/layout-small.css b/uploader/static/css/layout-small.css
index 80a3759..2e47217 100644
--- a/uploader/static/css/layout-small.css
+++ b/uploader/static/css/layout-small.css
@@ -2,7 +2,7 @@
     body {
         display: grid;
         grid-template-columns: 1fr;
-        grid-template-rows: 1fr 2fr 7fr;
+        grid-template-rows: 1fr 90fr;
         grid-gap: 1em;
     }
 
@@ -31,6 +31,11 @@
         grid-column-end: 2;
     }
 
+    #header #header-nav ul {
+        display: grid;
+        grid-template-columns: 1fr;
+    }
+
     #main {
         /* Place it in the parent element */
         grid-column-start: 1;
@@ -38,7 +43,7 @@
         display: grid;
 
         /* Define layout for the children elements */
-        grid-template-rows: 1.5em 80% 20%;
+        grid-template-rows: 1fr 80fr 20fr;
         grid-template-columns: 1fr;
     }
diff --git a/uploader/static/js/utils.js b/uploader/static/js/utils.js
index 1b31661..62d3662 100644
--- a/uploader/static/js/utils.js
+++ b/uploader/static/js/utils.js
@@ -28,7 +28,8 @@ var remove_class = (element, classvalue) => {
 var add_class = (element, classvalue) => {
     remove_class(element, classvalue);
-    element.attr("class", (element.attr("class") || "") + " " + classvalue);
+    element.attr("class",
+                 ((element.attr("class") || "") + " " + classvalue).trim());
 };
 
 $(".not-implemented").click((event) => {
diff --git a/uploader/templates/background-jobs/job-status.html b/uploader/templates/background-jobs/job-status.html
index 50cf6e5..2e75c6d 100644
--- a/uploader/templates/background-jobs/job-status.html
+++ b/uploader/templates/background-jobs/job-status.html
@@ -30,12 +30,16 @@
 <div class="row">
   <h3 class="subheading">STDOUT</h3>
-  <pre>{{job["stdout"]}}</pre>
+  <div style="max-width: 40em; overflow: scroll">
+    <pre>{{job["stdout"]}}</pre>
+  </div>
 </div>
 
 <div class="row">
   <h3 class="subheading">STDERR</h3>
-  <pre>{{job["stderr"]}}</pre>
+  <div style="max-width: 40em; overflow: scroll">
+    <pre>{{job["stderr"]}}</pre>
+  </div>
 </div>
 {%endblock%}
diff --git a/uploader/templates/background-jobs/job-summary.html b/uploader/templates/background-jobs/job-summary.html
index c2c2d6b..ef9ef6c 100644
--- a/uploader/templates/background-jobs/job-summary.html
+++ b/uploader/templates/background-jobs/job-summary.html
@@ -50,12 +50,16 @@
 <div class="row">
   <h3 class="subheading">Script Errors and Logging</h3>
-  <pre>{{job["stderr"]}}</pre>
+  <div style="max-width: 40em; overflow: scroll">
+    <pre>{{job["stderr"]}}</pre>
+  </div>
 </div>
 
 <div class="row">
   <h3 class="subheading">Script Output</h3>
-  <pre>{{job["stdout"]}}</pre>
+  <div style="max-width: 40em; overflow: scroll">
+    <pre>{{job["stdout"]}}</pre>
+  </div>
 </div>
 {%endblock%}
diff --git a/uploader/templates/phenotypes/add-phenotypes-base.html b/uploader/templates/phenotypes/add-phenotypes-base.html
index 690c7e1..c74a0fa 100644
--- a/uploader/templates/phenotypes/add-phenotypes-base.html
+++ b/uploader/templates/phenotypes/add-phenotypes-base.html
@@ -29,8 +29,7 @@
     {%block frm_add_phenotypes_elements%}{%endblock%}
 
-    <fieldset id="fldset-publication-info">
-      <legend>Publication Information</legend>
+    <h4>Publication Information</h4>
       <input type="hidden" name="publication-id" id="txt-publication-id" />
       <span class="form-text text-muted">
         Select a publication for your data. <br />
@@ -53,7 +52,6 @@
         <tbody></tbody>
       </table>
-    </fieldset>
 
     <div class="form-group">
       <input type="submit"
diff --git a/uploader/templates/phenotypes/add-phenotypes-raw-files.html b/uploader/templates/phenotypes/add-phenotypes-raw-files.html
index a02fae7..b1322b2 100644
--- a/uploader/templates/phenotypes/add-phenotypes-raw-files.html
+++ b/uploader/templates/phenotypes/add-phenotypes-raw-files.html
@@ -21,8 +21,7 @@
 {%endblock%}
 
 {%block frm_add_phenotypes_elements%}
-<fieldset id="fldset-file-metadata">
-  <legend>File(s) Metadata</legend>
+  <h4>File(s) Metadata</h4>
   <div class="form-group">
     <label for="txt-file-separator" class="form-label">File Separator</label>
     <div class="input-group">
@@ -89,12 +88,9 @@
       <a href="#docs-file-na" title="Documentation for no-value fields">
         documentation for more information</a>.</span>
   </div>
-</fieldset>
 
-<fieldset id="fldset-files">
   <legend>Data File(s)</legend>
 
-  <fieldset id="fldset-descriptions-file">
     <div class="form-group">
       <div class="form-check">
         <input id="chk-phenotype-descriptions-transposed"
@@ -145,10 +141,8 @@
       {{display_preview_table(
       "tbl-preview-pheno-desc", "phenotype descriptions")}}
     </div>
-  </fieldset>
-
-  <fieldset id="fldset-data-file">
+
     <div class="form-group">
       <div class="form-check">
         <input id="chk-phenotype-data-transposed"
@@ -196,11 +190,9 @@
          on the expected format for the file provided here.</p>')}}
       {{display_preview_table("tbl-preview-pheno-data", "phenotype data")}}
     </div>
-  </fieldset>
 
   {%if population.Family in families_with_se_and_n%}
-  <fieldset id="fldset-se-file">
     <div class="form-group">
       <div class="form-check">
         <input id="chk-phenotype-se-transposed"
@@ -247,10 +239,8 @@
       {{display_preview_table("tbl-preview-pheno-se", "standard errors")}}
     </div>
-  </fieldset>
 
-  <fieldset id="fldset-n-file">
     <div class="form-group">
       <div class="form-check">
         <input id="chk-phenotype-n-transposed"
@@ -297,8 +287,6 @@
       {{display_preview_table("tbl-preview-pheno-n",
                               "number of samples/individuals")}}
     </div>
-  </fieldset>
-</fieldset>
   {%endif%}
 {%endblock%}
 
@@ -477,7 +465,7 @@
               .map((field) => {
                   var value = field.trim();
                   if(navalues.includes(value)) {
-                      return "⋘NUL⋙";
+                      return "[NO-VALUE]";
                   }
                   return value;
               })
diff --git a/uploader/templates/phenotypes/confirm-delete-phenotypes.html b/uploader/templates/phenotypes/confirm-delete-phenotypes.html
index b59fd7b..e6d67c7 100644
--- a/uploader/templates/phenotypes/confirm-delete-phenotypes.html
+++ b/uploader/templates/phenotypes/confirm-delete-phenotypes.html
@@ -56,13 +56,16 @@
           {%for phenotype in phenotypes%}
           <tr>
             <td>
-              <input id="chk-xref-id-{{phenotype}}"
+              <input id="chk-xref-id-{{phenotype.xref_id}}"
                      name="xref_ids"
                      type="checkbox"
+                     value="{{phenotype.xref_id}}"
                      class="chk-row-select" />
             </td>
-            <td>{{phenotype}}</td>
-            <td>{{phenotype}} — Description</td>
+            <td>{{phenotype.xref_id}}</td>
+            <td>{{phenotype.Post_publication_description or
+                 phenotype.Pre_publication_description or
+                 phenotype.original_description}}</td>
           </tr>
           {%endfor%}
         </tbody>
@@ -166,6 +169,27 @@
    $("#btn-deselect-all-phenotypes").on("click", function(event) {
        dt.deselectAll();
    });
+
+   $("#btn-delete-phenotypes-selected").on("click", function(event) {
+       event.preventDefault();
+       form = $("#frm-delete-phenotypes-selected");
+       form.find(".dynamically-added-element").remove();
+       dt.rows({selected: true}).nodes().each(function(node, index) {
+           var xref_id = $(node)
+               .find('input[type="checkbox"]:checked')
+               .val();
+           var chk = $('<input type="checkbox">');
+           chk.attr("class", "dynamically-added-element");
+           chk.attr("value", xref_id);
+           chk.attr("name", "xref_ids");
+           chk.attr("style", "display: none");
+           chk.prop("checked", true);
+           form.append(chk);
+       });
+       form.append(
+           $('<input type="hidden" name="action" value="delete" />'));
+       form.submit();
+   })
 });
 </script>
 {%endblock%}
diff --git a/uploader/templates/phenotypes/macro-display-preview-table.html b/uploader/templates/phenotypes/macro-display-preview-table.html
index d31c7fa..6dffe9f 100644
--- a/uploader/templates/phenotypes/macro-display-preview-table.html
+++ b/uploader/templates/phenotypes/macro-display-preview-table.html
@@ -1,19 +1,11 @@
 {%macro display_preview_table(tableid, filetype)%}
-<div class="card">
-  <div class="card-body">
-    <h5 class="card-title">{{filetype | title}}: File Preview</h5>
-    <div class="card-text" style="overflow: scroll;">
-      <table id="{{tableid}}" class="table table-condensed">
-        <thead>
-          <tr>
-          </tr>
-        <tbody>
-          <tr>
-            <td class="data-row-template text-info"></td>
-          </tr>
-        </tbody>
-      </table>
-    </div>
-  </div>
+<div class="table-responsive"
+     style="max-width:39.2em;border-radius:5px;border: solid 1px;overflow-x: scroll;">
+  <h5>{{filetype | title}}: File Preview</h5>
+  <table id="{{tableid}}" class="table">
+    <thead><tr></tr></thead>
+
+    <tbody></tbody>
+  </table>
 </div>
 {%endmacro%}
diff --git a/uploader/templates/phenotypes/view-dataset.html b/uploader/templates/phenotypes/view-dataset.html
index de76cbf..3bb2586 100644
--- a/uploader/templates/phenotypes/view-dataset.html
+++ b/uploader/templates/phenotypes/view-dataset.html
@@ -77,7 +77,6 @@
     </form>
   </div>
 
-  {%if view_under_construction%}
   <div class="col">
     <form id="frm-delete-phenotypes"
           method="POST"
@@ -93,7 +92,6 @@
            value="delete phenotypes" />
     </form>
   </div>
-  {%endif%}
 </div>
 
 <div class="row" style="margin-top: 0.5em;">