-rw-r--r--  scripts/qc_on_rqtl2_bundle2.py                                     | 346
-rw-r--r--  uploader/authorisation.py                                          |   2
-rw-r--r--  uploader/base_routes.py                                            |   7
-rw-r--r--  uploader/files/__init__.py                                         |   1
-rw-r--r--  uploader/phenotypes/models.py                                      |   2
-rw-r--r--  uploader/phenotypes/views.py                                       |  67
-rw-r--r--  uploader/population/rqtl2.py                                       | 123
-rw-r--r--  uploader/static/css/styles.css                                     | 196
-rw-r--r--  uploader/templates/base.html                                       |  53
-rw-r--r--  uploader/templates/phenotypes/add-phenotypes-raw-files.html        |   2
-rw-r--r--  uploader/templates/phenotypes/edit-phenotype.html                  |   2
-rw-r--r--  uploader/templates/phenotypes/view-dataset.html                    |  67
-rw-r--r--  uploader/templates/phenotypes/view-phenotype.html                  |   2
-rw-r--r--  uploader/templates/populations/macro-display-population-card.html  |   5
14 files changed, 207 insertions(+), 668 deletions(-)
diff --git a/scripts/qc_on_rqtl2_bundle2.py b/scripts/qc_on_rqtl2_bundle2.py
deleted file mode 100644
index 7e5d253..0000000
--- a/scripts/qc_on_rqtl2_bundle2.py
+++ /dev/null
@@ -1,346 +0,0 @@
-"""Run Quality Control checks on R/qtl2 bundle."""
-import os
-import sys
-import json
-from time import sleep
-from pathlib import Path
-from zipfile import ZipFile
-from argparse import Namespace
-from datetime import timedelta
-import multiprocessing as mproc
-from functools import reduce, partial
-from logging import Logger, getLogger, StreamHandler
-from typing import Union, Sequence, Callable, Iterator
-
-import MySQLdb as mdb
-from redis import Redis
-
-from quality_control.errors import InvalidValue
-from quality_control.checks import decimal_points_error
-
-from uploader import jobs
-from uploader.db_utils import database_connection
-from uploader.check_connections import check_db, check_redis
-
-from r_qtl import r_qtl2 as rqtl2
-from r_qtl import r_qtl2_qc as rqc
-from r_qtl import exceptions as rqe
-from r_qtl import fileerrors as rqfe
-
-from scripts.process_rqtl2_bundle import parse_job
-from scripts.redis_logger import setup_redis_logger
-from scripts.cli_parser import init_cli_parser, add_global_data_arguments
-from scripts.rqtl2.bundleutils import build_line_joiner, build_line_splitter
-
-
-def check_for_missing_files(
- rconn: Redis, fqjobid: str, extractpath: Path, logger: Logger) -> bool:
- """Check that all files listed in the control file do actually exist."""
- logger.info("Checking for missing files.")
- missing = rqc.missing_files(extractpath)
- # add_to_errors(rconn, fqjobid, "errors-generic", tuple(
- # rqfe.MissingFile(
- # mfile[0], mfile[1], (
- # f"File '{mfile[1]}' is listed in the control file under "
- # f"the '{mfile[0]}' key, but it does not actually exist in "
- # "the bundle."))
- # for mfile in missing))
- if len(missing) > 0:
-        logger.error("Missing files in the bundle!")
- return True
- return False
-
-
-def open_file(file_: Path) -> Iterator:
- """Open file and return one line at a time."""
- with open(file_, "r", encoding="utf8") as infile:
- for line in infile:
- yield line
-
-
-def check_markers(
- filename: str,
- row: tuple[str, ...],
-        save_error: Callable[[rqfe.InvalidValue], rqfe.InvalidValue]
-) -> tuple[rqfe.InvalidValue, ...]:
- """Check that the markers are okay"""
- errors = tuple()
- counts = {}
- for marker in row:
- counts = {**counts, marker: counts.get(marker, 0) + 1}
- if marker is None or marker == "":
- errors = errors + (save_error(rqfe.InvalidValue(
- filename,
- "markers"
- "-",
- marker,
- "A marker MUST be a valid value.")),)
-
-    return errors + tuple(
-        save_error(rqfe.InvalidValue(
-            filename,
-            "markers",
-            key,
-            key,
-            f"Marker '{key}' was repeated {value} times"))
-        for key, value in counts.items() if value > 1)
-
-
-def check_geno_line(
- filename: str,
- headers: tuple[str, ...],
-        row: tuple[Union[str, None], ...],
-        cdata: dict,
-        save_error: Callable[[rqfe.InvalidValue], rqfe.InvalidValue]
-) -> tuple[rqfe.InvalidValue, ...]:
- """Check that the geno line is correct."""
- errors = tuple()
- # Verify that line has same number of columns as headers
- if len(headers) != len(row):
- errors = errors + (save_error(rqfe.InvalidValue(
- filename,
- headers[0],
- row[0],
- row[0],
- "Every line MUST have the same number of columns.")),)
-
- # First column is the individuals/cases/samples
- if not bool(row[0]):
- errors = errors + (save_error(rqfe.InvalidValue(
- filename,
- headers[0],
- row[0],
- row[0],
- "The sample/case MUST be a valid value.")),)
-
- def __process_value__(val):
- if val in cdata["na.strings"]:
- return None
- if val in cdata["alleles"]:
- return cdata["genotypes"][val]
-
- genocode = cdata.get("genotypes", {})
- for coltitle, cellvalue in zip(headers[1:],row[1:]):
- if (
- bool(genocode) and
- cellvalue is not None and
- cellvalue not in genocode.keys()
- ):
- errors = errors + (save_error(rqfe.InvalidValue(
- filename, row[0], coltitle, cellvalue,
- f"Value '{cellvalue}' is invalid. Expected one of "
- f"'{', '.join(genocode.keys())}'.")),)
-
- return errors
-
-
-def push_file_error_to_redis(rconn: Redis, key: str, error: InvalidValue) -> InvalidValue:
- """Push the file error to redis a json string
-
- Parameters
- ----------
- rconn: Connection to redis
- key: The name of the list where we push the errors
- error: The file error to save
-
- Returns
- -------
- Returns the file error it saved
- """
- if bool(error):
- rconn.rpush(key, json.dumps(error._asdict()))
- return error
-
-
-def file_errors_and_details(
- redisargs: dict[str, str],
- file_: Path,
- filetype: str,
- cdata: dict,
- linesplitterfn: Callable,
- linejoinerfn: Callable,
- headercheckers: tuple[Callable, ...],
- bodycheckers: tuple[Callable, ...]
-) -> dict:
- """Compute errors, and other file metadata."""
- errors = tuple()
- if cdata[f"{filetype}_transposed"]:
- rqtl2.transpose_csv_with_rename(file_, linesplitterfn, linejoinerfn)
-
- with Redis.from_url(redisargs["redisuri"], decode_responses=True) as rconn:
- save_error_fn = partial(push_file_error_to_redis,
- rconn,
- error_list_name(filetype, file_.name))
- for lineno, line in enumerate(open_file(file_), start=1):
- row = linesplitterfn(line)
- if lineno == 1:
- headers = tuple(row)
- errors = errors + reduce(
- lambda errs, fnct: errs + fnct(
- file_.name, row[1:], save_error_fn),
- headercheckers,
- tuple())
- continue
-
- errors = errors + reduce(
- lambda errs, fnct: errs + fnct(
- file_.name, headers, row, cdata, save_error_fn),
- bodycheckers,
- tuple())
-
- filedetails = {
- "filename": file_.name,
- "filesize": os.stat(file_).st_size,
- "linecount": lineno
- }
- rconn.hset(redisargs["fqjobid"],
- f"file-details:{filetype}:{file_.name}",
- json.dumps(filedetails))
- return {**filedetails, "errors": errors}
-
-
-def error_list_name(filetype: str, filename: str):
- """Compute the name of the list where the errors will be pushed.
-
- Parameters
- ----------
- filetype: The type of file. One of `r_qtl.r_qtl2.FILE_TYPES`
- filename: The name of the file.
- """
- return f"errors:{filetype}:{filename}"
-
-
-def check_for_geno_errors(
- redisargs: dict[str, str],
- extractdir: Path,
- cdata: dict,
-        linesplitterfn: Callable[[str], tuple[Union[str, None], ...]],
- linejoinerfn: Callable[[tuple[Union[str, None], ...]], str],
- logger: Logger
-) -> bool:
- """Check for errors in genotype files."""
- if "geno" in cdata or "founder_geno" in cdata:
- genofiles = tuple(
- extractdir.joinpath(fname) for fname in cdata.get("geno", []))
- fgenofiles = tuple(
- extractdir.joinpath(fname) for fname in cdata.get("founder_geno", []))
- allgenofiles = genofiles + fgenofiles
- with Redis.from_url(redisargs["redisuri"], decode_responses=True) as rconn:
- error_list_names = [
- error_list_name("geno", file_.name) for file_ in allgenofiles]
- for list_name in error_list_names:
- rconn.delete(list_name)
- rconn.hset(
- redisargs["fqjobid"],
- "geno-errors-lists",
- json.dumps(error_list_names))
- processes = [
- mproc.Process(target=file_errors_and_details,
- args=(
- redisargs,
- file_,
- ftype,
- cdata,
- linesplitterfn,
- linejoinerfn,
- (check_markers,),
- (check_geno_line,))
- )
- for ftype, file_ in (
- tuple(("geno", file_) for file_ in genofiles) +
- tuple(("founder_geno", file_) for file_ in fgenofiles))
- ]
- for process in processes:
- process.start()
- # Set expiry for any created error lists
- for key in error_list_names:
- rconn.expire(name=key,
- time=timedelta(seconds=redisargs["redisexpiry"]))
-
-        # TODO: Add the errors to redis
- if any(rconn.llen(errlst) > 0 for errlst in error_list_names):
- logger.error("At least one of the 'geno' files has (an) error(s).")
- return True
- logger.info("No error(s) found in any of the 'geno' files.")
-
- else:
- logger.info("No 'geno' files to check.")
-
- return False
-
-
-# def check_for_pheno_errors(...):
-# """Check for errors in phenotype files."""
-# pass
-
-
-# def check_for_phenose_errors(...):
-# """Check for errors in phenotype, standard-error files."""
-# pass
-
-
-# def check_for_phenocovar_errors(...):
-# """Check for errors in phenotype-covariates files."""
-# pass
-
-
-def run_qc(rconn: Redis, args: Namespace, fqjobid: str, logger: Logger) -> int:
- """Run quality control checks on R/qtl2 bundles."""
- thejob = parse_job(rconn, args.redisprefix, args.jobid)
- print(f"THE JOB =================> {thejob}")
- jobmeta = thejob["job-metadata"]
- inpath = Path(jobmeta["rqtl2-bundle-file"])
- extractdir = inpath.parent.joinpath(f"{inpath.name}__extraction_dir")
- with ZipFile(inpath, "r") as zfile:
- rqtl2.extract(zfile, extractdir)
-
- ### BEGIN: The quality control checks ###
- cdata = rqtl2.control_data(extractdir)
- splitter = build_line_splitter(cdata)
- joiner = build_line_joiner(cdata)
-
- redisargs = {
- "fqjobid": fqjobid,
- "redisuri": args.redisuri,
- "redisexpiry": args.redisexpiry
- }
- check_for_missing_files(rconn, fqjobid, extractdir, logger)
- # check_for_pheno_errors(...)
- check_for_geno_errors(redisargs, extractdir, cdata, splitter, joiner, logger)
- # check_for_phenose_errors(...)
- # check_for_phenocovar_errors(...)
- ### END: The quality control checks ###
-
- def __fetch_errors__(rkey: str) -> tuple:
- return tuple(json.loads(rconn.hget(fqjobid, rkey) or "[]"))
-
- return (1 if any((
- bool(__fetch_errors__(key))
- for key in
- ("errors-geno", "errors-pheno", "errors-phenos", "errors-phenocovar")))
- else 0)
-
-
-if __name__ == "__main__":
- def main():
- """Enter R/qtl2 bundle QC runner."""
- args = add_global_data_arguments(init_cli_parser(
- "qc-on-rqtl2-bundle", "Run QC on R/qtl2 bundle.")).parse_args()
- check_redis(args.redisuri)
- check_db(args.databaseuri)
-
- logger = getLogger("qc-on-rqtl2-bundle")
- logger.addHandler(StreamHandler(stream=sys.stderr))
- logger.setLevel("DEBUG")
-
- fqjobid = jobs.job_key(args.redisprefix, args.jobid)
- with Redis.from_url(args.redisuri, decode_responses=True) as rconn:
- logger.addHandler(setup_redis_logger(
- rconn, fqjobid, f"{fqjobid}:log-messages",
- args.redisexpiry))
-
- exitcode = run_qc(rconn, args, fqjobid, logger)
- rconn.hset(
- jobs.job_key(args.redisprefix, args.jobid), "exitcode", exitcode)
- return exitcode
-
- sys.exit(main())
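
Note on the deleted script, should it ever be resurrected: check_for_geno_errors() starts one worker process per genotype file but never joins them before polling the Redis error lists, and run_qc() derives its exit code from hash fields ("errors-geno" and similar) that the geno checker never writes (it pushes to "errors:{filetype}:{filename}" lists instead), so genotype errors could not influence the exit code. A minimal sketch of the join-before-inspect pattern the checker was missing; process_specs stands in for the argument tuples built above:

    import multiprocessing as mproc

    processes = [mproc.Process(target=file_errors_and_details, args=spec)
                 for spec in process_specs]
    for process in processes:
        process.start()
    for process in processes:
        # Wait for every worker to finish before reading the error
        # lists; otherwise the llen() checks race against the workers.
        process.join()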
diff --git a/uploader/authorisation.py b/uploader/authorisation.py
index ee8fe97..bd3454c 100644
--- a/uploader/authorisation.py
+++ b/uploader/authorisation.py
@@ -18,7 +18,7 @@ def require_login(function):
"""Check that the user is logged in and their token is valid."""
def __clear_session__(_no_token):
session.clear_session_info()
- flash("You need to be logged in.", "alert-danger")
+ flash("You need to be logged in.", "alert-danger big-alert")
return redirect("/")
return session.user_token().either(
diff --git a/uploader/base_routes.py b/uploader/base_routes.py
index 742a254..326086f 100644
--- a/uploader/base_routes.py
+++ b/uploader/base_routes.py
@@ -46,6 +46,13 @@ def jquery(filename):
appenv(), f"share/genenetwork2/javascript/jquery/{filename}")
+@base.route("/datatables/<path:filename>")
+def datatables(filename):
+ """Fetch DataTables files."""
+ return send_from_directory(
+ appenv(), f"share/genenetwork2/javascript/DataTables/{filename}")
+
+
@base.route("/node-modules/<path:filename>")
def node_modules(filename):
"""Fetch node-js modules."""
diff --git a/uploader/files/__init__.py b/uploader/files/__init__.py
index 60d2f3b..53c3176 100644
--- a/uploader/files/__init__.py
+++ b/uploader/files/__init__.py
@@ -1,3 +1,4 @@
+"""General files and chunks utilities."""
from .chunks import chunked_binary_read
from .functions import (fullpath,
save_file,
diff --git a/uploader/phenotypes/models.py b/uploader/phenotypes/models.py
index c9afc22..e1ec0c9 100644
--- a/uploader/phenotypes/models.py
+++ b/uploader/phenotypes/models.py
@@ -75,7 +75,7 @@ def dataset_phenotypes(conn: mdb.Connection,
limit: Optional[int] = None) -> tuple[dict, ...]:
"""Fetch the actual phenotypes."""
_query = (
- "SELECT pheno.*, pxr.Id, ist.InbredSetCode FROM Phenotype AS pheno "
+ "SELECT pheno.*, pxr.Id AS xref_id, ist.InbredSetCode FROM Phenotype AS pheno "
"INNER JOIN PublishXRef AS pxr ON pheno.Id=pxr.PhenotypeId "
"INNER JOIN PublishFreeze AS pf ON pxr.InbredSetId=pf.InbredSetId "
"INNER JOIN InbredSet AS ist ON pf.InbredSetId=ist.Id "
diff --git a/uploader/phenotypes/views.py b/uploader/phenotypes/views.py
index d283e47..ddec54c 100644
--- a/uploader/phenotypes/views.py
+++ b/uploader/phenotypes/views.py
@@ -3,12 +3,14 @@ import sys
import uuid
import json
import datetime
+from typing import Any
from pathlib import Path
from zipfile import ZipFile
from functools import wraps, reduce
from logging import INFO, ERROR, DEBUG, FATAL, CRITICAL, WARNING
from redis import Redis
+from pymonad.either import Left
from requests.models import Response
from MySQLdb.cursors import DictCursor
from gn_libs.mysqldb import database_connection
@@ -195,12 +197,10 @@ def view_dataset(# pylint: disable=[unused-argument]
phenotype_count=phenotypes_count(
conn, population["Id"], dataset["Id"]),
phenotypes=enumerate_sequence(
- dataset_phenotypes(conn,
- population["Id"],
- dataset["Id"],
- offset=start_at,
- limit=count),
- start=start_at+1),
+ dataset_phenotypes(
+ conn,
+ population["Id"],
+ dataset["Id"])),
start_from=start_at,
count=count,
activelink="view-dataset")
@@ -229,6 +229,11 @@ def view_phenotype(# pylint: disable=[unused-argument]
population["Id"],
dataset["Id"],
xref_id)
+ def __non_empty__(value) -> bool:
+ if isinstance(value, str):
+ return value.strip() != ""
+ return bool(value)
+
return render_template(
"phenotypes/view-phenotype.html",
species=species,
@@ -236,14 +241,13 @@ def view_phenotype(# pylint: disable=[unused-argument]
dataset=dataset,
xref_id=xref_id,
phenotype=phenotype,
- has_se=all(bool(item.get("error")) for item in phenotype["data"]),
+ has_se=any(bool(item.get("error")) for item in phenotype["data"]),
publish_data={
key.replace("_", " "): val
for key,val in
(phenotype_publication_data(conn, phenotype["Id"]) or {}).items()
if (key in ("PubMed_ID", "Authors", "Title", "Journal")
- and val is not None
- and val.strip() != "")
+ and __non_empty__(val))
},
privileges=(privileges
### For demo! Do not commit this part
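
The has_se switch from all() to any() above changes when the SE and N columns render in view-phenotype.html: previously a single sample without a standard error hid the columns for the whole phenotype; now they show if at least one sample has an SE. With made-up data:

    data = [{"value": 1.2, "error": 0.1},
            {"value": 1.5, "error": None}]  # one sample lacks an SE
    all(bool(item.get("error")) for item in data)  # False: columns hidden
    any(bool(item.get("error")) for item in data)  # True: columns shown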
@@ -520,7 +524,7 @@ def job_status(
@phenotypesbp.route(
"<int:species_id>/populations/<int:population_id>/phenotypes/datasets"
- "/<int:dataset_id>/review-job/<uuid:job_id>",
+ "/<int:dataset_id>/job/<uuid:job_id>/review",
methods=["GET"])
@require_login
@with_dataset(
@@ -548,11 +552,12 @@ def review_job_data(
filetype: (by_type.get(filetype, tuple())
+ ({"filename": item[0], **item[1]},))
}
- metadata = reduce(__metadata_by_type__,
- (jobs.job_files_metadata(
- rconn, jobs.jobsnamespace(), job['jobid'])
- if job else {}).items(),
- {})
+ metadata: dict[str, Any] = reduce(
+ __metadata_by_type__,
+ (jobs.job_files_metadata(
+ rconn, jobs.jobsnamespace(), job['jobid'])
+ if job else {}).items(),
+ {})
def __desc__(filetype):
match filetype:
@@ -593,6 +598,7 @@ def review_job_data(
def update_phenotype_metadata(conn, metadata: dict):
+ """Update a phenotype's basic metadata values."""
with conn.cursor(cursorclass=DictCursor) as cursor:
cursor.execute("SELECT * FROM Phenotype WHERE Id=%(phenotype-id)s",
metadata)
@@ -623,6 +629,7 @@ def update_phenotype_metadata(conn, metadata: dict):
def update_phenotype_values(conn, values):
+ """Update a phenotype's data values."""
with conn.cursor() as cursor:
cursor.executemany(
"UPDATE PublishData SET value=%(new)s "
@@ -637,6 +644,7 @@ def update_phenotype_values(conn, values):
def update_phenotype_se(conn, serrs):
+ """Update a phenotype's standard-error values."""
with conn.cursor() as cursor:
cursor.executemany(
"INSERT INTO PublishSE(DataId, StrainId, error) "
@@ -652,6 +660,7 @@ def update_phenotype_se(conn, serrs):
def update_phenotype_n(conn, counts):
+ """Update a phenotype's strain counts."""
with conn.cursor() as cursor:
cursor.executemany(
"INSERT INTO NStrain(DataId, StrainId, count) "
@@ -680,9 +689,25 @@ def update_phenotype_data(conn, data: dict):
def __separate_items__(acc, row):
key, val = row
- return ({**acc[0], key: {**val["value"], "changed?": (not val["value"]["new"] == val["value"]["original"])}},
- {**acc[1], key: {**val["se"] , "changed?": (not val["se"]["new"] == val["se"]["original"])}},
- {**acc[2], key: {**val["n"] , "changed?": (not val["n"]["new"] == val["n"]["original"])}})
+ return ({
+ **acc[0],
+ key: {
+ **val["value"],
+ "changed?": (not val["value"]["new"] == val["value"]["original"])
+ }
+ }, {
+ **acc[1],
+ key: {
+ **val["se"],
+ "changed?": (not val["se"]["new"] == val["se"]["original"])
+ }
+ },{
+ **acc[2],
+ key: {
+ **val["n"],
+ "changed?": (not val["n"]["new"] == val["n"]["original"])
+ }
+ })
values, serrs, counts = tuple(
tuple({
@@ -691,8 +716,8 @@ def update_phenotype_data(conn, data: dict):
"new": row[1]["new"]
} for row in item)
for item in (
- filter(lambda val: val[1]["changed?"], item.items())
- for item in reduce(
+ filter(lambda val: val[1]["changed?"], item.items())# type: ignore[arg-type]
+ for item in reduce(# type: ignore[var-annotated]
__separate_items__,
reduce(__organise_by_dataid_and_strainid__,
data.items(),
@@ -713,7 +738,7 @@ def update_phenotype_data(conn, data: dict):
species_redirect_uri="species.populations.phenotypes.index",
population_redirect_uri="species.populations.phenotypes.select_population",
redirect_uri="species.populations.phenotypes.list_datasets")
-def edit_phenotype_data(
+def edit_phenotype_data(# pylint: disable=[unused-argument]
species: dict,
population: dict,
dataset: dict,
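
For orientation, __separate_items__ fans each (key, value) row out into three parallel accumulators (values, standard errors, counts), tagging every entry with a "changed?" flag that the filter in the following hunk uses to keep only edited cells. An illustrative reduction over made-up data:

    from functools import reduce

    rows = {("17001", "4"): {"value": {"original": "1.2", "new": "1.3"},
                             "se": {"original": "0.1", "new": "0.1"},
                             "n": {"original": "5", "new": "5"}}}
    values, serrs, counts = reduce(
        __separate_items__, rows.items(), ({}, {}, {}))
    # values[("17001", "4")]["changed?"] is True (1.2 -> 1.3);
    # the matching serrs and counts entries have "changed?" False.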
diff --git a/uploader/population/rqtl2.py b/uploader/population/rqtl2.py
index 436eca0..044cdd4 100644
--- a/uploader/population/rqtl2.py
+++ b/uploader/population/rqtl2.py
@@ -11,13 +11,11 @@ from typing import Union, Callable, Optional
import MySQLdb as mdb
from redis import Redis
from MySQLdb.cursors import DictCursor
-from werkzeug.utils import secure_filename
from gn_libs.mysqldb import database_connection
from flask import (
flash,
escape,
request,
- jsonify,
url_for,
redirect,
Response,
@@ -191,127 +189,6 @@ def trigger_rqtl2_bundle_qc(
return jobid
-def chunk_name(uploadfilename: str, chunkno: int) -> str:
- """Generate chunk name from original filename and chunk number"""
- if uploadfilename == "":
- raise ValueError("Name cannot be empty!")
- if chunkno < 1:
- raise ValueError("Chunk number must be greater than zero")
- return f"{secure_filename(uploadfilename)}_part_{chunkno:05d}"
-
-
-def chunks_directory(uniqueidentifier: str) -> Path:
- """Compute the directory where chunks are temporarily stored."""
- if uniqueidentifier == "":
- raise ValueError("Unique identifier cannot be empty!")
- return Path(app.config["UPLOAD_FOLDER"], f"tempdir_{uniqueidentifier}")
-
-
-@rqtl2.route(("<int:species_id>/populations/<int:population_id>/rqtl2/"
- "/rqtl2-bundle-chunked"),
- methods=["GET"])
-@require_login
-def upload_rqtl2_bundle_chunked_get(# pylint: disable=["unused-argument"]
- species_id: int,
- population_id: int
-):
- """
- Extension to the `upload_rqtl2_bundle` endpoint above that provides a way
- for testing whether all the chunks have been uploaded and to assist with
-    resuming a failed upload.
- """
- fileid = request.args.get("resumableIdentifier", type=str) or ""
- filename = request.args.get("resumableFilename", type=str) or ""
- chunk = request.args.get("resumableChunkNumber", type=int) or 0
-    if not (fileid and filename and chunk):
- return jsonify({
- "message": "At least one required query parameter is missing.",
- "error": "BadRequest",
- "statuscode": 400
- }), 400
-
- if Path(chunks_directory(fileid),
- chunk_name(filename, chunk)).exists():
- return "OK"
-
- return jsonify({
- "message": f"Chunk {chunk} was not found.",
- "error": "NotFound",
- "statuscode": 404
- }), 404
-
-
-def __merge_chunks__(targetfile: Path, chunkpaths: tuple[Path, ...]) -> Path:
- """Merge the chunks into a single file."""
- with open(targetfile, "ab") as _target:
- for chunkfile in chunkpaths:
- with open(chunkfile, "rb") as _chunkdata:
- _target.write(_chunkdata.read())
-
- chunkfile.unlink()
- return targetfile
-
-
-@rqtl2.route(("<int:species_id>/population/<int:population_id>/rqtl2/upload/"
- "/rqtl2-bundle-chunked"),
- methods=["POST"])
-@require_login
-def upload_rqtl2_bundle_chunked_post(species_id: int, population_id: int):
- """
- Extension to the `upload_rqtl2_bundle` endpoint above that allows large
- files to be uploaded in chunks.
-
- This should hopefully speed up uploads, and if done right, even enable
-    resumable uploads.
- """
- _totalchunks = request.form.get("resumableTotalChunks", type=int) or 0
- _chunk = request.form.get("resumableChunkNumber", default=1, type=int)
- _uploadfilename = request.form.get(
- "resumableFilename", default="", type=str) or ""
- _fileid = request.form.get(
- "resumableIdentifier", default="", type=str) or ""
- _targetfile = Path(app.config["UPLOAD_FOLDER"], _fileid)
-
- if _targetfile.exists():
- return jsonify({
- "message": (
- "A file with a similar unique identifier has previously been "
- "uploaded and possibly is/has being/been processed."),
- "error": "BadRequest",
- "statuscode": 400
- }), 400
-
- try:
- # save chunk data
- chunks_directory(_fileid).mkdir(exist_ok=True, parents=True)
- request.files["file"].save(Path(chunks_directory(_fileid),
- chunk_name(_uploadfilename, _chunk)))
-
- # Check whether upload is complete
- chunkpaths = tuple(
- Path(chunks_directory(_fileid), chunk_name(_uploadfilename, _achunk))
- for _achunk in range(1, _totalchunks+1))
- if all(_file.exists() for _file in chunkpaths):
- # merge_files and clean up chunks
- __merge_chunks__(_targetfile, chunkpaths)
- chunks_directory(_fileid).rmdir()
- jobid = trigger_rqtl2_bundle_qc(
- species_id, population_id, _targetfile, _uploadfilename)
- return url_for(
- "expression-data.rqtl2.rqtl2_bundle_qc_status", jobid=jobid)
- except Exception as exc:# pylint: disable=[broad-except]
- msg = "Error processing uploaded file chunks."
- app.logger.error(msg, exc_info=True, stack_info=True)
- return jsonify({
- "message": msg,
- "error": type(exc).__name__,
- "error-description": " ".join(str(arg) for arg in exc.args),
- "error-trace": traceback.format_exception(exc)
- }), 500
-
- return "OK"
-
-
@rqtl2.route("/upload/species/rqtl2-bundle/qc-status/<uuid:jobid>",
methods=["GET", "POST"])
@require_login
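
The deleted endpoints implemented the probe-then-upload contract used by resumable.js-style chunked uploaders: the client GETs with resumableIdentifier, resumableFilename and resumableChunkNumber to ask whether a chunk is already on the server (200 "OK" means skip it, 404 means send it), then POSTs each chunk to the companion endpoint, which merges the parts and triggers QC once all chunks exist. A sketch of the client side of that exchange; the host and exact paths are illustrative:

    import requests

    PROBE = "http://localhost:8080/1/populations/1/rqtl2/rqtl2-bundle-chunked"
    UPLOAD = "http://localhost:8080/1/populations/1/rqtl2/upload/rqtl2-bundle-chunked"
    meta = {"resumableIdentifier": "bundle-abc123",
            "resumableFilename": "bundle.zip",
            "resumableChunkNumber": 1,
            "resumableTotalChunks": 3}
    if requests.get(PROBE, params=meta).status_code == 404:
        # Chunk not on the server yet: send it.
        with open("bundle.zip.part-00001", "rb") as chunk:
            requests.post(UPLOAD, data=meta, files={"file": chunk})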
diff --git a/uploader/static/css/styles.css b/uploader/static/css/styles.css
index 7bd51a9..a1107d5 100644
--- a/uploader/static/css/styles.css
+++ b/uploader/static/css/styles.css
@@ -1,170 +1,134 @@
+* {
+ box-sizing: border-box;
+}
+
body {
margin: 0.7em;
- box-sizing: border-box;
display: grid;
- grid-template-columns: 1fr 6fr;
- grid-template-rows: 4em 100%;
+ grid-template-columns: 1fr 9fr;
grid-gap: 20px;
font-family: Georgia, Garamond, serif;
font-style: normal;
+ font-size: 20px;
}
#header {
- grid-column: 1/3;
- width: 100%;
- /* background: cyan; */
- padding-top: 0.5em;
- border-radius: 0.5em;
+ /* Place it in the parent element */
+ grid-column-start: 1;
+ grid-column-end: 3;
+
+ /* Define layout for the children elements */
+ display: grid;
+ grid-template-columns: 8fr 2fr;
+ /* Content styling */
background-color: #336699;
- border-color: #080808;
color: #FFFFFF;
- background-image: none;
+ border-radius: 3px;
}
-#header .header {
- font-size: 1.7em;
- display: inline-block;
- text-align: start;
-}
+#header #header-text {
+ /* Place it in the parent element */
+ grid-column-start: 1;
+ grid-column-end: 2;
-#header .header-nav {
- display: inline-block;
- color: #FFFFFF;
+ /* Content styling */
+ font-size: 1.7em;
+ padding-left: 1em;
}
-#header .header-nav li {
- border-width: 1px;
- border-color: #FFFFFF;
- vertical-align: middle;
- margin: 0.01em;
- border-style: solid;
- border-width: 2px;
- border-radius: 0.5em;
- text-align: center;
+#header #header-nav {
+ /* Place it in the parent element */
+ grid-column-start: 2;
+ grid-column-end: 3;
}
-#header .header-nav a {
+#header #header-nav .nav li a {
+ /* Content styling */
color: #FFFFFF;
- text-decoration: none;
+ background: #4477AA;
+ border: solid 5px #336699;
+ border-radius: 5px;
+ font-size: 0.7em;
+ text-align: center;
}
#nav-sidebar {
- grid-column: 1/2;
- /* background: #e5e5ff; */
- padding-top: 0.5em;
- border-radius: 0.5em;
- font-size: 1.2em;
+ /* Place it in the parent element */
+ grid-column-start: 1;
+ grid-column-end: 2;
}
-#main {
- grid-column: 2/3;
- width: 100%;
- /* background: gray; */
+#nav-sidebar .nav li a:hover {
border-radius: 0.5em;
}
-.pagetitle {
- line-height: 1;
- padding-top: 0.2em;
- /* background: pink; */
+#nav-sidebar .nav .activemenu {
+ border-style: solid;
border-radius: 0.5em;
- /* background-color: #6699CC; */
- /* background-color: #77AADD; */
- background-color: #88BBEE;
-}
-
-.pagetitle .title {
- text-align: start;
- text-transform: capitalize;
- padding-left: 0.5em;
- font-size: 1.7em;
-}
-
-.pagetitle .breadcrumb {
- background: none;
-}
-
-.pagetitle .breadcrumb .active a {
- color: #333333;
+ border-color: #AAAAAA;
+ background-color: #EFEFEF;
}
-.pagetitle .breadcrumb a {
- color: #666666;
-}
+#main {
+ /* Place it in the parent element */
+ grid-column-start: 2;
+ grid-column-end: 3;
-.main-content {
- font-size: 1.275em;
+ /* Define layout for the children elements */
+ display: grid;
+ grid-template-columns: 1fr;
+ grid-template-rows: 4em 100%;
+ grid-gap: 1em;
}
-.breadcrumb {
- text-transform: capitalize;
-}
+#main #pagetitle {
+ /* Place it in the parent element */
+ grid-column-start: 1;
+ grid-column-end: 3;
-dd {
- margin-left: 3em;
- font-size: 0.88em;
- padding-bottom: 1em;
+ /* Content-styling */
+ border-radius: 3px;
+ background-color: #88BBEE;
}
-input[type="submit"], .btn {
+#main #pagetitle .title {
+ font-size: 1.4em;
text-transform: capitalize;
+ padding-left: 0.5em;
}
-.card {
- margin-top: 0.3em;
- border-width: 1px;
- border-style: solid;
- border-radius: 0.3em;
- border-color: #AAAAAA;
- padding: 0.5em;
-}
+#main #all-content {
+ /* Place it in the parent element */
+ grid-column-start: 1;
+ grid-column-end: 3;
-.activemenu {
- border-style: solid;
- border-radius: 0.5em;
- border-color: #AAAAAA;
- background-color: #EFEFEF;
+ /* Define layout for the children elements */
+ display: grid;
+ grid-template-columns: 7fr 3fr; /* For a maximum screen width of 1366 pixels */
+ grid-gap: 1.5em;
}
-.danger {
- color: #A94442;
- border-color: #DCA7A7;
- background-color: #F2DEDE;
+#main #all-content .row {
+ margin: 0 2px;
}
-.heading {
- border-bottom: solid #EEBB88;
- text-transform: capitalize;
+#main #all-content #main-content {
+ background: #FFFFFF;
+ max-width: 950px;
}
-.subheading {
- padding: 1em 0 0.1em 0.5em;
- border-bottom: solid #88BBEE;
+#pagetitle .breadcrumb {
+ background: none;
text-transform: capitalize;
+ font-size: 0.75em;
}
-form {
- margin-top: 0.3em;
- background: #E5E5FF;
- padding: 0.5em;
- border-radius:0.5em;
-}
-
-form .form-control {
- background-color: #EAEAFF;
-}
-
-.table-form-table thead {
- background: #E5E5FF;
-}
-
-
-.sidebar-content .card .card-title {
- font-size: 1.5em;
+#pagetitle .breadcrumb .active a {
+ color: #333333;
}
-.sidebar-content .card-text table tbody td:nth-child(1) {
- font-weight: bolder;
+#pagetitle .breadcrumb a {
+ color: #666666;
}
diff --git a/uploader/templates/base.html b/uploader/templates/base.html
index c124b13..c37e1f3 100644
--- a/uploader/templates/base.html
+++ b/uploader/templates/base.html
@@ -23,25 +23,24 @@
</head>
<body>
- <header id="header" class="container-fluid">
- <div class="row">
- <span class="header col-lg-9">GeneNetwork Data Quality Control and Upload</span>
- <nav class="header-nav col-lg-3">
- <ul class="nav justify-content-end">
- <li>
- {%if user_logged_in()%}
- <a href="{{url_for('oauth2.logout')}}"
- title="Log out of the system">{{user_email()}} &mdash; Log Out</a>
- {%else%}
- <a href="{{authserver_authorise_uri()}}"
- title="Log in to the system">Log In</a>
- {%endif%}
- </li>
- </ul>
- </nav>
+ <header id="header">
+ <span id="header-text">GeneNetwork Data Quality Control and Upload</span>
+ <nav id="header-nav">
+ <ul class="nav justify-content-end">
+ <li>
+ {%if user_logged_in()%}
+ <a href="{{url_for('oauth2.logout')}}"
+ title="Log out of the system">{{user_email()}} &mdash; Log Out</a>
+ {%else%}
+ <a href="{{authserver_authorise_uri()}}"
+ title="Log in to the system">Log In</a>
+ {%endif%}
+ </li>
+ </ul>
+ </nav>
</header>
- <aside id="nav-sidebar" class="container-fluid">
+ <aside id="nav-sidebar">
<ul class="nav flex-column">
<li {%if activemenu=="home"%}class="activemenu"{%endif%}>
<a href="/" >Home</a></li>
@@ -90,9 +89,9 @@
</ul>
</aside>
- <main id="main" class="main container-fluid">
+ <main id="main" class="main">
- <div class="pagetitle row">
+ <div id="pagetitle" class="pagetitle">
<span class="title">GN Uploader: {%block pagetitle%}{%endblock%}</span>
<nav>
<ol class="breadcrumb">
@@ -108,14 +107,12 @@
</nav>
</div>
- <div class="row">
- <div class="container-fluid">
- <div class="col-md-8 main-content">
- {%block contents%}{%endblock%}
- </div>
- <div class="sidebar-content col-md-4">
- {%block sidebarcontents%}{%endblock%}
- </div>
+ <div id="all-content">
+ <div id="main-content">
+ {%block contents%}{%endblock%}
+ </div>
+ <div id="sidebar-content">
+ {%block sidebarcontents%}{%endblock%}
</div>
</div>
</main>
@@ -127,7 +124,5 @@
filename='js/bootstrap.min.js')}}"></script>
<script type="text/javascript" src="/static/js/misc.js"></script>
{%block javascript%}{%endblock%}
-
</body>
-
</html>
diff --git a/uploader/templates/phenotypes/add-phenotypes-raw-files.html b/uploader/templates/phenotypes/add-phenotypes-raw-files.html
index d9a8424..7f8d8b0 100644
--- a/uploader/templates/phenotypes/add-phenotypes-raw-files.html
+++ b/uploader/templates/phenotypes/add-phenotypes-raw-files.html
@@ -600,10 +600,12 @@
console.log("SUCCESS DATA: ", data);
console.log("SUCCESS STATUS: ", textstatus);
console.log("SUCCESS jqXHR: ", jqxhr);
+ window.location.assign(window.location.origin + data["redirect-to"]);
},
});
return false;
}
+ return false;
};
var uploadSuccess = (file_input_name) => {
diff --git a/uploader/templates/phenotypes/edit-phenotype.html b/uploader/templates/phenotypes/edit-phenotype.html
index 260d032..32c903f 100644
--- a/uploader/templates/phenotypes/edit-phenotype.html
+++ b/uploader/templates/phenotypes/edit-phenotype.html
@@ -25,7 +25,7 @@
<div class="row">
<h2 class="heading">edit phenotype data</h2>
- <p>The two (2) forms provided in this page help you update the data for the
+    <p>The forms provided on this page help you update the data for the
phenotypes, and the publication information for the phenotype,
respectively.</p>
</div>
diff --git a/uploader/templates/phenotypes/view-dataset.html b/uploader/templates/phenotypes/view-dataset.html
index 011f8f6..4e1be6b 100644
--- a/uploader/templates/phenotypes/view-dataset.html
+++ b/uploader/templates/phenotypes/view-dataset.html
@@ -5,6 +5,11 @@
{%block title%}Phenotypes{%endblock%}
+{%block css%}
+<link rel="stylesheet"
+ href="{{url_for('base.datatables', filename='css/jquery.dataTables.css')}}" />
+{%endblock%}
+
{%block pagetitle%}Phenotypes{%endblock%}
{%block lvl4_breadcrumbs%}
@@ -56,12 +61,7 @@
<div class="row">
<h2>Phenotype Data</h2>
-
- <p>This dataset has a total of {{phenotype_count}} phenotypes.</p>
-
- {{table_pagination(start_from, count, phenotype_count, url_for('species.populations.phenotypes.view_dataset', species_id=species.SpeciesId, population_id=population.Id, dataset_id=dataset.Id), "phenotypes")}}
-
- <table class="table">
+ <table id="tbl-phenotypes-list" class="table">
<thead>
<tr>
<th>#</th>
@@ -70,24 +70,7 @@
</tr>
</thead>
- <tbody>
- {%for pheno in phenotypes%}
- <tr>
- <td>{{pheno.sequence_number}}</td>
- <td><a href="{{url_for('species.populations.phenotypes.view_phenotype',
- species_id=species.SpeciesId,
- population_id=population.Id,
- dataset_id=dataset.Id,
- xref_id=pheno['pxr.Id'])}}"
- title="View phenotype details"
- target="_blank">
- {{pheno.InbredSetCode}}_{{pheno["pxr.Id"]}}</a></td>
- <td>{{pheno.Post_publication_description or pheno.Pre_publication_abbreviation or pheno.Original_description}}</td>
- </tr>
- {%else%}
- <tr><td colspan="5"></td></tr>
- {%endfor%}
- </tbody>
+ <tbody></tbody>
</table>
</div>
{%endblock%}
@@ -95,3 +78,39 @@
{%block sidebarcontents%}
{{display_population_card(species, population)}}
{%endblock%}
+
+
+{%block javascript%}
+<script src="{{url_for('base.datatables',
+ filename='js/jquery.dataTables.js')}}"></script>
+<script type="text/javascript">
+ $(function() {
+ $("#tbl-phenotypes-list").DataTable({
+ responsive: true,
+ data: {{phenotypes | tojson}},
+ columns: [
+ {data: "sequence_number"},
+ {
+ data: function(pheno) {
+ var spcs_id = {{species.SpeciesId}};
+ var pop_id = {{population.Id}};
+ var dtst_id = {{dataset.Id}};
+ return `<a href="/species/${spcs_id}` +
+ `/populations/${pop_id}` +
+ `/phenotypes/datasets/${dtst_id}` +
+ `/phenotype/${pheno.xref_id}` +
+ `" target="_blank">` +
+ `${pheno.InbredSetCode}_${pheno.xref_id}` +
+ `</a>`;
+ }
+ },
+ {data: function(pheno) {
+ return (pheno.Post_publication_description ||
+ pheno.Original_description ||
+ pheno.Pre_publication_description);
+ }}
+ ]
+ });
+ });
+</script>
+{%endblock%}
diff --git a/uploader/templates/phenotypes/view-phenotype.html b/uploader/templates/phenotypes/view-phenotype.html
index b42f680..21ac501 100644
--- a/uploader/templates/phenotypes/view-phenotype.html
+++ b/uploader/templates/phenotypes/view-phenotype.html
@@ -98,7 +98,7 @@ or "group:resource:delete-resource" in privileges%}
<th>Sample</th>
<th>Value</th>
{%if has_se%}
- <th>SE: {{has_se}}</th>
+ <th>SE</th>
<th>N</th>
{%endif%}
</tr>
diff --git a/uploader/templates/populations/macro-display-population-card.html b/uploader/templates/populations/macro-display-population-card.html
index 79f7925..16b477f 100644
--- a/uploader/templates/populations/macro-display-population-card.html
+++ b/uploader/templates/populations/macro-display-population-card.html
@@ -33,11 +33,6 @@
<td>Family</td>
<td>{{population.Family}}</td>
</tr>
-
- <tr>
- <td>Description</td>
- <td>{{(population.Description or "")[0:500]}}&hellip;</td>
- </tr>
</tbody>
</table>
</div>