Diffstat (limited to 'uploader')
 uploader/__init__.py                                                |   2
 uploader/files/__init__.py                                          |   4
 uploader/files/chunks.py                                            |  32
 uploader/files/functions.py (renamed from uploader/files.py)        |  14
 uploader/files/views.py                                             |  97
 uploader/phenotypes/views.py                                        | 135
 uploader/static/js/files.js                                         | 119
 uploader/templates/phenotypes/add-phenotypes-base.html              | 127
 uploader/templates/phenotypes/add-phenotypes-raw-files.html         | 277
 uploader/templates/phenotypes/add-phenotypes-with-rqtl2-bundle.html |   4
 uploader/templates/phenotypes/macro-display-preview-table.html      |  21
 11 files changed, 706 insertions(+), 126 deletions(-)
diff --git a/uploader/__init__.py b/uploader/__init__.py
index 9fdb383..cae531b 100644
--- a/uploader/__init__.py
+++ b/uploader/__init__.py
@@ -11,6 +11,7 @@ from uploader.oauth2.client import user_logged_in, authserver_authorise_uri
from . import session
from .base_routes import base
+from .files.views import files
from .species import speciesbp
from .oauth2.views import oauth2
from .expression_data import exprdatabp
@@ -82,6 +83,7 @@ def create_app():
# setup blueprints
app.register_blueprint(base, url_prefix="/")
+ app.register_blueprint(files, url_prefix="/files")
app.register_blueprint(oauth2, url_prefix="/oauth2")
app.register_blueprint(speciesbp, url_prefix="/species")
diff --git a/uploader/files/__init__.py b/uploader/files/__init__.py
new file mode 100644
index 0000000..60d2f3b
--- /dev/null
+++ b/uploader/files/__init__.py
@@ -0,0 +1,4 @@
+from .chunks import chunked_binary_read
+from .functions import (fullpath,
+ save_file,
+ sha256_digest_over_file)
diff --git a/uploader/files/chunks.py b/uploader/files/chunks.py
new file mode 100644
index 0000000..c4360b5
--- /dev/null
+++ b/uploader/files/chunks.py
@@ -0,0 +1,32 @@
+"""Functions dealing with chunking of files."""
+from pathlib import Path
+from typing import Iterator
+
+from flask import current_app as app
+from werkzeug.utils import secure_filename
+
+
+def chunked_binary_read(filepath: Path, chunksize: int = 2048) -> Iterator:
+ """Read a file in binary mode in chunks."""
+ with open(filepath, "rb") as inputfile:
+ while True:
+ data = inputfile.read(chunksize)
+ if data != b"":
+ yield data
+ continue
+ break
+
+def chunk_name(uploadfilename: str, chunkno: int) -> str:
+ """Generate chunk name from original filename and chunk number"""
+ if uploadfilename == "":
+ raise ValueError("Name cannot be empty!")
+ if chunkno < 1:
+ raise ValueError("Chunk number must be greater than zero")
+ return f"{secure_filename(uploadfilename)}_part_{chunkno:05d}"
+
+
+def chunks_directory(uniqueidentifier: str) -> Path:
+ """Compute the directory where chunks are temporarily stored."""
+ if uniqueidentifier == "":
+ raise ValueError("Unique identifier cannot be empty!")
+ return Path(app.config["UPLOAD_FOLDER"], f"tempdir_{uniqueidentifier}")
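A quick usage sketch for the helpers above (not part of the patch; the
filenames and identifier are hypothetical, and chunks_directory() needs an
active Flask application context with UPLOAD_FOLDER configured):

    from pathlib import Path
    from uploader.files.chunks import (
        chunk_name, chunks_directory, chunked_binary_read)

    # Stream a file without loading it wholly into memory.
    total = sum(len(block)
                for block in chunked_binary_read(Path("/tmp/example.csv")))

    # Deterministic chunk names: "example.csv_part_00001", ...
    first_chunk = chunk_name("example.csv", 1)

    # All chunks for one upload live under UPLOAD_FOLDER/tempdir_<identifier>.
    tempdir = chunks_directory("some-unique-identifier")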
diff --git a/uploader/files.py b/uploader/files/functions.py
index d37a53e..5a3dece 100644
--- a/uploader/files.py
+++ b/uploader/files/functions.py
@@ -1,7 +1,6 @@
"""Utilities to deal with uploaded files."""
import hashlib
from pathlib import Path
-from typing import Iterator
from datetime import datetime
from flask import current_app
@@ -9,6 +8,8 @@ from flask import current_app
from werkzeug.utils import secure_filename
from werkzeug.datastructures import FileStorage
+from .chunks import chunked_binary_read
+
def save_file(fileobj: FileStorage, upload_dir: Path) -> Path:
"""Save the uploaded file and return the path."""
assert bool(fileobj), "Invalid file object!"
@@ -29,17 +30,6 @@ def fullpath(filename: str):
return Path(current_app.config["UPLOAD_FOLDER"], filename).absolute()
-def chunked_binary_read(filepath: Path, chunksize: int = 2048) -> Iterator:
- """Read a file in binary mode in chunks."""
- with open(filepath, "rb") as inputfile:
- while True:
- data = inputfile.read(chunksize)
- if data != b"":
- yield data
- continue
- break
-
-
def sha256_digest_over_file(filepath: Path) -> str:
"""Compute the sha256 digest over a file's contents."""
filehash = hashlib.sha256()
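The rest of sha256_digest_over_file falls outside the diff context, but given
the newly added chunked_binary_read import it presumably streams the file
through the hash rather than reading it whole; a plausible completion:

    def sha256_digest_over_file(filepath: Path) -> str:
        """Compute the sha256 digest over a file's contents."""
        filehash = hashlib.sha256()
        for chunk in chunked_binary_read(filepath):
            filehash.update(chunk)
        return filehash.hexdigest()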
diff --git a/uploader/files/views.py b/uploader/files/views.py
new file mode 100644
index 0000000..cd5f00f
--- /dev/null
+++ b/uploader/files/views.py
@@ -0,0 +1,97 @@
+"""Module for generic files endpoints."""
+import traceback
+from pathlib import Path
+
+from flask import request, jsonify, Blueprint, current_app as app
+
+from .chunks import chunk_name, chunks_directory
+
+files = Blueprint("files", __name__)
+
+@files.route("/upload/resumable", methods=["GET"])
+def resumable_upload_get():
+ """Used for checking whether **ALL** chunks have been uploaded."""
+ fileid = request.args.get("resumableIdentifier", type=str) or ""
+ filename = request.args.get("resumableFilename", type=str) or ""
+ chunk = request.args.get("resumableChunkNumber", type=int) or 0
+ if not (fileid and filename and chunk):
+ return jsonify({
+ "message": "At least one required query parameter is missing.",
+ "error": "BadRequest",
+ "statuscode": 400
+ }), 400
+
+ if Path(chunks_directory(fileid),
+ chunk_name(filename, chunk)).exists():
+ return "OK"
+
+ return jsonify({
+ "message": f"Chunk {chunk} was not found.",
+ "error": "NotFound",
+ "statuscode": 404
+ }), 404
+
+
+def __merge_chunks__(targetfile: Path, chunkpaths: tuple[Path, ...]) -> Path:
+ """Merge the chunks into a single file."""
+ with open(targetfile, "ab") as _target:
+ for chunkfile in chunkpaths:
+ with open(chunkfile, "rb") as _chunkdata:
+ _target.write(_chunkdata.read())
+
+ chunkfile.unlink()
+ return targetfile
+
+
+@files.route("/upload/resumable", methods=["POST"])
+def resumable_upload_post():
+ """Do the actual chunks upload here."""
+ _totalchunks = request.form.get("resumableTotalChunks", type=int) or 0
+ _chunk = request.form.get("resumableChunkNumber", default=1, type=int)
+ _uploadfilename = request.form.get(
+ "resumableFilename", default="", type=str) or ""
+ _fileid = request.form.get(
+ "resumableIdentifier", default="", type=str) or ""
+ _targetfile = Path(app.config["UPLOAD_FOLDER"], _fileid)
+
+ if _targetfile.exists():
+ return jsonify({
+ "message": (
+ "A file with a similar unique identifier has previously been "
+ "uploaded and possibly is/has being/been processed."),
+ "error": "BadRequest",
+ "statuscode": 400
+ }), 400
+
+ try:
+ chunks_directory(_fileid).mkdir(exist_ok=True, parents=True)
+ request.files["file"].save(Path(chunks_directory(_fileid),
+ chunk_name(_uploadfilename, _chunk)))
+
+ # Check whether upload is complete
+ chunkpaths = tuple(
+ Path(chunks_directory(_fileid), chunk_name(_uploadfilename, _achunk))
+ for _achunk in range(1, _totalchunks+1))
+ if all(_file.exists() for _file in chunkpaths):
+ # merge_files and clean up chunks
+ __merge_chunks__(_targetfile, chunkpaths)
+ chunks_directory(_fileid).rmdir()
+ return jsonify({
+ "uploaded-file": _targetfile.name,
+ "message": "File was uploaded successfully!",
+ "statuscode": 200
+ }), 200
+ return jsonify({
+ "message": ("Chunk uploaded successfully. Some chunks are still "
+ "pending; the upload is not yet complete."),
+ "error": "ChunksUploadIncomplete",
+ "error-description": "Not all chunks have been uploaded yet."
+ }), 200
+ except Exception as exc:  # pylint: disable=[broad-except]
+ msg = "Error processing uploaded file chunks."
+ app.logger.error(msg, exc_info=True, stack_info=True)
+ return jsonify({
+ "message": msg,
+ "error": type(exc).__name__,
+ "error-description": " ".join(str(arg) for arg in exc.args),
+ "error-trace": traceback.format_exception(exc)
+ }), 500
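Together these two endpoints implement the server side of the resumable.js
protocol: the GET answers "has this chunk already been uploaded?" so clients
can skip it, while the POST stores a chunk and merges all chunks into the
final file once the last one arrives. A minimal client sketch, assuming the
app is served at http://localhost:8080 (hypothetical) and the third-party
`requests` library is available:

    import hashlib
    from pathlib import Path

    import requests

    BASE = "http://localhost:8080/files/upload/resumable"
    CHUNKSIZE = 1024 * 1024  # 1MiB chunks

    def upload(filepath: Path):
        """Upload `filepath` chunk by chunk, skipping chunks already present."""
        data = filepath.read_bytes()
        fileid = hashlib.sha256(data).hexdigest()
        chunks = [data[i:i + CHUNKSIZE]
                  for i in range(0, len(data), CHUNKSIZE)]
        for num, chunk in enumerate(chunks, start=1):
            fields = {
                "resumableIdentifier": fileid,
                "resumableFilename": filepath.name,
                "resumableChunkNumber": num,
                "resumableTotalChunks": len(chunks),
            }
            # GET: skip chunks the server already has.
            if requests.get(BASE, params=fields).status_code == 200:
                continue
            # POST: send this chunk as multipart/form-data.
            requests.post(
                BASE, data=fields, files={"file": chunk}).raise_for_status()

    upload(Path("phenotypes.csv"))  # hypothetical input file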
diff --git a/uploader/phenotypes/views.py b/uploader/phenotypes/views.py
index c4aa67a..f10ba09 100644
--- a/uploader/phenotypes/views.py
+++ b/uploader/phenotypes/views.py
@@ -307,6 +307,76 @@ def create_dataset(species: dict, population: dict, **kwargs):# pylint: disable=
population_id=population["Id"]))
+def process_phenotypes_rqtl2_bundle(
+ rconn: Redis, species: dict, population: dict, dataset: dict):
+ """Process phenotypes from the uploaded R/qtl2 bundle."""
+ _redisuri = app.config["REDIS_URL"]
+ _sqluri = app.config["SQL_URI"]
+ try:
+ ## Handle huge files here...
+ phenobundle = save_file(request.files["phenotypes-bundle"],
+ Path(app.config["UPLOAD_FOLDER"]))
+ rqc.validate_bundle(phenobundle)
+ except AssertionError as _aerr:
+ app.logger.debug("File upload error!", exc_info=True)
+ flash("Expected a zipped bundle of files with phenotypes' "
+ "information.",
+ "alert-danger")
+ return add_phenos_uri
+ except rqe.RQTLError as rqtlerr:
+ app.logger.debug("Bundle validation error!", exc_info=True)
+ flash("R/qtl2 Error: " + " ".join(rqtlerr.args), "alert-danger")
+ return add_phenos_uri
+
+ _jobid = uuid.uuid4()
+ _namespace = jobs.jobsnamespace()
+ _ttl_seconds = app.config["JOBS_TTL_SECONDS"]
+ _job = jobs.launch_job(
+ jobs.initialise_job(
+ rconn,
+ _namespace,
+ str(_jobid),
+ [sys.executable, "-m", "scripts.rqtl2.phenotypes_qc", _sqluri,
+ _redisuri, _namespace, str(_jobid), str(species["SpeciesId"]),
+ str(population["Id"]),
+ # str(dataset["Id"]),
+ str(phenobundle),
+ "--loglevel",
+ {
+ INFO: "INFO",
+ ERROR: "ERROR",
+ DEBUG: "DEBUG",
+ FATAL: "FATAL",
+ CRITICAL: "CRITICAL",
+ WARNING: "WARNING"
+ }[app.logger.getEffectiveLevel()],
+ "--redisexpiry",
+ str(_ttl_seconds)], "phenotype_qc", _ttl_seconds,
+ {"job-metadata": json.dumps({
+ "speciesid": species["SpeciesId"],
+ "populationid": population["Id"],
+ "datasetid": dataset["Id"],
+ "bundle": str(phenobundle.absolute())})}),
+ _redisuri,
+ f"{app.config['UPLOAD_FOLDER']}/job_errors")
+
+ app.logger.debug("JOB DETAILS: %s", _job)
+
+ return redirect(url_for("species.populations.phenotypes.job_status",
+ species_id=species["SpeciesId"],
+ population_id=population["Id"],
+ dataset_id=dataset["Id"],
+ job_id=str(_job["jobid"])))
+
+
+def process_phenotypes_individual_files(rconn, species, population, dataset):
+ """Process the uploaded individual files."""
+ ## Handle huge file uploads here...
+ ## Convert files and settings to R/qtl2 bundle
+ ## Use same processing as R/qtl2 bundle (after some refactoring)
+ raise NotImplementedError("Implement this!")
+
+
@phenotypesbp.route(
"<int:species_id>/populations/<int:population_id>/phenotypes/datasets"
"/<int:dataset_id>/add-phenotypes",
@@ -318,6 +388,7 @@ def create_dataset(species: dict, population: dict, **kwargs):# pylint: disable=
redirect_uri="species.populations.phenotypes.list_datasets")
def add_phenotypes(species: dict, population: dict, dataset: dict, **kwargs):# pylint: disable=[unused-argument, too-many-locals]
"""Add one or more phenotypes to the dataset."""
+ use_bundle = request.args.get("use_bundle", "").lower() == "true"
add_phenos_uri = redirect(url_for(
"species.populations.phenotypes.add_phenotypes",
species_id=species["SpeciesId"],
@@ -333,8 +404,7 @@ def add_phenotypes(species: dict, population: dict, dataset: dict, **kwargs):# p
today = datetime.date.today()
return render_template(
("phenotypes/add-phenotypes-with-rqtl2-bundle.html"
- if request.args.get("use_bundle", "").lower() == "true"
- else "phenotypes/add-phenotypes-raw-files.html"),
+ if use_bundle else "phenotypes/add-phenotypes-raw-files.html"),
species=species,
population=population,
dataset=dataset,
@@ -347,63 +417,14 @@ def add_phenotypes(species: dict, population: dict, dataset: dict, **kwargs):# p
current_year=int(today.strftime("%Y")),
families_with_se_and_n=(
"Reference Populations (replicate average, SE, N)",),
+ use_bundle=use_bundle,
activelink="add-phenotypes")
- try:
- ## Handle huge files here...
- phenobundle = save_file(request.files["phenotypes-bundle"],
- Path(app.config["UPLOAD_FOLDER"]))
- rqc.validate_bundle(phenobundle)
- except AssertionError as _aerr:
- app.logger.debug("File upload error!", exc_info=True)
- flash("Expected a zipped bundle of files with phenotypes' "
- "information.",
- "alert-danger")
- return add_phenos_uri
- except rqe.RQTLError as rqtlerr:
- app.logger.debug("Bundle validation error!", exc_info=True)
- flash("R/qtl2 Error: " + " ".join(rqtlerr.args), "alert-danger")
- return add_phenos_uri
-
- _jobid = uuid.uuid4()
- _namespace = jobs.jobsnamespace()
- _ttl_seconds = app.config["JOBS_TTL_SECONDS"]
- _job = jobs.launch_job(
- jobs.initialise_job(
- rconn,
- _namespace,
- str(_jobid),
- [sys.executable, "-m", "scripts.rqtl2.phenotypes_qc", _sqluri,
- _redisuri, _namespace, str(_jobid), str(species["SpeciesId"]),
- str(population["Id"]),
- # str(dataset["Id"]),
- str(phenobundle),
- "--loglevel",
- {
- INFO: "INFO",
- ERROR: "ERROR",
- DEBUG: "DEBUG",
- FATAL: "FATAL",
- CRITICAL: "CRITICAL",
- WARNING: "WARNING"
- }[app.logger.getEffectiveLevel()],
- "--redisexpiry",
- str(_ttl_seconds)], "phenotype_qc", _ttl_seconds,
- {"job-metadata": json.dumps({
- "speciesid": species["SpeciesId"],
- "populationid": population["Id"],
- "datasetid": dataset["Id"],
- "bundle": str(phenobundle.absolute())})}),
- _redisuri,
- f"{app.config['UPLOAD_FOLDER']}/job_errors")
-
- app.logger.debug("JOB DETAILS: %s", _job)
-
- return redirect(url_for("species.populations.phenotypes.job_status",
- species_id=species["SpeciesId"],
- population_id=population["Id"],
- dataset_id=dataset["Id"],
- job_id=str(_job["jobid"])))
+ if use_bundle:
+ return process_phenotypes_rqtl2_bundle(
+ rconn, species, population, dataset)
+ return process_phenotypes_individual_files(
+ rconn, species, population, dataset)
@phenotypesbp.route(
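An aside on the loglevel mapping retained in process_phenotypes_rqtl2_bundle
above: assuming the effective level is always one of the standard logging
levels, the literal dict is equivalent to the stdlib helper:

    import logging
    # FATAL is an alias of CRITICAL, so it also maps to "CRITICAL".
    loglevel = logging.getLevelName(app.logger.getEffectiveLevel())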
diff --git a/uploader/static/js/files.js b/uploader/static/js/files.js
new file mode 100644
index 0000000..26092f7
--- /dev/null
+++ b/uploader/static/js/files.js
@@ -0,0 +1,119 @@
+var readFirstNLines = (fileelement, count, process_content_fns) => {
+ var thefile = fileelement.files[0];
+ var reader = new FileReader();
+ if(typeof thefile !== "undefined" && thefile !== null) {
+ reader.addEventListener("load", (event) => {
+ var content = event
+ .target
+ .result
+ .split("\n")
+ .slice(0, count)
+ .map((line) => {return line.replace(/\r$/, "");});
+ process_content_fns.forEach((fn) => {fn(content);});
+ });
+ reader.readAsText(thefile);
+ }
+};
+var read_first_n_lines = readFirstNLines;
+
+
+var readBinaryFile = (file) => {
+ return new Promise((resolve, reject) => {
+ var _reader = new FileReader();
+ _reader.onload = (event) => {resolve(_reader.result);};
+ _reader.onerror = () => {reject(_reader.error);};
+ _reader.readAsArrayBuffer(file);
+ });
+};
+
+
+var Uint8ArrayToHex = (arr) => {
+ var toHex = (val) => {
+ var _hex = val.toString(16);
+ if(_hex.length < 2) {
+ return "0" + _hex;
+ }
+ return _hex;
+ };
+ var _hexstr = "";
+ arr.forEach((val) => {_hexstr += toHex(val);});
+ return _hexstr;
+};
+
+
+var computeFileChecksum = (file) => {
+ return readBinaryFile(file)
+ .then((content) => {
+ return window.crypto.subtle.digest(
+ "SHA-256", new Uint8Array(content));
+ }).then((digest) => {
+ return Uint8ArrayToHex(new Uint8Array(digest))
+ });
+};
+
+
+var defaultResumableHandler = (event) => {
+ throw new Error("Please provide a valid event handler!");
+};
+
+var addHandler = (resumable, handlername, handler) => {
+ if(resumable.support) {
+ resumable.on(handlername, (handler || defaultResumableHandler));
+ }
+ return resumable;
+};
+
+
+var makeResumableHandler = (handlername) => {
+ return (resumable, handler) => {
+ return addHandler(resumable, handlername, handler);
+ };
+};
+
+
+var fileSuccessHandler = makeResumableHandler("fileSuccess");
+var fileProgressHandler = makeResumableHandler("fileProgress");
+var fileAddedHandler = makeResumableHandler("fileAdded");
+var filesAddedHandler = makeResumableHandler("filesAdded");
+var filesRetryHandler = makeResumableHandler("filesRetry");
+var filesErrorHandler = makeResumableHandler("filesError");
+var uploadStartHandler = makeResumableHandler("uploadStart");
+var completeHandler = makeResumableHandler("complete");
+var progressHandler = makeResumableHandler("progress");
+var errorHandler = makeResumableHandler("error");
+
+
+var markResumableDragAndDropElement = (resumable, fileinput, droparea, browsebutton) => {
+ if(resumable.support) {
+ //Hide file input element and display drag&drop UI
+ add_class(fileinput, "hidden");
+ remove_class(droparea, "hidden");
+
+ // Define UI elements for browse and drag&drop
+ resumable.assignDrop(droparea);
+ resumable.assignBrowse(browsebutton);
+ }
+
+ return resumable;
+};
+
+
+var makeResumableElement = (targeturi, fileinput, droparea, uploadbutton, filetype) => {
+ var resumable = new Resumable({
+ target: targeturi,
+ fileType: filetype,
+ maxFiles: 1,
+ forceChunkSize: true,
+ generateUniqueIdentifier: (file, event) => {
+ return computeFileChecksum(file).then((checksum) => {
+ var _relativePath = (file.webkitRelativePath
+ || file.relativePath
+ || file.fileName
+ || file.name);
+ return checksum + "-" + _relativePath.replace(
+ /[^a-zA-Z0-9_-]/img, "");
+ });
+ }
+ });
+
+ return resumable;
+};
diff --git a/uploader/templates/phenotypes/add-phenotypes-base.html b/uploader/templates/phenotypes/add-phenotypes-base.html
index b3a53b0..c4315ee 100644
--- a/uploader/templates/phenotypes/add-phenotypes-base.html
+++ b/uploader/templates/phenotypes/add-phenotypes-base.html
@@ -30,7 +30,9 @@
action="{{url_for('species.populations.phenotypes.add_phenotypes',
species_id=species.SpeciesId,
population_id=population.Id,
- dataset_id=dataset.Id)}}">
+ dataset_id=dataset.Id,
+ use_bundle=use_bundle)}}"
+ data-resumable-target="{{url_for('files.resumable_upload_post')}}">
<legend>Add New Phenotypes</legend>
<div class="form-text help-block">
@@ -57,6 +59,9 @@
<button id="btn-search-pubmed-id" class="btn btn-info">Search</button>
</span>
</div>
+ <span id="search-pubmed-id-error"
+ class="form-text text-muted text-danger hidden">
+ </span><br />
<span class="form-text text-muted">
Enter your publication's PubMed ID above and click "Search" to search
for some (or all) of the publication details requested below.
@@ -157,10 +162,6 @@
{%endblock%}
-{%block sidebarcontents%}
-{{display_pheno_dataset_card(species, population, dataset)}}
-{%endblock%}
-
{%block javascript%}
<script type="text/javascript">
@@ -219,13 +220,12 @@
"journal": details[pubmed_id].fulljournalname,
"volume": details[pubmed_id].volume,
"pages": details[pubmed_id].pages,
- "month": months[_date[1].toLowerCase()],
+ "month": _date.length > 1 ? months[_date[1].toLowerCase()] : "jan",
"year": _date[0],
};
};
var update_publication_details = (details) => {
- console.log("Updating with the following details:", details);
Object.entries(details).forEach((entry) => {;
switch(entry[0]) {
case "authors":
@@ -244,41 +244,7 @@
});
};
- var freds_variable = undefined;
- $("#btn-search-pubmed-id").on("click", (event) => {
- event.preventDefault();
- var search_button = event.target;
- var pubmed_id = $("#txt-pubmed-id").val().trim();
- remove_class($("#txt-pubmed-id").parent(), "has-error");
- if(pubmed_id == "") {
- add_class($("#txt-pubmed-id").parent(), "has-error");
- return false;
- }
-
- var flag_pub_details = false;
- var flag_pub_abstract = false;
- var enable_button = () => {
- search_button.disabled = !(flag_pub_details && flag_pub_abstract);
- };
- search_button.disabled = true;
- // Fetch publication details
- $.ajax("https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi",
- {
- "method": "GET",
- "data": {"db": "pubmed", "id": pubmed_id, "format": "json"},
- "success": (data, textStatus, jqXHR) => {
- // process and update publication details
- update_publication_details(extract_details(
- pubmed_id, data.result));
- },
- "error": (jqXHR, textStatus, errorThrown) => {},
- "complete": () => {
- flag_pub_details = true;
- enable_button();
- },
- "dataType": "json"
- });
- // Fetch the abstract
+ var fetch_publication_abstract = (pubmed_id, pub_details) => {
$.ajax("https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi",
{
"method": "GET",
@@ -289,25 +255,74 @@
"retmode": "xml"
},
"success": (data, textStatus, jqXHR) => {
- // process and update the abstract...
- freds_variable = data;
- console.log("ABSTRACT DETAILS:", data);
update_publication_details({
- "abstract": Array.from(data
- .getElementsByTagName(
- "Abstract")[0]
- .children)
- .map((elt) => {return elt.textContent.trim();})
- .join("\r\n")
- });
+ ...pub_details,
+ ...{
+ "abstract": Array.from(data
+ .getElementsByTagName(
+ "Abstract")[0]
+ .children)
+ .map((elt) => {return elt.textContent.trim();})
+ .join("\r\n")
+ }});
},
"error": (jqXHR, textStatus, errorThrown) => {},
- "complete": (jqXHR, textStatus) => {
- flag_pub_abstract = true;
- enable_button();
- },
+ "complete": (jqXHR, textStatus) => {},
"dataType": "xml"
});
+ };
+
+ var fetch_publication_details = (pubmed_id, complete_thunks) => {
+ var error_display = $("#search-pubmed-id-error");
+ error_display.text("");
+ add_class(error_display, "hidden");
+ $.ajax("https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi",
+ {
+ "method": "GET",
+ "data": {"db": "pubmed", "id": pubmed_id, "format": "json"},
+ "success": (data, textStatus, jqXHR) => {
+ // process and update publication details
+ var hasError = (
+ Object.hasOwn(data, "error") ||
+ Object.hasOwn(data.result[pubmed_id], "error"));
+ if(hasError) {
+ error_display.text(
+ "There was an error fetching a publication with " +
+ "the given PubMed ID! The error received " +
+ "was: '" + (
+ data.error ||
+ data.result[pubmed_id].error) +
+ "'. Please check the ID you provided and try " +
+ "again.");
+ remove_class(error_display, "hidden");
+ } else {
+ fetch_publication_abstract(
+ pubmed_id,
+ extract_details(pubmed_id, data.result));
+ }
+ },
+ "error": (jqXHR, textStatus, errorThrown) => {},
+ "complete": () => {
+ complete_thunks.forEach((thunk) => {thunk()});
+ },
+ "dataType": "json"
+ });
+ };
+
+ $("#btn-search-pubmed-id").on("click", (event) => {
+ event.preventDefault();
+ var search_button = event.target;
+ var pubmed_id = $("#txt-pubmed-id").val().trim();
+ remove_class($("#txt-pubmed-id").parent(), "has-error");
+ if(pubmed_id == "") {
+ add_class($("#txt-pubmed-id").parent(), "has-error");
+ return false;
+ }
+
+ search_button.disabled = true;
+ // Fetch publication details
+ fetch_publication_details(pubmed_id,
+ [() => {search_button.disabled = false;}]);
return false;
});
</script>
diff --git a/uploader/templates/phenotypes/add-phenotypes-raw-files.html b/uploader/templates/phenotypes/add-phenotypes-raw-files.html
index ef0895d..b88d16a 100644
--- a/uploader/templates/phenotypes/add-phenotypes-raw-files.html
+++ b/uploader/templates/phenotypes/add-phenotypes-raw-files.html
@@ -2,6 +2,8 @@
{%from "flash_messages.html" import flash_all_messages%}
{%from "macro-table-pagination.html" import table_pagination%}
{%from "phenotypes/macro-display-pheno-dataset-card.html" import display_pheno_dataset_card%}
+{%from "phenotypes/macro-display-preview-table.html" import display_preview_table%}
+{%from "phenotypes/macro-display-resumable-elements.html" import display_resumable_elements%}
{%block title%}Phenotypes{%endblock%}
@@ -106,13 +108,14 @@
<fieldset id="fldset-data-files">
<legend>Data File(s)</legend>
- <div class="form-group">
+ <div class="form-group non-resumable-elements">
<label for="finput-phenotype-descriptions" class="form-label">
Phenotype Descriptions</label>
<input id="finput-phenotype-descriptions"
name="phenotype-descriptions"
class="form-control"
type="file"
+ data-preview-table="tbl-preview-pheno-desc"
required="required" />
<span class="form-text text-muted">
Provide a file that contains only the phenotype descriptions,
@@ -121,12 +124,23 @@
the documentation for the expected format of the file</a>.</span>
</div>
+ {{display_resumable_elements(
+ "resumable-phenotype-descriptions",
+ "phenotype descriptions",
+ '<p>You can drop a CSV file that contains the phenotype descriptions here,
+ or you can click the "Browse" button to select it from your computer.</p>
+ <p>The CSV file must conform to some standards, as documented in the
+ <a href="#docs-file-phenotype-description"
+ title="Documentation of the phenotype data file format.">
+ "Phenotypes Descriptions" documentation</a> section below.</p>')}}
+
<div class="form-group">
<label for="finput-phenotype-data" class="form-label">Phenotype Data</label>
<input id="finput-phenotype-data"
name="phenotype-data"
class="form-control"
type="file"
+ data-preview-table="tbl-preview-pheno-data"
required="required" />
<span class="form-text text-muted">
Provide a file that contains only the phenotype data. See
@@ -142,6 +156,7 @@
name="phenotype-se"
class="form-control"
type="file"
+ data-preview-table="tbl-preview-pheno-se"
required="required" />
<span class="form-text text-muted">
Provide a file that contains only the standard errors for the phenotypes,
@@ -154,6 +169,7 @@
name="phenotype-n"
class="form-control"
type="file"
+ data-preview-table="tbl-preview-pheno-n"
required="required" />
<span class="form-text text-muted">
Provide a file that contains only the number of samples/individuals used in
@@ -294,20 +310,279 @@
{%endblock%}
+{%block sidebarcontents%}
+{{display_preview_table("tbl-preview-pheno-desc", "descriptions")}}
+{{display_preview_table("tbl-preview-pheno-data", "data")}}
+{%if population.Family in families_with_se_and_n%}
+{{display_preview_table("tbl-preview-pheno-se", "standard errors")}}
+{{display_preview_table("tbl-preview-pheno-n", "number of samples")}}
+{%endif%}
+{{display_pheno_dataset_card(species, population, dataset)}}
+{%endblock%}
+
{%block more_javascript%}
+<script src="{{url_for('base.node_modules',
+ filename='resumablejs/resumable.js')}}"></script>
+<script type="text/javascript" src="/static/js/files.js"></script>
+
<script type="text/javascript">
$("#btn-reset-file-separator").on("click", (event) => {
event.preventDefault();
$("#txt-file-separator").val("\t");
+ $("#txt-file-separator").trigger("change");
});
$("#btn-reset-file-comment-character").on("click", (event) => {
event.preventDefault();
$("#txt-file-comment-character").val("#");
+ $("#txt-file-comment-character").trigger("change");
});
$("#btn-reset-file-na").on("click", (event) => {
event.preventDefault();
$("#txt-file-na").val("- NA N/A");
+ $("#txt-file-na").trigger("change");
+ });
+
+ var update_preview = (table, filedata, formdata, numrows) => {
+ table.find("thead tr").remove()
+ table.find(".data-row").remove();
+ var linenum = 0;
+ var tableheader = table.find("thead");
+ var tablebody = table.find("tbody");
+ var numheadings = 0;
+ var navalues = formdata
+ .na_strings
+ .split(" ")
+ .map((v) => {return v.trim();})
+ .filter((v) => {return Boolean(v);});
+ filedata.forEach((line) => {
+ if(line.startsWith(formdata.comment_char) || linenum >= numrows) {
+ return false;
+ }
+ var row = $("<tr></tr>");
+ line.split(formdata.separator)
+ .map((field) => {
+ var value = field.trim();
+ if(navalues.includes(value)) {
+ return "⋘NUL⋙";
+ }
+ return value;
+ })
+ .filter((field) => {
+ return (field !== "" && field != undefined && field != null);
+ })
+ .forEach((field) => {
+ if(linenum == 0) {
+ numheadings += 1;
+ var tablefield = $("<th></th>");
+ tablefield.text(field);
+ row.append(tablefield);
+ } else {
+ add_class(row, "data-row");
+ var tablefield = $("<td></td>");
+ tablefield.text(field);
+ row.append(tablefield);
+ }
+ });
+
+ if(linenum == 0) {
+ tableheader.append(row);
+ } else {
+ tablebody.append(row);
+ }
+ linenum += 1;
+ });
+
+ if(table.find("tbody tr.data-row").length > 0) {
+ add_class(table.find(".data-row-template"), "hidden");
+ } else {
+ remove_class(table.find(".data-row-template"), "hidden");
+ }
+ };
+
+ var preview_tables_to_elements_map = {
+ "#tbl-preview-pheno-desc": "#finput-phenotype-descriptions",
+ "#tbl-preview-pheno-data": "#finput-phenotype-data",
+ "#tbl-preview-pheno-se": "#finput-phenotype-se",
+ "#tbl-preview-pheno-n": "#finput-phenotype-n"
+ };
+
+ var files_metadata = () => {
+ return {
+ "separator": $("#txt-file-separator").val(),
+ "comment_char": $(
+ "#txt-file-comment-character").val(),
+ "na_strings": $("#txt-file-na").val()
+ }
+ };
+
+ var PREVIEW_ROWS = 5;
+
+ var handler_update_previews = (event) => {
+ Object.entries(preview_tables_to_elements_map).forEach((mapentry) => {
+ var element = $(mapentry[1]);
+ if(element.length === 1) {
+ read_first_n_lines(
+ element[0],
+ 10,
+ [(data) => {
+ update_preview(
+ $(mapentry[0]),
+ data,
+ files_metadata(),
+ PREVIEW_ROWS);}]);
+ }
+ });
+ };
+
+ [
+ "#txt-file-separator",
+ "#txt-file-comment-character",
+ "#txt-file-na"
+ ].forEach((elementid) => {
+ $(elementid).on("change", handler_update_previews);
+ });
+
+ [
+ "#finput-phenotype-descriptions",
+ "#finput-phenotype-data",
+ "#finput-phenotype-se",
+ "#finput-phenotype-n"
+ ].forEach((elementid) => {
+ $(elementid).on("change", (event) => {
+ read_first_n_lines(
+ event.target,
+ 10,
+ [(data) => {
+ update_preview(
+ $("#" + event.target.getAttribute("data-preview-table")),
+ data,
+ files_metadata(),
+ PREVIEW_ROWS);
+ }]);
+ });
+ });
+
+
+ var resumableDisplayFiles = (display_area, files) => {
+ files.forEach((file) => {
+ var display_element = display_area
+ .find(".file-display-template")
+ .clone();
+ remove_class(display_element, "hidden");
+ remove_class(display_element, "file-display-template");
+ add_class(display_element, "file-display");
+ display_element.find(".filename").text(file.name
+ || file.fileName
+ || file.relativePath
+ || file.webkitRelativePath);
+ display_element.find(".filesize").text(
+ (file.size / (1024*1024)).toFixed(2) + "MB");
+ display_element.find(".fileuniqueid").text(file.uniqueIdentifier);
+ display_element.find(".filemimetype").text(file.file.type);
+ display_area.append(display_element);
+ });
+ };
+
+
+ var indicateProgress = (resumable, progress_bar) => {
+ return (event) => {
+ var progress = (resumable.progress() * 100).toFixed(2);
+ var pbar = progress_bar.find(".progress-bar");
+ remove_class(progress_bar, "hidden");
+ pbar.css("width", progress+"%");
+ pbar.attr("aria-valuenow", progress);
+ pbar.text("Uploading: " + progress + "%");
+ };
+ };
+
+ var retryUpload = (resumable, retry_button, cancel_button, browse_button) => {
+ retry_button.on("click", (event) => {
+ resumable.files.forEach((file) => {file.retry();});
+ add_class(retry_button, "hidden");
+ remove_class(cancel_button, "hidden");
+ add_class(browse_button, "hidden");
+ });
+ };
+
+ var cancelUpload = (resumable, cancel_button, retry_button, browse_button) => {
+ cancel_button.on("click", (event) => {
+ resumable.files.forEach((file) => {
+ if(file.isUploading()) {
+ file.abort();
+ }
+ });
+ add_class(cancel_button, "hidden");
+ remove_class(retry_button, "hidden");
+ remove_class(browse_button, "hidden");
+ });
+ };
+
+
+ var startUpload = (browse_button, retry_button, cancel_button) => {
+ return (event) => {
+ remove_class(cancel_button, "hidden");
+ add_class(retry_button, "hidden");
+ add_class(browse_button, "hidden");
+ };
+ };
+
+
+ var uploadSuccess = () => {
+ return (file, message) => {
+ console.log("THE FILE:", file);
+ console.log("THE SUCCESS MESSAGE:", message);
+ // TODOS:
+ // * Save filename/filepath somewhere
+ // * Trigger some function that will run when all files have succeeded
+ };
+ };
+
+
+ var uploadError = () => {
+ return (message, file) => {
+ console.log("THE FILE:", file);
+ console.log("THE ERROR MESSAGE:", message);
+ };
+ };
+
+
+ var r = errorHandler(
+ fileSuccessHandler(
+ uploadStartHandler(
+ filesAddedHandler(
+ markResumableDragAndDropElement(
+ makeResumableElement(
+ $("#frm-add-phenotypes").attr("data-resumable-target"),
+ $("#finput-phenotype-descriptions").parent(),
+ $("#resumable-phenotype-descriptions"),
+ $("#frm-add-phenotypes input[type=submit]"),
+ ["csv", "tsv"]),
+ $("#finput-phenotype-descriptions").parent(),
+ $("#resumable-phenotype-descriptions"),
+ $("#resumable-phenotype-descriptions-browse-button")),
+ (files) => {
+ // TODO: Also trigger preview!
+ resumableDisplayFiles(
+ $("#resumable-phenotype-descriptions-selected-files"), files);
+ }),
+ startUpload($("#resumable-phenotype-descriptions-browse-button"),
+ $("#resumable-phenotype-descriptions-retry-button"),
+ $("#resumable-phenotype-descriptions-cancel-button"))),
+ uploadSuccess()),
+ uploadError());
+
+ progressHandler(
+ r,
+ indicateProgress(r, $("#resumable-phenotype-descriptions-progress-bar")));
+
+
+ $("#frm-add-phenotypes input[type=submit]").on("click", (event) => {
+ event.preventDefault();
+ // TODO: Check all the relevant files exist
+ // TODO: Check all fields
+ // Start the uploads.
+ r.upload();
});
</script>
{%endblock%}
diff --git a/uploader/templates/phenotypes/add-phenotypes-with-rqtl2-bundle.html b/uploader/templates/phenotypes/add-phenotypes-with-rqtl2-bundle.html
index 8f67baa..898fc0c 100644
--- a/uploader/templates/phenotypes/add-phenotypes-with-rqtl2-bundle.html
+++ b/uploader/templates/phenotypes/add-phenotypes-with-rqtl2-bundle.html
@@ -201,3 +201,7 @@
<em>phenotypes × individuals</em>.</p>
</div>
{%endblock%}
+
+{%block sidebarcontents%}
+{{display_pheno_dataset_card(species, population, dataset)}}
+{%endblock%}
diff --git a/uploader/templates/phenotypes/macro-display-preview-table.html b/uploader/templates/phenotypes/macro-display-preview-table.html
new file mode 100644
index 0000000..7509158
--- /dev/null
+++ b/uploader/templates/phenotypes/macro-display-preview-table.html
@@ -0,0 +1,21 @@
+{%macro display_preview_table(tableid, filetype)%}
+<div class="card" style="max-width: 676px;">
+ <div class="card-body">
+ <h5 class="card-title">Phenotypes '{{filetype | title}}' File Preview</h5>
+ <div class="card-text">
+ <table id="{{tableid}}" class="table table-condensed table-responsive" style="overflow: hidden;">
+ <thead>
+ <tr>
+ </tr>
+ </thead>
+ <tbody>
+ <tr>
+ <td class="data-row-template text-info">
+ Provide a phenotype '{{filetype | lower}}' file to preview.
+ </td>
+ </tr>
+ </tbody>
+ </table>
+ </div>
+ </div>
+</div>
+{%endmacro%}