-rw-r--r--  quality_control/checks.py                                   |  15
-rw-r--r--  quality_control/parsing.py                                  |  17
-rw-r--r--  scripts/load_phenotypes_to_db.py                            |   6
-rw-r--r--  tests/r_qtl/test_r_qtl2_control_file.py                     |   1
-rw-r--r--  tests/uploader/phenotypes/test_misc.py                      | 108
-rw-r--r--  tests/uploader/publications/test_misc.py                    |   1
-rw-r--r--  tests/uploader/test_parse.py                                |   3
-rw-r--r--  uploader/__init__.py                                        |  19
-rw-r--r--  uploader/authorisation.py                                   |   2
-rw-r--r--  uploader/background_jobs.py                                 | 112
-rw-r--r--  uploader/jobs.py                                            |   9
-rw-r--r--  uploader/monadic_requests.py                                |  16
-rw-r--r--  uploader/phenotypes/models.py                               |   4
-rw-r--r--  uploader/phenotypes/views.py                                |  67
-rw-r--r--  uploader/platforms/models.py                                |   3
-rw-r--r--  uploader/publications/datatables.py                         |  52
-rw-r--r--  uploader/publications/models.py                             |  10
-rw-r--r--  uploader/publications/views.py                              |  29
-rw-r--r--  uploader/route_utils.py                                     |   3
-rw-r--r--  uploader/static/css/styles.css                              |   6
-rw-r--r--  uploader/templates/jobs/job-error.html                      |  17
-rw-r--r--  uploader/templates/jobs/job-status.html                     |   4
-rw-r--r--  uploader/templates/phenotypes/add-phenotypes-base.html      |  43
-rw-r--r--  uploader/templates/phenotypes/create-dataset.html           |   6
-rw-r--r--  uploader/templates/phenotypes/load-phenotypes-success.html  |  42
25 files changed, 485 insertions(+), 110 deletions(-)
diff --git a/quality_control/checks.py b/quality_control/checks.py
index bdfd12b..bb05e31 100644
--- a/quality_control/checks.py
+++ b/quality_control/checks.py
@@ -52,12 +52,15 @@ def decimal_places_pattern(mini: int, maxi: Optional[int] = None) -> re.Pattern:
+ r")$"
)
-def decimal_points_error(filename: str,# pylint: disable=[too-many-arguments]
- lineno: int,
- field: str,
- value: str,
- mini: int,
- maxi: Optional[int] = None) -> Optional[InvalidValue]:
+def decimal_points_error(
+ # pylint: disable=[too-many-arguments, too-many-positional-arguments]
+ filename: str,
+ lineno: int,
+ field: str,
+ value: str,
+ mini: int,
+ maxi: Optional[int] = None
+) -> Optional[InvalidValue]:
"""
Check that 'value' is a decimal number with the appropriate number of decimal places.
"""
diff --git a/quality_control/parsing.py b/quality_control/parsing.py
index f1d21fc..7a8185d 100644
--- a/quality_control/parsing.py
+++ b/quality_control/parsing.py
@@ -104,23 +104,22 @@ def collect_errors(
if line_number == 1:
consistent_columns_checker = make_column_consistency_checker(
filename, line)
- for error in __process_errors__(
- filename, line_number, line,
- partial(header_errors, strains=strains),
- errors):
- yield error
+ yield from __process_errors__(
+ filename, line_number, line,
+ partial(header_errors, strains=strains),
+ errors)
if line_number != 1:
- col_consistency_error = consistent_columns_checker(line_number, line)
+ col_consistency_error = consistent_columns_checker(# pylint: disable=[possibly-used-before-assignment]
+ line_number, line)
if col_consistency_error:
yield col_consistency_error
- for error in __process_errors__(
+ yield from __process_errors__(
filename, line_number, line, (
average_errors if filetype == FileType.AVERAGE
else se_errors),
- errors):
- yield error
+ errors)
if update_progress:
update_progress(line_number, line)
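
The `yield from` rewrites are behaviour-preserving: a loop that does nothing but re-yield each item is exactly what `yield from` expresses. A standalone illustration:

    def old_style(iterable):
        for error in iterable:  # pre-refactor spelling
            yield error

    def new_style(iterable):
        yield from iterable     # post-refactor: delegate to the iterable

    assert list(old_style([1, 2, 3])) == list(new_style([1, 2, 3]))
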
diff --git a/scripts/load_phenotypes_to_db.py b/scripts/load_phenotypes_to_db.py
index dc083cd..8f49e10 100644
--- a/scripts/load_phenotypes_to_db.py
+++ b/scripts/load_phenotypes_to_db.py
@@ -358,7 +358,7 @@ def load_data(conn: mysqldb.Connection, job: dict) -> int:
int(_job_metadata["dataset_id"]))
# 1. Just retrieve the publication: Don't create publications for now.
_publication = fetch_publication_by_id(
- conn, int(_job_metadata.get("publicationid", "0"))) or {"Id": 0}
+ conn, int(_job_metadata.get("publication_id", "0"))) or {"Id": 0}
# 2. Save all new phenotypes:
# -> return phenotype IDs
bundle = Path(_job_metadata["bundle_file"])
@@ -491,7 +491,7 @@ if __name__ == "__main__":
db_results = load_data(conn, job)
- logger.debug("Unlocking all database tables.")
+ logger.info("Unlocking all database tables.")
cursor.execute("UNLOCK TABLES")
# Update authorisations (break this down) — maybe loop until it works?
@@ -504,7 +504,7 @@ if __name__ == "__main__":
try:
sys.exit(main())
- except:
+ except Exception as _exc:
logger.debug("Data loading failed… Halting!",
exc_info=True)
sys.exit(1)
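
Replacing the bare `except:` matters beyond pylint hygiene: a bare clause also traps `SystemExit` and `KeyboardInterrupt`, so even the `sys.exit(main())` above would be swallowed. A sketch of the difference, with a stand-in `main`:

    import sys
    import logging

    logger = logging.getLogger(__name__)

    def main() -> int:          # stand-in for the real entry point
        raise RuntimeError("simulated load failure")

    try:
        sys.exit(main())
    except Exception:           # lets SystemExit from a *successful* run propagate
        logger.debug("Data loading failed… Halting!", exc_info=True)
        sys.exit(1)
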
diff --git a/tests/r_qtl/test_r_qtl2_control_file.py b/tests/r_qtl/test_r_qtl2_control_file.py
index 316307d..5b9fef6 100644
--- a/tests/r_qtl/test_r_qtl2_control_file.py
+++ b/tests/r_qtl/test_r_qtl2_control_file.py
@@ -16,6 +16,7 @@ __DEFAULTS__ = {
"pheno_transposed": False,
"covar_transposed": False,
"phenocovar_transposed": False,
+ "phenonum_transposed": False,
"gmap_transposed": False,
"pmap_transposed": False,
"phenose_transposed": False
diff --git a/tests/uploader/phenotypes/test_misc.py b/tests/uploader/phenotypes/test_misc.py
index c0261aa..cf475ad 100644
--- a/tests/uploader/phenotypes/test_misc.py
+++ b/tests/uploader/phenotypes/test_misc.py
@@ -218,12 +218,54 @@ __sample_db_phenotypes_data__ = (
}
}),
__sample_db_phenotypes_data__,
- ({"PhenotypeId": 4, "xref_id": 10001, "DataId": 8967043, "StrainId": 4, "StrainName": "BXD1", "value": 77.2},
- {"PhenotypeId": 15, "xref_id": 10003, "DataId": 8967045, "StrainId": 6, "StrainName": "BXD5", "value": 503},
- {"PhenotypeId": 15, "xref_id": 10003, "DataId": 8967045, "StrainId": 7, "StrainName": "BXD6", "value": 903},
- {"PhenotypeId": 20, "xref_id": 10004, "DataId": 8967046, "StrainId": 3, "StrainName": "DBA/2J", "value": 1},
- {"PhenotypeId": 20, "xref_id": 10004, "DataId": 8967046, "StrainId": 4, "StrainName": "BXD1", "value": 8},
- {"PhenotypeId": 20, "xref_id": 10004, "DataId": 8967046, "StrainId": 5, "StrainName": "BXD2", "value": 9})),
+ ({
+ "PhenotypeId": 4,
+ "xref_id": 10001,
+ "DataId": 8967043,
+ "StrainId": 4,
+ "StrainName": "BXD1",
+ "value": 77.2
+ },
+ {
+ "PhenotypeId": 15,
+ "xref_id": 10003,
+ "DataId": 8967045,
+ "StrainId": 6,
+ "StrainName": "BXD5",
+ "value": 503
+ },
+ {
+ "PhenotypeId": 15,
+ "xref_id": 10003,
+ "DataId": 8967045,
+ "StrainId": 7,
+ "StrainName": "BXD6",
+ "value": 903
+ },
+ {
+ "PhenotypeId": 20,
+ "xref_id": 10004,
+ "DataId": 8967046,
+ "StrainId": 3,
+ "StrainName": "DBA/2J",
+ "value": 1
+ },
+ {
+ "PhenotypeId": 20,
+ "xref_id": 10004,
+ "DataId": 8967046,
+ "StrainId": 4,
+ "StrainName": "BXD1",
+ "value": 8
+ },
+ {
+ "PhenotypeId": 20,
+ "xref_id": 10004,
+ "DataId": 8967046,
+ "StrainId": 5,
+ "StrainName": "BXD2",
+ "value": 9
+ })),
# Changes — with deletions
(({
@@ -292,12 +334,54 @@ __sample_db_phenotypes_data__ = (
}
}),
__sample_db_phenotypes_data__,
- ({"PhenotypeId": 4, "xref_id": 10001, "DataId": 8967043, "StrainId": 4, "StrainName": "BXD1", "value": None},
- {"PhenotypeId": 15, "xref_id": 10003, "DataId": 8967045, "StrainId": 6, "StrainName": "BXD5", "value": None},
- {"PhenotypeId": 15, "xref_id": 10003, "DataId": 8967045, "StrainId": 7, "StrainName": "BXD6", "value": None},
- {"PhenotypeId": 20, "xref_id": 10004, "DataId": 8967046, "StrainId": 3, "StrainName": "DBA/2J", "value": 15},
- {"PhenotypeId": 20, "xref_id": 10004, "DataId": 8967046, "StrainId": 4, "StrainName": "BXD1", "value": None},
- {"PhenotypeId": 20, "xref_id": 10004, "DataId": 8967046, "StrainId": 5, "StrainName": "BXD2", "value": 24}))))
+ ({
+ "PhenotypeId": 4,
+ "xref_id": 10001,
+ "DataId": 8967043,
+ "StrainId": 4,
+ "StrainName": "BXD1",
+ "value": None
+ },
+ {
+ "PhenotypeId": 15,
+ "xref_id": 10003,
+ "DataId": 8967045,
+ "StrainId": 6,
+ "StrainName": "BXD5",
+ "value": None
+ },
+ {
+ "PhenotypeId": 15,
+ "xref_id": 10003,
+ "DataId": 8967045,
+ "StrainId": 7,
+ "StrainName": "BXD6",
+ "value": None
+ },
+ {
+ "PhenotypeId": 20,
+ "xref_id": 10004,
+ "DataId": 8967046,
+ "StrainId": 3,
+ "StrainName": "DBA/2J",
+ "value": 15
+ },
+ {
+ "PhenotypeId": 20,
+ "xref_id": 10004,
+ "DataId": 8967046,
+ "StrainId": 4,
+ "StrainName": "BXD1",
+ "value": None
+ },
+ {
+ "PhenotypeId": 20,
+ "xref_id": 10004,
+ "DataId": 8967046,
+ "StrainId": 5,
+ "StrainName": "BXD2",
+ "value": 24
+ }))))
def test_phenotypes_data_differences(filedata, dbdata, expected):
"""Test differences are computed correctly."""
assert phenotypes_data_differences(filedata, dbdata) == expected
diff --git a/tests/uploader/publications/test_misc.py b/tests/uploader/publications/test_misc.py
index 7a52941..8c7e567 100644
--- a/tests/uploader/publications/test_misc.py
+++ b/tests/uploader/publications/test_misc.py
@@ -63,5 +63,6 @@ from uploader.publications.misc import publications_differences
{"PhenotypeId": 1, "xref_id": 10004, "PublicationId": None,
"PubMed_ID": None}))))
def test_publications_differences(filedata, dbdata, pubmed2pubidmap, expected):
+ """Test publication differences — flesh out description…"""
assert publications_differences(
filedata, dbdata, pubmed2pubidmap) == expected
diff --git a/tests/uploader/test_parse.py b/tests/uploader/test_parse.py
index 076c47c..20c75b7 100644
--- a/tests/uploader/test_parse.py
+++ b/tests/uploader/test_parse.py
@@ -8,7 +8,8 @@ from uploader.jobs import job, jobsnamespace
from tests.conftest import uploadable_file_object
-def test_parse_with_existing_uploaded_file(#pylint: disable=[too-many-arguments]
+def test_parse_with_existing_uploaded_file(
+ #pylint: disable=[too-many-arguments,too-many-positional-arguments]
client,
db_url,
redis_url,
diff --git a/uploader/__init__.py b/uploader/__init__.py
index b986c81..8b49ad5 100644
--- a/uploader/__init__.py
+++ b/uploader/__init__.py
@@ -3,13 +3,17 @@ import os
import sys
import logging
from pathlib import Path
+from typing import Optional
from flask import Flask, request
-from flask_session import Session
+
from cachelib import FileSystemCache
from gn_libs import jobs as gnlibs_jobs
+from flask_session import Session
+
+
from uploader.oauth2.client import user_logged_in, authserver_authorise_uri
from . import session
@@ -63,15 +67,24 @@ def setup_logging(app: Flask) -> Flask:
def setup_modules_logging(app_logger):
"""Setup module-level loggers to the same log-level as the application."""
loglevel = logging.getLevelName(app_logger.getEffectiveLevel())
- logging.getLogger("uploader.publications.models").setLevel(loglevel)
+ def __setup__(logger_name):
+ _logger = logging.getLogger(logger_name)
+ _logger.setLevel(loglevel)
+
+ __setup__("uploader.publications.models")
+ __setup__("uploader.publications.datatables")
-def create_app(config: dict = {}):
+
+def create_app(config: Optional[dict] = None):
"""The application factory.
config: dict
Useful to override settings in the settings files and environment
especially in environments such as testing."""
+ if config is None:
+ config = {}
+
app = Flask(__name__)
### BEGIN: Application configuration
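
The `config: Optional[dict] = None` change fixes Python's classic mutable-default pitfall: a `dict` default is created once at definition time and shared across every call. A minimal demonstration:

    from typing import Optional

    def broken(config: dict = {}):      # one shared dict for every call
        config.setdefault("calls", 0)
        config["calls"] += 1
        return config

    assert broken() is broken()         # state leaks between calls

    def fixed(config: Optional[dict] = None):
        if config is None:
            config = {}                 # a fresh dict per call
        return config

    assert fixed() is not fixed()
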
diff --git a/uploader/authorisation.py b/uploader/authorisation.py
index bc950d8..3cf3585 100644
--- a/uploader/authorisation.py
+++ b/uploader/authorisation.py
@@ -48,7 +48,7 @@ def require_token(func: Callable) -> Callable:
"""
def __invalid_token__(_whatever):
logging.debug("==========> Failure log: %s", _whatever)
- raise Exception(
+ raise Exception(# pylint: disable=[broad-exception-raised]
"You attempted to access a feature of the system that requires "
"authorisation. Unfortunately, we could not verify you have the "
"appropriate authorisation to perform the action you requested. "
diff --git a/uploader/background_jobs.py b/uploader/background_jobs.py
index ac47ff2..dc9f837 100644
--- a/uploader/background_jobs.py
+++ b/uploader/background_jobs.py
@@ -1,14 +1,88 @@
+"""Generic views and utilities to handle background jobs."""
import uuid
+import importlib
+from typing import Callable
+from functools import partial
-from flask import request, Blueprint, render_template, current_app as app
+from flask import (
+ url_for,
+ redirect,
+ Response,
+ Blueprint,
+ render_template,
+ current_app as app)
from gn_libs import jobs
-from gn_libs.jobs.jobs import JobNotFound
from gn_libs import sqlite3
+from gn_libs.jobs.jobs import JobNotFound
from uploader.authorisation import require_login
background_jobs_bp = Blueprint("background-jobs", __name__)
+HandlerType = Callable[[dict], Response]
+
+
+def __default_error_handler__(job: dict) -> Response:
+ return redirect(url_for("background-jobs.job_error", job_id=job["job_id"]))
+
+def register_handlers(
+ job_type: str,
+ success_handler: HandlerType,
+ # pylint: disable=[redefined-outer-name]
+ error_handler: HandlerType = __default_error_handler__
+) -> str:
+ """Register success and error handlers for each job type."""
+ if not bool(app.config.get("background-jobs")):
+ app.config["background-jobs"] = {}
+
+ if not bool(app.config["background-jobs"].get(job_type)):
+ app.config["background-jobs"][job_type] = {
+ "success": success_handler,
+ "error": error_handler
+ }
+
+ return job_type
+
+
+def register_job_handlers(job: str):
+ """Related to register handlers above."""
+ def __load_handler__(absolute_function_path):
+ _parts = absolute_function_path.split(".")
+ app.logger.debug("THE PARTS ARE: %s", _parts)
+ assert len(_parts) > 1, f"Invalid path: {absolute_function_path}"
+ module = importlib.import_module(f".{_parts[-2]}",
+ package=".".join(_parts[0:-2]))
+ return getattr(module, _parts[-1])
+
+ metadata = job["metadata"]
+ if metadata["success_handler"]:
+ _success_handler = __load_handler__(metadata["success_handler"])
+ try:
+ _error_handler = __load_handler__(metadata["error_handler"])
+ except Exception as _exc:# pylint: disable=[broad-exception-caught]
+ _error_handler = __default_error_handler__
+ register_handlers(
+ metadata["job-type"], _success_handler, _error_handler)
+
+
+def handler(job: dict, handler_type: str) -> Response:
+ """Fetch the registered handler for this job's type and invoke it on the job."""
+ _job_type = job["metadata"]["job-type"]
+ _handler = app.config.get(
+ "background-jobs", {}
+ ).get(
+ _job_type, {}
+ ).get(handler_type)
+ if bool(_handler):
+ return _handler(job)
+ raise Exception(# pylint: disable=[broad-exception-raised]
+ f"No '{handler_type}' handler registered for job type: {_job_type}")
+
+
+error_handler = partial(handler, handler_type="error")
+success_handler = partial(handler, handler_type="success")
+
@background_jobs_bp.route("/status/<uuid:job_id>")
@require_login
@@ -17,19 +91,29 @@ def job_status(job_id: uuid.UUID):
with sqlite3.connection(app.config["ASYNCHRONOUS_JOBS_SQLITE_DB"]) as conn:
try:
job = jobs.job(conn, job_id, fulldetails=True)
- stdout = ""
- stderr = ""
- # with (open(job["metadata"]["stdout-file"], encoding="utf-8") as stdout_file,
- # open(job["metadata"]["stderr-file"], encoding="utf-8") as stderr_file):
- # stdout = stdout_file.read()
- # stderr = stderr_file.read()
+ status = job["metadata"]["status"]
- return render_template(
- f"jobs/job-status.html",
- job=job,
- stdout=stdout,
- stderr=stderr)
- except JobNotFound as jnf:
+ register_job_handlers(job)
+ if status == "error":
+ return error_handler(job)
+
+ if status == "completed":
+ return success_handler(job)
+
+ return render_template("jobs/job-status.html", job=job)
+ except JobNotFound as _jnf:
return render_template(
"jobs/job-not-found.html",
job_id=job_id)
+
+
+@background_jobs_bp.route("/error/<uuid:job_id>")
+@require_login
+def job_error(job_id: uuid.UUID):
+ """Handle job errors in a generic manner."""
+ with sqlite3.connection(app.config["ASYNCHRONOUS_JOBS_SQLITE_DB"]) as conn:
+ try:
+ job = jobs.job(conn, job_id, fulldetails=True)
+ return render_template("jobs/job-error.html", job=job)
+ except JobNotFound as _jnf:
+ return render_template("jobs/job-not-found.html", job_id=job_id)
diff --git a/uploader/jobs.py b/uploader/jobs.py
index e86ee05..5968c03 100644
--- a/uploader/jobs.py
+++ b/uploader/jobs.py
@@ -41,7 +41,8 @@ def error_filename(jobid, error_dir):
"Compute the path of the file where errors will be dumped."
return f"{error_dir}/job_{jobid}.error"
-def initialise_job(# pylint: disable=[too-many-arguments]
+def initialise_job(
+ # pylint: disable=[too-many-arguments, too-many-positional-arguments]
rconn: Redis, rprefix: str, jobid: str, command: list, job_type: str,
ttl_seconds: int = 86400, extra_meta: Optional[dict] = None) -> dict:
"Initialise a job 'object' and put in on redis"
@@ -54,7 +55,8 @@ def initialise_job(# pylint: disable=[too-many-arguments]
name=job_key(rprefix, jobid), time=timedelta(seconds=ttl_seconds))
return the_job
-def build_file_verification_job(#pylint: disable=[too-many-arguments]
+def build_file_verification_job(
+ #pylint: disable=[too-many-arguments, too-many-positional-arguments]
redis_conn: Redis,
dburi: str,
redisuri: str,
@@ -77,7 +79,8 @@ def build_file_verification_job(#pylint: disable=[too-many-arguments]
"filename": os.path.basename(filepath), "percent": 0
})
-def data_insertion_job(# pylint: disable=[too-many-arguments]
+def data_insertion_job(
+ # pylint: disable=[too-many-arguments, too-many-positional-arguments]
redis_conn: Redis, filepath: str, filetype: str, totallines: int,
speciesid: int, platformid: int, datasetid: int, databaseuri: str,
redisuri: str, ttl_seconds: int) -> dict:
diff --git a/uploader/monadic_requests.py b/uploader/monadic_requests.py
index f1f5c77..eda42d0 100644
--- a/uploader/monadic_requests.py
+++ b/uploader/monadic_requests.py
@@ -59,6 +59,11 @@ def get(url, params=None, **kwargs) -> Either:
:rtype: pymonad.either.Either
"""
+ timeout = kwargs.get("timeout")
+ if timeout is None:
+ timeout = (9.13, 20)
+ # Put the (possibly defaulted) timeout back so requests.get() receives it.
+ kwargs["timeout"] = timeout
+
try:
resp = requests.get(url, params=params, **kwargs)
if resp.status_code in SUCCESS_CODES:
@@ -76,6 +81,11 @@ def post(url, data=None, json=None, **kwargs) -> Either:
:rtype: pymonad.either.Either
"""
+ timeout = kwargs.get("timeout")
+ if timeout is None:
+ timeout = (9.13, 20)
+ # Put the (possibly defaulted) timeout back so requests.post() receives it.
+ kwargs["timeout"] = timeout
+
try:
resp = requests.post(url, data=data, json=json, **kwargs)
if resp.status_code in SUCCESS_CODES:
@@ -95,10 +105,10 @@ def make_either_error_handler(msg):
try:
_data = error.json()
except Exception as _exc:
- raise Exception(error.content) from _exc
- raise Exception(_data)
+ raise Exception(error.content) from _exc# pylint: disable=[broad-exception-raised]
+ raise Exception(_data)# pylint: disable=[broad-exception-raised]
app.logger.debug("\n\n%s\n\n", msg)
- raise Exception(error)
+ raise Exception(error)# pylint: disable=[broad-exception-raised]
return __fail__
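
`requests` treats a missing `timeout` as "wait forever", and accepts either a single float or a `(connect, read)` tuple, which is what the `(9.13, 20)` default above is. The same defaulting can be written more compactly; a sketch:

    import requests

    DEFAULT_TIMEOUT = (9.13, 20)   # (connect seconds, read seconds)

    def get_with_default_timeout(url, params=None, **kwargs):
        # Keep an explicit caller-supplied timeout; otherwise use the default.
        kwargs.setdefault("timeout", DEFAULT_TIMEOUT)
        return requests.get(url, params=params, **kwargs)
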
diff --git a/uploader/phenotypes/models.py b/uploader/phenotypes/models.py
index 20b8e77..a6ee694 100644
--- a/uploader/phenotypes/models.py
+++ b/uploader/phenotypes/models.py
@@ -374,14 +374,14 @@ def quick_save_phenotypes_data(
prefix=f"{table}_data", mode="wt", dir=tmpdir) as tmpfile,
conn.cursor(cursorclass=DictCursor) as cursor):
_count = 0
- console.debug("Write data rows to text file.")
+ logger.debug("Write data rows to text file.")
for row in dataitems:
tmpfile.write(
f'{row["data_id"]}\t{row["sample_id"]}\t{row["value"]}\n')
_count = _count + 1
tmpfile.flush()
- console.debug("Load text file into database (table: %s)",
+ logger.debug("Load text file into database (table: %s)",
_table_details["table"])
cursor.execute(
f"LOAD DATA LOCAL INFILE '{tmpfile.name}' "
diff --git a/uploader/phenotypes/views.py b/uploader/phenotypes/views.py
index 49c12b5..6bc7471 100644
--- a/uploader/phenotypes/views.py
+++ b/uploader/phenotypes/views.py
@@ -23,6 +23,7 @@ from werkzeug.utils import secure_filename
from gn_libs import sqlite3
from gn_libs import jobs as gnlibs_jobs
+from gn_libs.jobs.jobs import JobNotFound
from gn_libs.mysqldb import database_connection
from gn_libs import monadic_requests as mrequests
@@ -613,6 +614,16 @@ def review_job_data(
activelink="add-phenotypes")
+def load_phenotypes_success_handler(job):
+ """Handle loading new phenotypes into the database successfully."""
+ return redirect(url_for(
+ "species.populations.phenotypes.load_data_success",
+ species_id=job["metadata"]["species_id"],
+ population_id=job["metadata"]["population_id"],
+ dataset_id=job["metadata"]["dataset_id"],
+ job_id=job["job_id"]))
+
+
@phenotypesbp.route(
"<int:species_id>/populations/<int:population_id>/phenotypes/datasets"
"/<int:dataset_id>/load-data-to-database",
@@ -635,6 +646,7 @@ def load_data_to_database(
qc_job = jobs.job(rconn, jobs.jobsnamespace(), request.form["data-qc-job-id"])
_meta = json.loads(qc_job["job-metadata"])
load_job_id = uuid.uuid4()
+ _loglevel = logging.getLevelName(app.logger.getEffectiveLevel()).lower()
command = [
sys.executable,
"-u",
@@ -644,17 +656,17 @@ def load_data_to_database(
jobs_db,
str(load_job_id),
"--log-level",
- logging.getLevelName(
- app.logger.getEffectiveLevel()
- ).lower()
+ _loglevel
]
def __handle_error__(resp):
- raise Exception(resp)
+ return render_template("http-error.html", **resp.json())
def __handle_success__(load_job):
app.logger.debug("The phenotypes loading job: %s", load_job)
- return str(load_job)
+ return redirect(url_for(
+ "background-jobs.job_status", job_id=load_job["job_id"]))
+
issued = datetime.datetime.now()
jwtkey = jwks.newest_jwk_with_rotation(
jwks.jwks_directory(app, "UPLOADER_SECRETS"),
@@ -697,15 +709,20 @@ def load_data_to_database(
"population_id": population["Id"],
"dataset_id": dataset["Id"],
"bundle_file": _meta["bundle"],
+ "publication_id": _meta["publicationid"],
"authserver": oauth2client.authserver_uri(),
- "token": token["access_token"]
+ "token": token["access_token"],
+ "success_handler": (
+ "uploader.phenotypes.views"
+ ".load_phenotypes_success_handler")
})
).then(
lambda job: gnlibs_jobs.launch_job(
- job,
- jobs_db,
- f"{app.config['UPLOAD_FOLDER']}/job_errors",
- worker_manager="gn_libs.jobs.launcher")
+ job,
+ jobs_db,
+ f"{app.config['UPLOAD_FOLDER']}/job_errors",
+ worker_manager="gn_libs.jobs.launcher",
+ loglevel=_loglevel)
).either(__handle_error__, __handle_success__)
@@ -1114,3 +1131,33 @@ def edit_upload_phenotype_data(# pylint: disable=[unused-argument]
return redirect(url_for("background-jobs.job_status",
job_id=job_id,
job_type="phenotype-bulk-edit"))
+
+
+@phenotypesbp.route(
+ "<int:species_id>/populations/<int:population_id>/phenotypes/datasets"
+ "/<int:dataset_id>/load-data-success/<uuid:job_id>",
+ methods=["GET"])
+@require_login
+@with_dataset(
+ species_redirect_uri="species.populations.phenotypes.index",
+ population_redirect_uri="species.populations.phenotypes.select_population",
+ redirect_uri="species.populations.phenotypes.list_datasets")
+def load_data_success(
+ species: dict,
+ population: dict,
+ dataset: dict,
+ job_id: uuid.UUID,
+ **kwargs
+):# pylint: disable=[unused-argument]
+ """Display a success page once the phenotype data is loaded into the database."""
+ with sqlite3.connection(app.config["ASYNCHRONOUS_JOBS_SQLITE_DB"]) as conn:
+ try:
+ job = gnlibs_jobs.job(conn, job_id, fulldetails=True)
+ app.logger.debug("THE JOB: %s", job)
+ return render_template("phenotypes/load-phenotypes-success.html",
+ species=species,
+ population=population,
+ dataset=dataset,
+ job=job,
+ gn2_server_url=app.config["GN2_SERVER_URL"])
+ except JobNotFound as _jnf:
+ return render_template("jobs/job-not-found.html", job_id=job_id)
diff --git a/uploader/platforms/models.py b/uploader/platforms/models.py
index a859371..0dd9368 100644
--- a/uploader/platforms/models.py
+++ b/uploader/platforms/models.py
@@ -56,7 +56,8 @@ def platform_by_species_and_id(
return None
-def save_new_platform(# pylint: disable=[too-many-arguments]
+def save_new_platform(
+ # pylint: disable=[too-many-arguments, too-many-positional-arguments]
cursor: Cursor,
species_id: int,
geo_platform: str,
diff --git a/uploader/publications/datatables.py b/uploader/publications/datatables.py
new file mode 100644
index 0000000..e07fafd
--- /dev/null
+++ b/uploader/publications/datatables.py
@@ -0,0 +1,52 @@
+"""Fetch data for datatables."""
+import logging
+from typing import Optional
+
+from MySQLdb.cursors import DictCursor
+
+from gn_libs.mysqldb import Connection, debug_query
+
+logger = logging.getLogger(__name__)
+
+def fetch_publications(
+ conn: Connection,
+ search: Optional[str] = None,
+ offset: int = 0,
+ limit: int = -1
+) -> tuple[dict, int, int, int]:
+ """Fetch publications from the database."""
+ _query = "SELECT * FROM Publication"
+ _count_query = "SELECT COUNT(*) FROM Publication"
+ _params = None
+ _where_clause = ""
+ _limit_clause = ""
+ if search:
+ _where_clause = ("WHERE PubMed_ID LIKE %s "
+ "OR Authors LIKE %s "
+ "OR Title LIKE %s")
+ _params = (f"%{search}%",) * 3
+
+ if limit > 0:
+ _limit_clause = f"LIMIT {limit} OFFSET {offset}"
+
+ with conn.cursor(cursorclass=DictCursor) as cursor:
+ cursor.execute("SELECT COUNT(*) FROM Publication")
+ _total_rows = int(cursor.fetchone()["COUNT(*)"])
+
+ cursor.execute(f"{_count_query} {_where_clause}", _params)
+ debug_query(cursor, logger)
+ _result = cursor.fetchone()
+ _total_filtered = int(_result["COUNT(*)"] if bool(_result) else 0)
+
+ cursor.execute(f"{_query} {_where_clause} {_limit_clause}", _params)
+ debug_query(cursor, logger)
+ _current_filtered = tuple(
+ {**dict(row), "index": idx}
+ for idx, row
+ in enumerate(cursor.fetchall(), start=offset+1))
+
+ return (
+ _current_filtered,
+ len(_current_filtered),
+ _total_filtered,
+ _total_rows)
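
The four return values map one-to-one onto what a DataTables server-side response needs. A usage sketch, assuming `conn` is an open database connection:

    rows, page_size, filtered, total = fetch_publications(
        conn, search="BXD", offset=0, limit=25)
    # rows      -> up to 25 matching publications, each with a 1-based "index"
    # page_size -> how many rows this page actually holds
    # filtered  -> publications matching the search across the whole table
    # total     -> all publications in the table, ignoring the search
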
diff --git a/uploader/publications/models.py b/uploader/publications/models.py
index 2b0339b..b199991 100644
--- a/uploader/publications/models.py
+++ b/uploader/publications/models.py
@@ -1,6 +1,6 @@
"""Module to handle persistence and retrieval of publication to/from MariaDB"""
import logging
-from typing import Iterable
+from typing import Iterable, Optional
from MySQLdb.cursors import DictCursor
@@ -69,14 +69,6 @@ def update_publications(conn: Connection , publications: tuple[dict, ...]) -> tu
return tuple()
-def fetch_publications(conn: Connection) -> Iterable[dict]:
- """Fetch publications from the database."""
- with conn.cursor(cursorclass=DictCursor) as cursor:
- cursor.execute("SELECT * FROM Publication")
- for row in cursor.fetchall():
- yield dict(row)
-
-
def fetch_publication_by_id(conn: Connection, publication_id: int) -> dict:
"""Fetch a specific publication from the database."""
with conn.cursor(cursorclass=DictCursor) as cursor:
diff --git a/uploader/publications/views.py b/uploader/publications/views.py
index ebb8740..0608a35 100644
--- a/uploader/publications/views.py
+++ b/uploader/publications/views.py
@@ -1,6 +1,7 @@
"""Endpoints for publications"""
import json
+from MySQLdb.cursors import DictCursor
from gn_libs.mysqldb import database_connection
from flask import (
flash,
@@ -14,11 +15,12 @@ from flask import (
from uploader.authorisation import require_login
from .models import (
- fetch_publications,
fetch_publication_by_id,
create_new_publications,
fetch_publication_phenotypes)
+from .datatables import fetch_publications
+
from gn_libs.debug import __pk__
pubbp = Blueprint("publications", __name__)
@@ -35,12 +37,25 @@ def index():
@pubbp.route("/list", methods=["GET"])
@require_login
def list_publications():
- with database_connection(app.config["SQL_URI"]) as conn:
+ # request breakdown:
+ # https://datatables.net/manual/server-side
+ _draw = int(request.args.get("draw") or '0')
+ _length = int(request.args.get("length") or '-1')
+ _start = int(request.args.get("start") or '0')
+ _search = request.args["search[value]"]
+ with (database_connection(app.config["SQL_URI"]) as conn,
+ conn.cursor(cursorclass=DictCursor) as cursor):
+ _publications, _current_rows, _totalfiltered, _totalrows = fetch_publications(
+ conn,
+ _search,
+ offset=_start,
+ limit=_length)
+
return json.dumps({
- "publications": tuple({
- **row, "index": idx
- } for idx,row in enumerate(
- fetch_publications(conn), start=1)),
+ "draw": _page,
+ "recordsTotal": _totalrows,
+ "recordsFiltered": _totalfiltered,
+ "publications": _publications,
"status": "success"
})
@@ -71,7 +86,7 @@ def create_publication():
with database_connection(app.config["SQL_URI"]) as conn:
publications = create_new_publications(conn, ({
- "pubmed_id": form.get("pubmed-id"),
+ "pubmed_id": form.get("pubmed-id") or None,
"abstract": form.get("publication-abstract").encode("utf8") or None,
"authors": authors,
"title": form.get("publication-title").encode("utf8") or None,
diff --git a/uploader/route_utils.py b/uploader/route_utils.py
index 18eadda..ce718fb 100644
--- a/uploader/route_utils.py
+++ b/uploader/route_utils.py
@@ -6,7 +6,8 @@ from gn_libs.mysqldb import database_connection
from uploader.population.models import (populations_by_species,
population_by_species_and_id)
-def generic_select_population(# pylint: disable=[too-many-arguments]
+def generic_select_population(
+ # pylint: disable=[too-many-arguments, too-many-positional-arguments]
species: dict,
template: str,
population_id: str,
diff --git a/uploader/static/css/styles.css b/uploader/static/css/styles.css
index 826ac41..df50dec 100644
--- a/uploader/static/css/styles.css
+++ b/uploader/static/css/styles.css
@@ -179,3 +179,9 @@ table.dataTable thead th, table.dataTable tfoot th{
table.dataTable tbody tr.selected td {
background-color: #ffee99 !important;
}
+
+.form-group {
+ margin-bottom: 2em;
+ padding-bottom: 0.2em;
+ border-bottom: solid gray 1px;
+}
diff --git a/uploader/templates/jobs/job-error.html b/uploader/templates/jobs/job-error.html
new file mode 100644
index 0000000..b3015fc
--- /dev/null
+++ b/uploader/templates/jobs/job-error.html
@@ -0,0 +1,17 @@
+{%extends "base.html"%}
+
+{%from "flash_messages.html" import flash_all_messages%}
+
+{%block title%}Background Jobs: Error{%endblock%}
+
+{%block pagetitle%}Background Jobs: Error{%endblock%}
+
+{%block contents%}
+
+<h1>Background Jobs: Error</h1>
+<p>Job <strong>{{job["job_id"]}}</strong> failed!</p>
+<p>The error details are in the "STDERR" section below.</p>
+
+<h2>STDERR</h2>
+<pre>{{job["stderr"]}}</pre>
+{%endblock%}
diff --git a/uploader/templates/jobs/job-status.html b/uploader/templates/jobs/job-status.html
index 2750fcd..83c02fd 100644
--- a/uploader/templates/jobs/job-status.html
+++ b/uploader/templates/jobs/job-status.html
@@ -13,7 +13,7 @@
{%block contents%}
<p>Status: {{job["metadata"]["status"]}}</p>
-<p>Status: {{job_type}}</p>
+<p>Job Type: {{job["metadata"]["job-type"]}}</p>
<h2>STDOUT</h2>
<pre>{{job["stdout"]}}</pre>
@@ -21,6 +21,4 @@
<h2>STDERR</h2>
<pre>{{job["stderr"]}}</pre>
-<hr />
-<p>The Job: {{job["metadata"]}}</p>
{%endblock%}
diff --git a/uploader/templates/phenotypes/add-phenotypes-base.html b/uploader/templates/phenotypes/add-phenotypes-base.html
index a7aaeb0..01cd0fe 100644
--- a/uploader/templates/phenotypes/add-phenotypes-base.html
+++ b/uploader/templates/phenotypes/add-phenotypes-base.html
@@ -92,48 +92,53 @@
[
{data: "index"},
{
+ searchable: true,
data: (pub) => {
- if(pub.PubMed_ID) {
- return `<a href="https://pubmed.ncbi.nlm.nih.gov/` +
- `${pub.PubMed_ID}/" target="_blank" ` +
- `title="Link to publication on NCBI.">` +
- `${pub.PubMed_ID}</a>`;
- }
- return "";
+ if(pub.PubMed_ID) {
+ return `<a href="https://pubmed.ncbi.nlm.nih.gov/` +
+ `${pub.PubMed_ID}/" target="_blank" ` +
+ `title="Link to publication on NCBI.">` +
+ `${pub.PubMed_ID}</a>`;
+ }
+ return "";
}
},
{
+ searchable: true,
data: (pub) => {
- var title = "⸻";
- if(pub.Title) {
- title = pub.Title
- }
- return `<a href="/publications/view/${pub.Id}" ` +
+ var title = "⸻";
+ if(pub.Title) {
+ title = pub.Title
+ }
+ return `<a href="/publications/view/${pub.Id}" ` +
`target="_blank" ` +
`title="Link to view publication details">` +
`${title}</a>`;
}
},
{
+ searchable: true,
data: (pub) => {
- authors = pub.Authors.split(",").map(
- (item) => {return item.trim();});
- if(authors.length > 1) {
- return authors[0] + ", et. al.";
- }
- return authors[0];
+ authors = pub.Authors.split(",").map(
+ (item) => {return item.trim();});
+ if(authors.length > 1) {
+ return authors[0] + ", et al.";
+ }
+ return authors[0];
}
}
],
{
+ serverSide: true,
ajax: {
url: "/publications/list",
dataSrc: "publications"
},
select: "single",
+ paging: true,
scrollY: 700,
- paging: false,
deferRender: true,
+ scroller: true,
layout: {
topStart: "info",
topEnd: "search"
diff --git a/uploader/templates/phenotypes/create-dataset.html b/uploader/templates/phenotypes/create-dataset.html
index 8e45491..19a2b34 100644
--- a/uploader/templates/phenotypes/create-dataset.html
+++ b/uploader/templates/phenotypes/create-dataset.html
@@ -42,7 +42,7 @@
<input type="text"
name="dataset-name"
id="txt-dataset-name"
- value="{{original_formdata.get('dataset-name') or (population.InbredSetCode + 'Publish')}}"
+ value="{{original_formdata.get('dataset-name') or (population.Name + 'Publish')}}"
{%if errors["dataset-name"] is defined%}
class="form-control danger"
{%else%}
@@ -51,7 +51,7 @@
required="required" />
<small class="form-text text-muted">
<p>A short representative name for the dataset.</p>
- <p>Recommended: Use the population code and append "Publish" at the end.
+ <p>Recommended: Use the population name and append "Publish" at the end.
<br />This field will only accept names composed of
letters ('A-Za-z'), numbers (0-9), hyphens and underscores.</p>
</small>
@@ -86,7 +86,7 @@
name="dataset-shortname"
type="text"
class="form-control"
- value="{{original_formdata.get('dataset-shortname') or (population.InbredSetCode + ' Publish')}}" />
+ value="{{original_formdata.get('dataset-shortname') or (population.Name + 'Publish')}}" />
<small class="form-text text-muted">
<p>An optional, short name for the dataset. <br />
If this is not provided, it will default to the value provided for the
diff --git a/uploader/templates/phenotypes/load-phenotypes-success.html b/uploader/templates/phenotypes/load-phenotypes-success.html
new file mode 100644
index 0000000..3baca5b
--- /dev/null
+++ b/uploader/templates/phenotypes/load-phenotypes-success.html
@@ -0,0 +1,42 @@
+{%extends "phenotypes/base.html"%}
+{%from "flash_messages.html" import flash_all_messages%}
+{%from "macro-table-pagination.html" import table_pagination%}
+{%from "phenotypes/macro-display-pheno-dataset-card.html" import display_pheno_dataset_card%}
+
+{%block title%}Phenotypes{%endblock%}
+
+{%block pagetitle%}Phenotypes{%endblock%}
+
+{%block lvl4_breadcrumbs%}
+<li {%if activelink=="load-phenotypes-success"%}
+ class="breadcrumb-item active"
+ {%else%}
+ class="breadcrumb-item"
+ {%endif%}>
+ <a href="{{url_for('species.populations.phenotypes.add_phenotypes',
+ species_id=species.SpeciesId,
+ population_id=population.Id,
+ dataset_id=dataset.Id)}}">Add Phenotypes</a>
+</li>
+{%endblock%}
+
+{%block contents%}
+<div class="row">
+ <p>You have successfully loaded
+ <!-- maybe indicate the number of phenotypes here? -->your
+ new phenotypes into the database.</p>
+ <!-- TODO: Maybe notify user that they have sole access. -->
+ <!-- TODO: Maybe provide a link to go to GeneNetwork to view the data. -->
+ <p>View your data
+ <a href="{{gn2_server_url}}search?species={{species.Name}}&group={{population.Name}}&type=Phenotypes&dataset={{dataset.Name}}&search_terms_or=*%0D%0A&search_terms_and=*%0D%0A&accession_id=None&FormID=searchResult"
+ target="_blank">on GeneNetwork2</a>.
+ You might need to log in to GeneNetwork2 to view specific traits.</p>
+</div>
+{%endblock%}
+
+{%block sidebarcontents%}
+{{display_pheno_dataset_card(species, population, dataset)}}
+{%endblock%}
+
+
+{%block more_javascript%}{%endblock%}