Diffstat (limited to 'scripts')
-rw-r--r--  scripts/insert_samples.py                 16
-rw-r--r--  scripts/phenotypes/__init__.py             1
-rw-r--r--  scripts/phenotypes/delete_phenotypes.py  173
-rw-r--r--  scripts/run_qtlreaper.py                   2
4 files changed, 178 insertions, 14 deletions
diff --git a/scripts/insert_samples.py b/scripts/insert_samples.py
index fc029f9..96ae8e2 100644
--- a/scripts/insert_samples.py
+++ b/scripts/insert_samples.py
@@ -6,10 +6,10 @@ import argparse
 import traceback
 
 import MySQLdb as mdb
-from redis import Redis
+
 from gn_libs.mysqldb import database_connection
 
-from uploader.check_connections import check_db, check_redis
+from uploader.check_connections import check_db
 from uploader.species.models import species_by_id
 from uploader.population.models import population_by_id
 from uploader.samples.models import (
@@ -35,7 +35,6 @@ class SeparatorAction(argparse.Action):
         setattr(namespace, self.dest, (chr(9) if values == "\\t" else values))
 
 def insert_samples(conn: mdb.Connection,# pylint: disable=[too-many-arguments, too-many-positional-arguments]
-                   rconn: Redis,# pylint: disable=[unused-argument]
                    speciesid: int,
                    populationid: int,
                    samplesfile: pathlib.Path,
@@ -119,11 +118,6 @@ if __name__ == "__main__":
             help=("The character used to delimit (surround?) the value in "
                   "each column."))
 
-        # == Script-specific extras ==
-        parser.add_argument("--redisuri",
-                            help="URL to initialise connection to redis",
-                            default="redis:///")
-
         args = parser.parse_args()
         return args
 
@@ -132,17 +126,13 @@ if __name__ == "__main__":
         status_code = 1 # Exit with an Exception
         args = cli_args()
         check_db(args.databaseuri)
-        check_redis(args.redisuri)
         if not args.samplesfile.exists():
             logging.error("File not found: '%s'.", args.samplesfile)
             return 2
 
-        with (Redis.from_url(args.redisuri, decode_responses=True) as rconn,
-              database_connection(args.databaseuri) as dbconn):
-
+        with database_connection(args.databaseuri) as dbconn:
             try:
                 status_code = insert_samples(dbconn,
-                                             rconn,
                                              args.speciesid,
                                              args.populationid,
                                              args.samplesfile,
diff --git a/scripts/phenotypes/__init__.py b/scripts/phenotypes/__init__.py
new file mode 100644
index 0000000..73ad839
--- /dev/null
+++ b/scripts/phenotypes/__init__.py
@@ -0,0 +1 @@
+"Scripts for dealing with phenotypes."
diff --git a/scripts/phenotypes/delete_phenotypes.py b/scripts/phenotypes/delete_phenotypes.py
new file mode 100644
index 0000000..461f3ec
--- /dev/null
+++ b/scripts/phenotypes/delete_phenotypes.py
@@ -0,0 +1,173 @@
+"""Delete phenotypes."""
+import sys
+import logging
+from pathlib import Path
+from typing import Optional
+from urllib.parse import urljoin
+from argparse import Namespace, ArgumentParser
+
+import requests
+from MySQLdb.cursors import DictCursor, BaseCursor
+
+from gn_libs.mysqldb import database_connection
+
+from uploader.phenotypes.models import delete_phenotypes
+from scripts.cli.logging import setup_logging
+from scripts.cli.options import (add_logging,
+                                 add_mariadb_uri,
+                                 add_population_id)
+
+logger = logging.getLogger(__name__)
+
+def read_xref_ids_file(filepath: Optional[Path]) -> tuple[int, ...]:
+    """Read the phenotypes' cross-reference IDS from file."""
+    if filepath is None:
+        return tuple()
+
+    logger.debug("Using file '%s' to retrieve XREF IDs for deletion.",
+                 filepath.name)
+    _ids: tuple[int, ...] = tuple()
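+    # One cross-reference ID per line; lines that do not parse as integers are skipped.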
+    with filepath.open(mode="r") as infile:
+        for line in infile.readlines():
+            try:
+                _ids += (int(line.strip()),)
+            except ValueError:  # int() raises ValueError for blank or non-numeric lines
+                pass
+
+    return _ids
+
+
+def fetch_all_xref_ids(
+        cursor: BaseCursor, population_id: int) -> tuple[int, ...]:
+    """Fetch all cross-reference IDs."""
+    cursor.execute("SELECT Id FROM PublishXRef WHERE InbredSetId=%s",
+                   (population_id,))
+    return tuple(int(row["Id"]) for row in cursor.fetchall())
+
+
+def update_auth(
+        auth_details: tuple[str, str],
+        species_id: int,
+        population_id: int,
+        dataset_id: int,
+        xref_ids: tuple[int, ...] = tuple()
+):
+    """Update the authorisation server: remove items to delete."""
+    authserver, token = auth_details
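+    # Tell the authorisation server which cross-reference IDs are being deleted.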
+    resp = requests.post(
+        urljoin(authserver,
+                (f"/auth/data/phenotypes/{species_id}/{population_id}"
+                 f"/{dataset_id}/delete")),
+        timeout=(9.13, 20),
+        headers={
+            "Authorization": f"Bearer {token}",
+            "Content-Type": "application/json"
+        },
+        json={"xref_ids": xref_ids})
+    resp.raise_for_status()
+
+
+def delete_the_phenotypes(
+        cursor: BaseCursor,
+        population_id: int,
+        xref_ids: tuple[int, ...] = tuple()) -> int:
+    """Process and delete the phenotypes."""
+    delete_phenotypes(cursor, population_id, xref_ids)
+
+    return 0
+
+if __name__ == "__main__":
+    def parse_args() -> Namespace:
+        """Parse CLI arguments."""
+        parser = add_logging(
+            add_population_id(
+                add_mariadb_uri(
+                    ArgumentParser(
+                        prog="delete-phenotypes",
+                        description=(
+                            "Script to delete phenotypes from the database.")))))
+        parser.add_argument(
+            "dataset_id",
+            metavar="DATASET-ID",
+            type=int,
+            help="The dataset identifier for phenotypes to delete.")
+        parser.add_argument(
+            "auth_server_uri",
+            metavar="AUTH-SERVER-URI",
+            type=str,
+            help="URI to the authorisation server.")
+        parser.add_argument(
+            "auth_token",
+            metavar="AUTH-TOKEN",
+            type=str,
+            help=("Token to use to update the authorisation system with the "
+                  "deletions done."))
+        parser.add_argument(
+            "--xref_ids_file",
+            metavar="XREF-IDS-FILE",
+            type=Path,
+            help=("Path to a file with phenotypes cross-reference IDs to "
+                  "delete."))
+        parser.add_argument(
+            "--delete-all",
+            action="store_true",
+            help=("If no 'XREF-IDS-FILE' is provided, this flag determines "
+                  "whether or not all the phenotypes for the given population "
+                  "will be deleted."))
+        return parser.parse_args()
+
+
+    def main():
+        """The `delete-phenotypes` script's entry point."""
+        args = parse_args()
+        setup_logging(logger, args.log_level.upper(), tuple())
+        with (database_connection(args.db_uri) as conn,
+              conn.cursor(cursorclass=DictCursor) as cursor):
+            xref_ids = read_xref_ids_file(args.xref_ids_file)
+            try:
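+                # An explicit ID list and '--delete-all' are mutually exclusive.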
+                assert not (len(xref_ids) > 0 and args.delete_all)
+                xref_ids = (fetch_all_xref_ids(cursor, args.population_id)
+                            if args.delete_all else xref_ids)
+                logger.debug("Will delete %s phenotypes and related data",
+                             len(xref_ids))
+                if len(xref_ids) == 0:
+                    print("No cross-reference IDs were provided. Aborting.")
+                    return 0
+
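+                # Remove the entries from the authorisation server before deleting the database rows.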
+                print("Updating authorisations: ", end="")
+                update_auth((args.auth_server_uri, args.auth_token),
+                            args.species_id,
+                            args.population_id,
+                            args.dataset_id,
+                            xref_ids)
+                print("OK.")
+                print("Deleting the data: ", end="")
+                delete_phenotypes(cursor, args.population_id, xref_ids=xref_ids)
+                print("OK.")
+                if args.xref_ids_file is not None:
+                    print("Deleting temporary file: ", end="")
+                    args.xref_ids_file.unlink()
+                    print("OK.")
+
+                return 0
+            except AssertionError:
+                logger.error(
+                    "'--delete-all' and 'XREF-IDS-FILE' are mutually exclusive: "
+                    "providing a file of cross-reference IDs to delete while "
+                    "also requesting that all phenotypes in the population be "
+                    "deleted is ambiguous. Please specify only one of the two.")
+                return 1
+            except requests.exceptions.HTTPError as _exc:
+                resp = _exc.response
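+                # The auth server is expected to return a JSON error body with 'error' and 'error_description'.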
+                resp_data = resp.json()
+                logger.debug("%s: %s",
+                             resp_data["error"],
+                             resp_data["error_description"],
+                             exc_info=True)
+                return 1
+            except Exception as _exc:# pylint: disable=[broad-exception-caught]
+                logger.debug("Failed while attempting to delete phenotypes.",
+                             exc_info=True)
+                return 1
+
+    sys.exit(main())
diff --git a/scripts/run_qtlreaper.py b/scripts/run_qtlreaper.py
index 7d58402..54e5d45 100644
--- a/scripts/run_qtlreaper.py
+++ b/scripts/run_qtlreaper.py
@@ -169,7 +169,7 @@ def dispatch(args: Namespace) -> int:
             logger.info("Successfully computed p values for %s traits.", len(_traitsdata))
             return 0
         except FileNotFoundError as fnf:
-            logger.error(", ".join(fnf.args), exc_info=False)
+            logger.error(", ".join(str(arg) for arg in fnf.args), exc_info=False)
         except AssertionError as aserr:
             logger.error(", ".join(aserr.args), exc_info=False)
         except Exception as _exc:# pylint: disable=[broad-exception-caught]