about summary refs log tree commit diff
path: root/uploader
diff options
context:
space:
mode:
authorFrederick Muriuki Muriithi2025-06-03 12:09:59 -0500
committerFrederick Muriuki Muriithi2025-06-03 12:09:59 -0500
commit9280e6232152991fca762d74f415b704a452673d (patch)
treeefe5df2ee4674551d201f167e5520e579bfd9e88 /uploader
parent59b07a41ca5181a19e631f640b38e5fd33d1d550 (diff)
downloadgn-uploader-9280e6232152991fca762d74f415b704a452673d.tar.gz
Save data with `LOAD DATA INFILE …` query
To help speed up saving the data (for really huge files) into the
database, use the `LOAD DATA INFILE …` command if available, and if
not, fall back to the implementation that uses raw `INSERT` queries.
Diffstat (limited to 'uploader')
-rw-r--r-- uploader/phenotypes/models.py | 56
1 file changed, 47 insertions, 9 deletions
diff --git a/uploader/phenotypes/models.py b/uploader/phenotypes/models.py
index e47a016..20b8e77 100644
--- a/uploader/phenotypes/models.py
+++ b/uploader/phenotypes/models.py
@@ -1,5 +1,7 @@
 """Database and utility functions for phenotypes."""
 import logging
+import tempfile
+from pathlib import Path
 from functools import reduce
 from datetime import datetime
 from typing import Optional, Iterable
@@ -13,6 +15,16 @@ from gn_libs.mysqldb import debug_query
 logger = logging.getLogger(__name__)
 
 
+__PHENO_DATA_TABLES__ = {
+    "PublishData": {
+        "table": "PublishData", "valueCol": "value", "DataIdCol": "Id"},
+    "PublishSE": {
+        "table": "PublishSE", "valueCol": "error", "DataIdCol": "DataId"},
+    "NStrain": {
+        "table": "NStrain", "valueCol": "count", "DataIdCol": "DataId"}
+}
+
+
 def datasets_by_population(
         conn: mdb.Connection,
         species_id: int,
@@ -326,14 +338,7 @@ def save_phenotypes_data(
         data: Iterable[dict]
 ) -> int:
     """Save new phenotypes data into the database."""
-    _table_details = {
-        "PublishData": {
-            "table": "PublishData", "valueCol": "value", "DataIdCol": "Id"},
-        "PublishSE": {
-            "table": "PublishSE", "valueCol": "error", "DataIdCol": "DataId"},
-        "NStrain": {
-            "table": "PublishData", "valueCol": "count", "DataIdCol": "DataId"}
-    }[table]
+    _table_details = __PHENO_DATA_TABLES__[table]
     with conn.cursor(cursorclass=DictCursor) as cursor:
         _count = 0
         while True:
@@ -347,7 +352,7 @@ def save_phenotypes_data(
                 (f"INSERT INTO {_table_details['table']}"
                  f"({_table_details['DataIdCol']}, StrainId, {_table_details['valueCol']}) "
                  "VALUES "
-                 f"(%(data_id)s, %(sample_id)s, %({_table_details['valueCol']})s) "),
+                 f"(%(data_id)s, %(sample_id)s, %(value)s) "),
                 tuple(batch))
             debug_query(cursor, logger)
             _count = _count + len(batch)
@@ -355,3 +360,36 @@ def save_phenotypes_data(
 
     logger.debug("Saved a total of %s data rows", _count)
     return _count
+
+
+def quick_save_phenotypes_data(
+        conn: mdb.Connection,
+        table: str,
+        dataitems: Iterable[dict],
+        tmpdir: Path
+) -> int:
+    """Save data items to the database using `LOAD DATA LOCAL INFILE`."""
+    _table_details = __PHENO_DATA_TABLES__[table]
+    with (tempfile.NamedTemporaryFile(
+            prefix=f"{table}_data", mode="wt", dir=tmpdir) as tmpfile,
+          conn.cursor(cursorclass=DictCursor) as cursor):
+        _count = 0
+        logger.debug("Write data rows to text file.")
+        for row in dataitems:
+            tmpfile.write(
+                f'{row["data_id"]}\t{row["sample_id"]}\t{row["value"]}\n')
+            _count = _count + 1
+        tmpfile.flush()
+
+        logger.debug("Load text file into database (table: %s)",
+                     _table_details["table"])
+        cursor.execute(
+            f"LOAD DATA LOCAL INFILE '{tmpfile.name}' "
+            f"INTO TABLE {_table_details['table']} "
+            "("
+            f"{_table_details['DataIdCol']}, "
+            "StrainId, "
+            f"{_table_details['valueCol']}"
+            ")")
+        debug_query(cursor, logger)
+        return _count