author    Frederick Muriuki Muriithi 2025-07-14 13:49:08 -0500
committer Frederick Muriuki Muriithi 2025-07-14 13:49:08 -0500
commit    eb6c0f2bddee49907a04e9d78cbb24598a190c23 (patch)
tree      2899d2026afbfd55cc523c89bf6b10adf5debf5b /uploader/phenotypes
parent    6f9a91f1cb92530ad50c608c1f59ff38acb1b1f5 (diff)
download  gn-uploader-eb6c0f2bddee49907a04e9d78cbb24598a190c23.tar.gz
Fix linting issues.
Diffstat (limited to 'uploader/phenotypes')
-rw-r--r--  uploader/phenotypes/models.py |  5
-rw-r--r--  uploader/phenotypes/views.py  | 23
2 files changed, 17 insertions(+), 11 deletions(-)
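
The commit message and diffstat above indicate lint-only changes to the two phenotype modules. To reproduce the warnings this commit addresses, pylint can be run over just those files; the sketch below uses pylint's programmatic entry point and assumes it is executed from the repository root (the exact stats attributes differ slightly between pylint versions).

    from pylint.lint import Run

    # Lint only the two modules touched by this commit; exit=False keeps the
    # interpreter alive so the results object can be inspected afterwards.
    results = Run(
        ["uploader/phenotypes/models.py", "uploader/phenotypes/views.py"],
        exit=False)

    # Overall pylint score, out of 10.
    print(results.linter.stats.global_note)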
diff --git a/uploader/phenotypes/models.py b/uploader/phenotypes/models.py
index 4a1b2d5..4b8b223 100644
--- a/uploader/phenotypes/models.py
+++ b/uploader/phenotypes/models.py
@@ -92,7 +92,8 @@ def dataset_phenotypes(conn: mdb.Connection,
                        limit: Optional[int] = None) -> tuple[dict, ...]:
     """Fetch the actual phenotypes."""
     _query = (
-        "SELECT pheno.*, pxr.Id AS xref_id, pxr.InbredSetId, ist.InbredSetCode FROM Phenotype AS pheno "
+        "SELECT pheno.*, pxr.Id AS xref_id, pxr.InbredSetId, ist.InbredSetCode "
+        "FROM Phenotype AS pheno "
         "INNER JOIN PublishXRef AS pxr ON pheno.Id=pxr.PhenotypeId "
         "INNER JOIN PublishFreeze AS pf ON pxr.InbredSetId=pf.InbredSetId "
         "INNER JOIN InbredSet AS ist ON pf.InbredSetId=ist.Id "
@@ -218,7 +219,7 @@ def phenotype_by_id(
                 ).values())
             }
         if bool(_pheno) and len(_pheno.keys()) > 1:
-            raise Exception(
+            raise Exception(# pylint: disable=[broad-exception-raised]
                 "We found more than one phenotype with the same identifier!")
 
     return None
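
The models.py hunks above address two common pylint complaints: line-too-long, fixed by splitting the SQL statement across adjacent string literals that Python joins at compile time, and broad-exception-raised, acknowledged with an inline disable comment rather than a dedicated exception class. A minimal standalone sketch of both patterns, with an illustrative query and function that are not copied from the repository:

    # Adjacent string literals merge into a single SQL string while keeping
    # each source line within pylint's line-length limit.
    _QUERY = (
        "SELECT pheno.*, pxr.Id AS xref_id, pxr.InbredSetId "
        "FROM Phenotype AS pheno "
        "INNER JOIN PublishXRef AS pxr ON pheno.Id=pxr.PhenotypeId")


    def assert_single_match(rows: tuple[dict, ...]) -> None:
        """Raise if a supposedly unique identifier matched several rows."""
        if len(rows) > 1:
            # The inline comment tells pylint the broad raise is deliberate.
            raise Exception(  # pylint: disable=[broad-exception-raised]
                "We found more than one phenotype with the same identifier!")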
diff --git a/uploader/phenotypes/views.py b/uploader/phenotypes/views.py
index 04cdc3b..0e626ad 100644
--- a/uploader/phenotypes/views.py
+++ b/uploader/phenotypes/views.py
@@ -385,7 +385,7 @@ def process_phenotypes_individual_files(error_uri):
                     arcname=filedata["original-name"])
                 cdata[rqtlkey] = cdata.get(rqtlkey, []) + [filedata["original-name"]]
             else:
-                # TODO: Check this path: fix any bugs.
+                # T0DO: Check this path: fix any bugs.
                 _sentfile = request.files[formkey]
                 if not bool(_sentfile):
                     flash(f"Expected file ('{formkey}') was not provided.",
@@ -640,12 +640,16 @@ def load_data_to_database(
         **kwargs
 ):# pylint: disable=[unused-argument]
     """Load the data from the given QC job into the database."""
-    jobs_db = app.config["ASYNCHRONOUS_JOBS_SQLITE_DB"]
+    _jobs_db = app.config["ASYNCHRONOUS_JOBS_SQLITE_DB"]
     with (Redis.from_url(app.config["REDIS_URL"], decode_responses=True) as rconn,
-          sqlite3.connection(jobs_db) as conn):
+          sqlite3.connection(_jobs_db) as conn):
+        # T0DO: Maybe break the connection between the jobs here, pass:
+        # - the bundle name (rebuild the full path here.)
+        # - publication details, where separate
+        # - details about the files: e.g. total lines, etc
         qc_job = jobs.job(rconn, jobs.jobsnamespace(), request.form["data-qc-job-id"])
         _meta = json.loads(qc_job["job-metadata"])
-        load_job_id = uuid.uuid4()
+        _load_job_id = uuid.uuid4()
         _loglevel = logging.getLevelName(app.logger.getEffectiveLevel()).lower()
         command = [
             sys.executable,
@@ -653,8 +657,8 @@ def load_data_to_database(
             "-m",
             "scripts.load_phenotypes_to_db",
             app.config["SQL_URI"],
-            jobs_db,
-            str(load_job_id),
+            _jobs_db,
+            str(_load_job_id),
             "--log-level",
             _loglevel
         ]
@@ -674,7 +678,7 @@ def load_data_to_database(
         ).then(
             lambda token: gnlibs_jobs.initialise_job(
                 conn,
-                load_job_id,
+                _load_job_id,
                 command,
                 "load-new-phenotypes-data",
                 extra_meta={
@@ -692,7 +696,7 @@ def load_data_to_database(
         ).then(
             lambda job: gnlibs_jobs.launch_job(
                 job,
-                jobs_db,
+                _jobs_db,
                 Path(f"{app.config['UPLOAD_FOLDER']}/job_errors"),
                 worker_manager="gn_libs.jobs.launcher",
                 loglevel=_loglevel)
@@ -972,6 +976,7 @@ def load_data_success(
         job_id: uuid.UUID,
         **kwargs
 ):# pylint: disable=[unused-argument]
+    """Display success page if loading data to database was successful."""
     with (database_connection(app.config["SQL_URI"]) as conn,
           sqlite3.connection(app.config["ASYNCHRONOUS_JOBS_SQLITE_DB"])
           as jobsconn):
@@ -1016,5 +1021,5 @@ def load_data_success(
                                            "FormID": "searchResult"
                                        }),
                                        fragment="")))
-        except JobNotFound as jnf:
+        except JobNotFound as _jnf:
             return render_template("jobs/job-not-found.html", job_id=job_id)
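
The smaller views.py changes lean on pylint's naming conventions and docstring checks: prefixing a binding with an underscore (as with the caught JobNotFound exception) matches the default dummy-variables pattern and silences unused-variable, while the new docstring on load_data_success clears missing-function-docstring. A brief illustrative sketch with made-up names, not the uploader's actual code:

    class JobNotFound(Exception):
        """Raised when a requested job id is missing from the jobs database."""


    def fetch_job(job_id):
        """Look a job up by id, raising JobNotFound when it does not exist."""
        raise JobNotFound(job_id)


    def job_status_page(job_id):
        """Render a status page for the given job."""
        try:
            return f"Job {fetch_job(job_id)} is running."
        except JobNotFound as _jnf:
            # The leading underscore marks _jnf as intentionally unused,
            # which keeps pylint's unused-variable check quiet.
            return "jobs/job-not-found.html"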