Diffstat (limited to 'uploader')
-rw-r--r--  uploader/__init__.py                  4
-rw-r--r--  uploader/background_jobs.py          12
-rw-r--r--  uploader/default_settings.py          2
-rw-r--r--  uploader/jobs.py                      8
-rw-r--r--  uploader/oauth2/client.py             5
-rw-r--r--  uploader/phenotypes/misc.py           2
-rw-r--r--  uploader/phenotypes/models.py        13
-rw-r--r--  uploader/phenotypes/views.py          2
-rw-r--r--  uploader/population/views.py          2
-rw-r--r--  uploader/publications/datatables.py   2
-rw-r--r--  uploader/publications/misc.py         4
-rw-r--r--  uploader/publications/pubmed.py       3
-rw-r--r--  uploader/publications/views.py       14
-rw-r--r--  uploader/route_utils.py               4
14 files changed, 43 insertions(+), 34 deletions(-)
diff --git a/uploader/__init__.py b/uploader/__init__.py
index 7425b38..0ba1f81 100644
--- a/uploader/__init__.py
+++ b/uploader/__init__.py
@@ -11,7 +11,7 @@ from cachelib import FileSystemCache
 
 from gn_libs import jobs as gnlibs_jobs
 
-from flask_session import Session
+from flask_session import Session# type: ignore[attr-defined]
 
 
 from uploader.oauth2.client import user_logged_in, authserver_authorise_uri
@@ -103,7 +103,7 @@ def create_app(config: Optional[dict] = None):
     ### END: Application configuration
 
     app.config["SESSION_CACHELIB"] = FileSystemCache(
-        cache_dir=Path(app.config["SESSION_FILESYSTEM_CACHE_PATH"]).absolute(),
+        cache_dir=str(Path(app.config["SESSION_FILESYSTEM_CACHE_PATH"]).absolute()),
         threshold=int(app.config["SESSION_FILESYSTEM_CACHE_THRESHOLD"]),
         default_timeout=int(app.config["SESSION_FILESYSTEM_CACHE_TIMEOUT"]))
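
Note: wrapping the session cache path in str() hands FileSystemCache a plain string instead of a pathlib.Path, which is what its cache_dir parameter appears to be annotated as. A minimal sketch of the same pattern, with an illustrative path and limits rather than the app's real configuration values:

    from pathlib import Path
    from cachelib import FileSystemCache

    # Illustrative values; the point is that cache_dir is passed as a str,
    # not a pathlib.Path, which keeps the type checker satisfied.
    cache = FileSystemCache(
        cache_dir=str(Path("/tmp/uploader-sessions").absolute()),
        threshold=500,
        default_timeout=300)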
 
diff --git a/uploader/background_jobs.py b/uploader/background_jobs.py
index 4aded1d..4e1cd13 100644
--- a/uploader/background_jobs.py
+++ b/uploader/background_jobs.py
@@ -4,9 +4,9 @@ import importlib
 from typing import Callable
 from functools import partial
 
+from werkzeug.wrappers.response import Response
 from flask import (
     redirect,
-    Response,
     Blueprint,
     render_template,
     current_app as app)
@@ -48,7 +48,7 @@ def register_handlers(
     return job_type
 
 
-def register_job_handlers(job: str):
+def register_job_handlers(job: dict):
     """Related to register handlers above."""
     def __load_handler__(absolute_function_path):
         _parts = absolute_function_path.split(".")
@@ -79,8 +79,12 @@ def handler(job: dict, handler_type: str) -> HandlerType:
     ).get(handler_type)
     if bool(_handler):
         return _handler(job)
-    return render_template(sui_template("background-jobs/default-success-page.html"),
-                           job=job)
+
+    def __default_success_handler__(_job):
+        return render_template(
+            sui_template("background-jobs/default-success-page.html"), job=_job)
+
+    return __default_success_handler__
 
 
 error_handler = partial(handler, handler_type="error")
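
Note: the fallback branch of handler() now returns a callable rather than an already-rendered page, so both branches match the declared HandlerType and the partial()-built error_handler/success_handler behave consistently. A generic sketch of the idea; the registry lookup and names below are illustrative, not the module's actual structure:

    from typing import Callable, Optional
    from functools import partial

    Handler = Callable[[dict], str]

    def handler(job: dict, handler_type: str) -> Handler:
        registered: Optional[Handler] = job.get("handlers", {}).get(handler_type)
        if registered is not None:
            return registered

        # The fallback also returns a callable (not a rendered page),
        # so both branches satisfy the declared return type.
        def __default__(_job: dict) -> str:
            return f"default {handler_type} page for job {_job.get('job_id')}"

        return __default__

    error_handler = partial(handler, handler_type="error")
    success_handler = partial(handler, handler_type="success")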
diff --git a/uploader/default_settings.py b/uploader/default_settings.py
index bb3a967..52cdad5 100644
--- a/uploader/default_settings.py
+++ b/uploader/default_settings.py
@@ -32,4 +32,4 @@ JWKS_DELETION_AGE_DAYS = 14 # Days (from creation) to keep a JWK around before d
 
 
 ## --- Feature flags ---
-FEATURE_FLAGS_HTTP = []
+FEATURE_FLAGS_HTTP: list[str] = []
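
Note: this annotation (and the matching ones on the empty tuples in phenotypes/misc.py, phenotypes/models.py and publications/misc.py below) addresses mypy's "Need type annotation" error on empty container literals, whose element type cannot be inferred. A minimal sketch:

    from typing import Any

    # mypy cannot infer an element type from an empty literal and reports
    # "Need type annotation"; annotating the empty initializer resolves it.
    FEATURE_FLAGS_HTTP: list[str] = []
    _diff: tuple[dict[str, Any], ...] = tuple()

    FEATURE_FLAGS_HTTP.append("x-feature-example")  # checked as list[str] from here on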
diff --git a/uploader/jobs.py b/uploader/jobs.py
index 5968c03..b2de54b 100644
--- a/uploader/jobs.py
+++ b/uploader/jobs.py
@@ -147,8 +147,8 @@ def job_errors(
     return take(
         (
             json.loads(error)
-            for key in rconn.keys(f"{prefix}:{str(job_id)}:*:errors:*")
-            for error in rconn.lrange(key, 0, -1)),
+            for key in rconn.keys(f"{prefix}:{str(job_id)}:*:errors:*")# type: ignore[union-attr]
+            for error in rconn.lrange(key, 0, -1)),# type: ignore[union-attr]
         count)
 
 
@@ -160,8 +160,8 @@ def job_files_metadata(
     """Get the metadata for specific job file."""
     return {
         key.split(":")[-1]: {
-            **rconn.hgetall(key),
+            **rconn.hgetall(key),# type: ignore[dict-item]
             "filetype": key.split(":")[-3]
         }
-        for key in rconn.keys(f"{prefix}:{str(job_id)}:*:metadata*")
+        for key in rconn.keys(f"{prefix}:{str(job_id)}:*:metadata*")# type: ignore[union-attr]
     }
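
Note: the "type: ignore[union-attr]" comments work around redis-py's broad return annotations, which union the synchronous value with the async client's awaitable. An alternative sketch using typing.cast; the key pattern below is illustrative, not the uploader's real Redis namespace:

    from typing import cast

    import redis

    rconn = redis.Redis(decode_responses=True)

    # Casting the command results to the concrete type avoids inline
    # "type: ignore" comments while leaving runtime behaviour unchanged.
    keys = cast(list[str], rconn.keys("jobs:*:errors:*"))
    errors = [error
              for key in keys
              for error in cast(list[str], rconn.lrange(key, 0, -1))]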
diff --git a/uploader/oauth2/client.py b/uploader/oauth2/client.py
index b94a044..4e81afd 100644
--- a/uploader/oauth2/client.py
+++ b/uploader/oauth2/client.py
@@ -157,7 +157,10 @@ def fetch_user_details() -> Either:
                 "user_id": uuid.UUID(usrdets["user_id"]),
                 "name": usrdets["name"],
                 "email": usrdets["email"],
-                "token": session.user_token()}))
+                "token": session.user_token(),
+                "logged_in": session.user_token().either(
+                    lambda _e: False, lambda _t: True)
+            }))
         return udets
     return Right(suser)
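
Note: the new "logged_in" entry folds the token Either into a plain boolean. A small sketch of that fold, assuming a pymonad-style Either like the one this module appears to use:

    from pymonad.either import Either, Left, Right

    def is_logged_in(token_result: Either) -> bool:
        # Left (no/expired token) -> False, Right (a usable token) -> True.
        return token_result.either(lambda _error: False, lambda _token: True)

    assert is_logged_in(Right("a-token")) is True
    assert is_logged_in(Left("not logged in")) is False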
 
diff --git a/uploader/phenotypes/misc.py b/uploader/phenotypes/misc.py
index cbe3b7f..1924c07 100644
--- a/uploader/phenotypes/misc.py
+++ b/uploader/phenotypes/misc.py
@@ -8,7 +8,7 @@ def phenotypes_data_differences(
         filedata: tuple[dict, ...], dbdata: tuple[dict, ...]
 ) -> tuple[dict, ...]:
     """Compute differences between file data and db data"""
-    diff = tuple()
+    diff: tuple[dict, ...] = tuple()
     for filerow, dbrow in zip(
             sorted(filedata, key=lambda item: (item["phenotype_id"], item["xref_id"])),
             sorted(dbdata, key=lambda item: (item["PhenotypeId"], item["xref_id"]))):
diff --git a/uploader/phenotypes/models.py b/uploader/phenotypes/models.py
index e962e62..7c051d7 100644
--- a/uploader/phenotypes/models.py
+++ b/uploader/phenotypes/models.py
@@ -255,9 +255,9 @@ def phenotypes_vector_data(# pylint: disable=[too-many-arguments, too-many-posit
         xref_ids: tuple[int, ...] = tuple(),
         offset: int = 0,
         limit: Optional[int] = None
-) -> dict[tuple[int, int, int]: dict[str, Union[int,float]]]:
+) -> dict[tuple[int, int, int], dict[str, Union[int,float]]]:
     """Retrieve the vector data values for traits in the database."""
-    _params = (species_id, population_id)
+    _params: tuple[int, ...] = (species_id, population_id)
     _query = ("SELECT "
               "Species.Id AS SpeciesId, iset.Id AS InbredSetId, "
               "pxr.Id AS xref_id, pdata.*, Strain.Id AS StrainId, "
@@ -381,7 +381,7 @@ def create_new_phenotypes(# pylint: disable=[too-many-locals]
         phenotypes: Iterable[dict]
 ) -> tuple[dict, ...]:
     """Add entirely new phenotypes to the database. WARNING: Not thread-safe."""
-    _phenos = tuple()
+    _phenos: tuple[dict, ...] = tuple()
     with conn.cursor(cursorclass=DictCursor) as cursor:
         def make_next_id(idcol, table):
             cursor.execute(f"SELECT MAX({idcol}) AS last_id FROM {table}")
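
Note: in the first hunk of this file above, the phenotypes_vector_data return annotation swaps a colon for a comma. Inside a subscript a colon builds a slice rather than a (key, value) parameter pair, and type checkers reject it. For reference:

    from typing import Union

    # Valid generic syntax: two comma-separated parameters.
    VectorData = dict[tuple[int, int, int], dict[str, Union[int, float]]]

    # The colon form subscripts dict with a slice object instead of a
    # (key, value) pair, so type checkers reject it:
    #     dict[tuple[int, int, int]: dict[str, Union[int, float]]]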
@@ -430,9 +430,10 @@ def create_new_phenotypes(# pylint: disable=[too-many-locals]
             if len(batch) == 0:
                 break
 
-            params, abbrevs = reduce(__build_params_and_prepubabbrevs__,
-                                     batch,
-                                     (tuple(), tuple()))
+            params, abbrevs = reduce(#type: ignore[var-annotated]
+                __build_params_and_prepubabbrevs__,
+                batch,
+                (tuple(), tuple()))
             # Check for uniqueness for all "Pre_publication_description" values
             abbrevs_paramsstr = ", ".join(["%s"] * len(abbrevs))
             _query = ("SELECT PublishXRef.PhenotypeId, Phenotype.* "
diff --git a/uploader/phenotypes/views.py b/uploader/phenotypes/views.py
index 9df7d81..42f2e34 100644
--- a/uploader/phenotypes/views.py
+++ b/uploader/phenotypes/views.py
@@ -805,7 +805,7 @@ def update_phenotype_data(conn, data: dict):
             }
         })
 
-    values, serrs, counts = tuple(
+    values, serrs, counts = tuple(# type: ignore[var-annotated]
         tuple({
             "data_id": row[0].split("::")[0],
             "strain_id": row[0].split("::")[1],
diff --git a/uploader/population/views.py b/uploader/population/views.py
index a6e2358..caee55b 100644
--- a/uploader/population/views.py
+++ b/uploader/population/views.py
@@ -157,7 +157,7 @@ def create_population(species_id: int):
             "FullName": population_fullname,
             "InbredSetCode": request.form.get("population_code") or None,
             "Description": request.form.get("population_description") or None,
-            "Family": request.form.get("population_family").strip() or None,
+            "Family": request.form.get("population_family", "").strip() or None,
             "MappingMethodId": request.form.get("population_mapping_method_id"),
             "GeneticType": request.form.get("population_genetic_type") or None
         })
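
Note: request.form.get("population_family") is None when the field is absent, so the old .strip() call could raise AttributeError; supplying "" as the default keeps the chain safe. A minimal sketch with a bare werkzeug MultiDict standing in for request.form:

    from werkzeug.datastructures import MultiDict

    form = MultiDict()  # simulate a POST where "population_family" was omitted

    # .get() returns None for a missing key, so .strip() would raise
    # AttributeError; passing "" as the default keeps the chain safe.
    family = form.get("population_family", "").strip() or None
    assert family is None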
diff --git a/uploader/publications/datatables.py b/uploader/publications/datatables.py
index e07fafd..8b3d4a0 100644
--- a/uploader/publications/datatables.py
+++ b/uploader/publications/datatables.py
@@ -13,7 +13,7 @@ def fetch_publications(
         search: Optional[str] = None,
         offset: int = 0,
         limit: int = -1
-) -> tuple[dict, int, int, int]:
+) -> tuple[tuple[dict, ...], int, int, int]:
     """Fetch publications from the database."""
     _query = "SELECT * FROM Publication"
     _count_query = "SELECT COUNT(*) FROM Publication"
diff --git a/uploader/publications/misc.py b/uploader/publications/misc.py
index fca6f71..f0ff9c7 100644
--- a/uploader/publications/misc.py
+++ b/uploader/publications/misc.py
@@ -4,10 +4,10 @@
 def publications_differences(
         filedata: tuple[dict, ...],
         dbdata: tuple[dict, ...],
-        pubmedid2pubidmap: tuple[dict, ...]
+        pubmedid2pubidmap: dict[int, int]
 ) -> tuple[dict, ...]:
     """Compute the differences between file data and db data"""
-    diff = tuple()
+    diff: tuple[dict, ...] = tuple()
     for filerow, dbrow in zip(
             sorted(filedata, key=lambda item: (
                 item["phenotype_id"], item["xref_id"])),
diff --git a/uploader/publications/pubmed.py b/uploader/publications/pubmed.py
index 2531c4a..15bf701 100644
--- a/uploader/publications/pubmed.py
+++ b/uploader/publications/pubmed.py
@@ -1,5 +1,6 @@
 """Module to interact with NCBI's PubMed"""
 import logging
+from typing import Optional
 
 import requests
 from lxml import etree
@@ -40,7 +41,7 @@ def __pages__(pagination: etree.Element) -> str:
     )) if start is not None else ""
 
 
-def __abstract__(article: etree.Element) -> str:
+def __abstract__(article: etree.Element) -> Optional[str]:
     abstract = article.find("Abstract/AbstractText")
     return abstract.text if abstract is not None else None
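
Note: lxml's find() returns None when the element is absent, so this helper genuinely can return None and Optional[str] is the honest annotation. A small self-contained sketch:

    from typing import Optional
    from lxml import etree

    def abstract_text(article: etree._Element) -> Optional[str]:
        # find() returns None when the element is missing, hence Optional[str].
        node = article.find("Abstract/AbstractText")
        return node.text if node is not None else None

    doc = etree.fromstring("<PubmedArticle><Abstract>"
                           "<AbstractText>Example.</AbstractText>"
                           "</Abstract></PubmedArticle>")
    assert abstract_text(doc) == "Example."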
 
diff --git a/uploader/publications/views.py b/uploader/publications/views.py
index f0ec95a..11732db 100644
--- a/uploader/publications/views.py
+++ b/uploader/publications/views.py
@@ -140,14 +140,14 @@ def edit_publication(publication_id: int):
         _pub = update_publications(conn, ({
             "publication_id": publication_id,
             "pubmed_id": form.get("pubmed-id") or None,
-            "abstract": form.get("publication-abstract").encode("utf8") or None,
-            "authors": form.get("publication-authors").encode("utf8"),
-            "title":  form.get("publication-title").encode("utf8") or None,
-            "journal": form.get("publication-journal").encode("utf8") or None,
-            "volume": form.get("publication-volume").encode("utf8") or None,
-            "pages": form.get("publication-pages").encode("utf8") or None,
+            "abstract": (form.get("publication-abstract") or "").encode("utf8") or None,
+            "authors": (form.get("publication-authors") or "").encode("utf8"),
+            "title":  (form.get("publication-title") or "").encode("utf8") or None,
+            "journal": (form.get("publication-journal") or "").encode("utf8") or None,
+            "volume": (form.get("publication-volume") or "").encode("utf8") or None,
+            "pages": (form.get("publication-pages") or "").encode("utf8") or None,
             "month": (form.get("publication-month") or "").encode("utf8").capitalize() or None,
-            "year": form.get("publication-year").encode("utf8") or None
+            "year": (form.get("publication-year") or "").encode("utf8") or None
         },))
 
         if not _pub:
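
Note: the (value or "").encode("utf8") or None pattern tolerates missing form fields and also maps empty submissions back to None, since empty bytes are falsy. A small sketch of that behaviour; the plain dict below stands in for request.form:

    form = {"publication-title": "", "publication-year": None}

    def field_bytes(value):
        # (value or "") tolerates a missing/None field, .encode() yields bytes,
        # and the trailing "or None" turns empty b"" back into a NULL-able None.
        return (value or "").encode("utf8") or None

    assert field_bytes(form["publication-title"]) is None   # empty string -> None
    assert field_bytes(form["publication-year"]) is None    # missing/None -> None
    assert field_bytes("GeneNetwork") == b"GeneNetwork"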
diff --git a/uploader/route_utils.py b/uploader/route_utils.py
index fa63233..426d7eb 100644
--- a/uploader/route_utils.py
+++ b/uploader/route_utils.py
@@ -56,7 +56,7 @@ def generic_select_population(
 def redirect_to_next(default: dict):
     """Redirect to the next uri if specified, else redirect to default."""
     assert "uri" in default, "You must provide at least the 'uri' value."
-    _next = request.args.get("next")
+    _next = request.args.get("next") or ""
     if bool(_next):
         try:
             next_page = base64_decode_to_dict(_next)
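
Note: the `next` value decoded here is produced by build_next_argument, whose return type becomes bytes in the hunk below, presumably because base64-encoding a JSON payload yields bytes that base64_decode_to_dict later reverses. A hypothetical round trip showing the shape; encode_next/decode_next are illustrative stand-ins, not the module's real helpers:

    import json
    import base64

    def encode_next(uri: str, **kwargs) -> bytes:
        # base64.b64encode returns bytes, matching the annotation change below.
        return base64.b64encode(json.dumps({"uri": uri, **kwargs}).encode("utf8"))

    def decode_next(token: str) -> dict:
        return json.loads(base64.b64decode(token))

    token = encode_next("/species/populations", species_id=1)
    assert decode_next(token.decode("utf8")) == {"uri": "/species/populations",
                                                 "species_id": 1}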
@@ -73,7 +73,7 @@ def redirect_to_next(default: dict):
         **{key:value for key,value in default.items() if key != "uri"}))
 
 
-def build_next_argument(uri: str, **kwargs) -> str:
+def build_next_argument(uri: str, **kwargs) -> bytes:
     """Build the `next` URI argument from provided details."""
     dumps_keywords = (
         "skipkeys", "ensure_ascii", "check_circular", "allow_nan", "cls",