author     Frederick Muriuki Muriithi  2026-01-26 14:47:50 -0600
committer  Frederick Muriuki Muriithi  2026-01-26 14:47:50 -0600
commit     b807626b37e9ac743287bfc7e092e71d77da5c26 (patch)
tree       9fc550f625cd6bee094e1bc16396c727c819cb03
parent     5705666ac0025ecc83c9cdb4ee0ebf94983ee069 (diff)
download   gn-uploader-b807626b37e9ac743287bfc7e092e71d77da5c26.tar.gz
Provide some utilities to fetch common config variables.
-rw-r--r--  uploader/configutils.py       13
-rw-r--r--  uploader/files/chunks.py       4
-rw-r--r--  uploader/files/functions.py    4
-rw-r--r--  uploader/files/views.py        4
-rw-r--r--  uploader/phenotypes/views.py  19
5 files changed, 32 insertions, 12 deletions
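
The new helpers centralise configuration lookups that the replaced call sites previously made directly against app.config. A minimal usage sketch, assuming the uploader package is importable and a Flask app whose config defines UPLOADS_DIR (the path below is illustrative, not part of this commit):

    from pathlib import Path

    from flask import Flask

    from uploader.configutils import fetch_setting, uploads_dir

    app = Flask(__name__)
    app.config["UPLOADS_DIR"] = "/tmp/gn-uploads"  # hypothetical location
    Path("/tmp/gn-uploads").mkdir(exist_ok=True)   # uploads_dir() asserts the directory exists

    # Generic lookup: equivalent to app.config["UPLOADS_DIR"].
    print(fetch_setting(app, "UPLOADS_DIR"))

    # Validated lookup: returns an absolute Path, failing early on misconfiguration.
    print(uploads_dir(app) / "job_errors")
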
diff --git a/uploader/configutils.py b/uploader/configutils.py
new file mode 100644
index 0000000..7cdb8b2
--- /dev/null
+++ b/uploader/configutils.py
@@ -0,0 +1,13 @@
+"""Functions to fetch settings."""
+from pathlib import Path
+
+def fetch_setting(app, setting):
+    """Fetch a specified configuration `setting` from the `app` object."""
+    return app.config[setting]
+
+def uploads_dir(app) -> Path:
+    """Fetch the uploads directory"""
+    _dir = Path(fetch_setting(app, "UPLOADS_DIR")).absolute()
+    assert _dir.exists() and _dir.is_dir(), (
+        f"'{_dir}' needs to be an existing directory.")
+    return _dir
diff --git a/uploader/files/chunks.py b/uploader/files/chunks.py
index c4360b5..f63f32f 100644
--- a/uploader/files/chunks.py
+++ b/uploader/files/chunks.py
@@ -5,6 +5,8 @@ from typing import Iterator
 from flask import current_app as app
 from werkzeug.utils import secure_filename
 
+from uploader.configutils import uploads_dir
+
 
 def chunked_binary_read(filepath: Path, chunksize: int = 2048) -> Iterator:
     """Read a file in binary mode in chunks."""
@@ -29,4 +31,4 @@ def chunks_directory(uniqueidentifier: str) -> Path:
     """Compute the directory where chunks are temporarily stored."""
     if uniqueidentifier == "":
         raise ValueError("Unique identifier cannot be empty!")
-    return Path(app.config["UPLOAD_FOLDER"], f"tempdir_{uniqueidentifier}")
+    return Path(uploads_dir(app), f"tempdir_{uniqueidentifier}")
diff --git a/uploader/files/functions.py b/uploader/files/functions.py
index 7b9f06b..68f4e16 100644
--- a/uploader/files/functions.py
+++ b/uploader/files/functions.py
@@ -8,6 +8,8 @@ from flask import current_app
 from werkzeug.utils import secure_filename
 from werkzeug.datastructures import FileStorage
 
+from uploader.configutils import uploads_dir
+
 from .chunks import chunked_binary_read
 
 def save_file(fileobj: FileStorage, upload_dir: Path, hashed: bool = True) -> Path:
@@ -30,7 +32,7 @@ def save_file(fileobj: FileStorage, upload_dir: Path, hashed: bool = True) -> Pa
 
 def fullpath(filename: str):
     """Get a file's full path. This makes use of `flask.current_app`."""
-    return Path(current_app.config["UPLOAD_FOLDER"], filename).absolute()
+    return Path(uploads_dir(current_app), filename).absolute()
 
 
 def sha256_digest_over_file(filepath: Path) -> str:
diff --git a/uploader/files/views.py b/uploader/files/views.py
index 29059c7..ea0e827 100644
--- a/uploader/files/views.py
+++ b/uploader/files/views.py
@@ -6,13 +6,15 @@ from pathlib import Path
 
 from flask import request, jsonify, Blueprint, current_app as app
 
+from uploader.configutils import uploads_dir
+
 from .chunks import chunk_name, chunks_directory
 
 files = Blueprint("files", __name__)
 
 def target_file(fileid: str) -> Path:
     """Compute the full path for the target file."""
-    return Path(app.config["UPLOAD_FOLDER"], fileid)
+    return Path(uploads_dir(app), fileid)
 
 
 @files.route("/upload/resumable", methods=["GET"])
diff --git a/uploader/phenotypes/views.py b/uploader/phenotypes/views.py
index b6a8c86..2cf0ca0 100644
--- a/uploader/phenotypes/views.py
+++ b/uploader/phenotypes/views.py
@@ -34,6 +34,7 @@ from r_qtl import exceptions as rqe
 from uploader import jobs
 from uploader import session
 from uploader.files import save_file
+from uploader.configutils import uploads_dir
 from uploader.flask_extensions import url_for
 from uploader.ui import make_template_renderer
 from uploader.oauth2.client import oauth2_post
@@ -329,7 +330,7 @@ def process_phenotypes_rqtl2_bundle(error_uri):
     try:
         ## Handle huge files here...
         phenobundle = save_file(request.files["phenotypes-bundle"],
-                                Path(app.config["UPLOAD_FOLDER"]))
+                                uploads_dir(app))
         rqc.validate_bundle(phenobundle)
         return phenobundle
     except AssertionError as _aerr:
@@ -352,7 +353,7 @@ def process_phenotypes_individual_files(error_uri):
         "comment.char": form["file-comment-character"],
         "na.strings": form["file-na"].split(" "),
     }
-    bundlepath = Path(app.config["UPLOAD_FOLDER"],
+    bundlepath = Path(uploads_dir(app),
                       f"{str(uuid.uuid4()).replace('-', '')}.zip")
     with ZipFile(bundlepath,mode="w") as zfile:
         for rqtlkey, formkey, _type in (
@@ -370,7 +371,7 @@ def process_phenotypes_individual_files(error_uri):
                 # Chunked upload of large files was used
                 filedata = json.loads(form[formkey])
                 zfile.write(
-                    Path(app.config["UPLOAD_FOLDER"], filedata["uploaded-file"]),
+                    Path(uploads_dir(app), filedata["uploaded-file"]),
                     arcname=filedata["original-name"])
                 cdata[rqtlkey] = cdata.get(rqtlkey, []) + [filedata["original-name"]]
             else:
@@ -382,9 +383,9 @@ def process_phenotypes_individual_files(error_uri):
                     return error_uri
 
                 filepath = save_file(
-                    _sentfile, Path(app.config["UPLOAD_FOLDER"]), hashed=False)
+                    _sentfile, uploads_dir(app), hashed=False)
                 zfile.write(
-                    Path(app.config["UPLOAD_FOLDER"], filepath),
+                    Path(uploads_dir(app), filepath),
                     arcname=filepath.name)
                 cdata[rqtlkey] = cdata.get(rqtlkey, []) + [filepath.name]
 
@@ -464,7 +465,7 @@ def add_phenotypes(species: dict, population: dict, dataset: dict, **kwargs):# p
                     **({"publicationid": request.form["publication-id"]}
                        if request.form.get("publication-id") else {})})}),
             _redisuri,
-            f"{app.config['UPLOAD_FOLDER']}/job_errors")
+            f"{uploads_dir(app)}/job_errors")
 
         app.logger.debug("JOB DETAILS: %s", _job)
         jobstatusuri = url_for("species.populations.phenotypes.job_status",
@@ -685,7 +686,7 @@ def load_data_to_database(
             lambda job: gnlibs_jobs.launch_job(
                 job,
                 _jobs_db,
-                Path(f"{app.config['UPLOAD_FOLDER']}/job_errors"),
+                Path(f"{uploads_dir(app)}/job_errors"),
                 worker_manager="gn_libs.jobs.launcher",
                 loglevel=_loglevel)
         ).either(__handle_error__, __handle_success__)
@@ -1063,7 +1064,7 @@ def recompute_means(# pylint: disable=[unused-argument]
                 },
                 external_id=session.logged_in_user_id()),
             _jobs_db,
-            Path(f"{app.config['UPLOAD_FOLDER']}/job_errors"),
+            Path(f"{uploads_dir(app)}/job_errors"),
             worker_manager="gn_libs.jobs.launcher",
             loglevel=_loglevel)
         return redirect(url_for("background-jobs.job_status",
@@ -1143,7 +1144,7 @@ def rerun_qtlreaper(# pylint: disable=[unused-argument]
             },
             external_id=session.logged_in_user_id()),
             _jobs_db,
-            Path(f"{app.config['UPLOAD_FOLDER']}/job_errors"),
+            Path(f"{uploads_dir(app)}/job_errors"),
             worker_manager="gn_libs.jobs.launcher",
             loglevel=_loglevel)
         return redirect(url_for("background-jobs.job_status",
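
Note that uploads_dir() reads the UPLOADS_DIR key, whereas the call sites it replaces read UPLOAD_FOLDER, so deployments need that setting defined. A hypothetical settings snippet (the key name comes from the new helper; the value is illustrative):

    # Flask instance configuration (illustrative value)
    UPLOADS_DIR = "/var/lib/gn-uploader/uploads"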