-rw-r--r--  wqflask/base/data_set/__init__.py                              |  10
-rw-r--r--  wqflask/base/data_set/datasetgroup.py                          |  22
-rw-r--r--  wqflask/base/data_set/datasettype.py                           |   7
-rw-r--r--  wqflask/base/data_set/markers.py                               |  10
-rw-r--r--  wqflask/base/data_set/utils.py                                 |  10
-rw-r--r--  wqflask/base/trait.py                                          |  20
-rw-r--r--  wqflask/base/webqtlCaseData.py                                 |   5
-rw-r--r--  wqflask/base/webqtlConfig.py                                   |  90
-rw-r--r--  wqflask/runserver.py                                           |   2
-rw-r--r--  wqflask/utility/authentication_tools.py                        |  11
-rw-r--r--  wqflask/utility/configuration.py                               | 165
-rw-r--r--  wqflask/utility/hmac.py                                        |   8
-rw-r--r--  wqflask/utility/startup_config.py                              |  23
-rw-r--r--  wqflask/utility/tools.py                                       |  93
-rw-r--r--  wqflask/wqflask/__init__.py                                    |  15
-rw-r--r--  wqflask/wqflask/collect.py                                     |  31
-rw-r--r--  wqflask/wqflask/correlation/pre_computes.py                    |  21
-rw-r--r--  wqflask/wqflask/correlation_matrix/show_corr_matrix.py         |   6
-rw-r--r--  wqflask/wqflask/marker_regression/display_mapping_results.py   |   9
-rw-r--r--  wqflask/wqflask/marker_regression/plink_mapping.py             |  20
-rw-r--r--  wqflask/wqflask/marker_regression/rqtl_mapping.py              |  14
-rw-r--r--  wqflask/wqflask/marker_regression/run_mapping.py               |  10
-rw-r--r--  wqflask/wqflask/partial_correlations_views.py                  |  19
-rw-r--r--  wqflask/wqflask/search_results.py                              |   6
-rw-r--r--  wqflask/wqflask/top_level_routes.py                            |  16
-rw-r--r--  wqflask/wqflask/views.py                                       | 147
26 files changed, 465 insertions(+), 325 deletions(-)
diff --git a/wqflask/base/data_set/__init__.py b/wqflask/base/data_set/__init__.py
index e49c6a93..ad51e47e 100644
--- a/wqflask/base/data_set/__init__.py
+++ b/wqflask/base/data_set/__init__.py
@@ -6,11 +6,14 @@ import pickle as pickle
 
 # 3rd-party imports
 from redis import Redis
+from flask import current_app as app
 
 # local imports
-from .dataset import DataSet
 from base import webqtlConfig
-from utility.tools import USE_REDIS
+from wqflask.database import database_connection
+from utility.configuration import get_setting_bool
+
+from .dataset import DataSet
 from .datasettype import DatasetType
 from .tempdataset import TempDataSet
 from .datasetgroup import DatasetGroup
@@ -18,7 +21,6 @@ from .utils import query_table_timestamp
 from .genotypedataset import GenotypeDataSet
 from .phenotypedataset import PhenotypeDataSet
 from .mrnaassaydataset import MrnaAssayDataSet
-from wqflask.database import database_connection
 
 # Used by create_database to instantiate objects
 # Each subclass will add to this
@@ -113,7 +115,7 @@ def datasets(group_name, this_group=None, redis_conn=Redis()):
                 dataset_menu.append(dict(tissue=tissue_name,
                                          datasets=[(dataset, dataset_short)]))
 
-    if USE_REDIS:
+    if get_setting_bool(app, "USE_REDIS"):
         redis_conn.set(key, pickle.dumps(dataset_menu, pickle.HIGHEST_PROTOCOL))
         redis_conn.expire(key, 60 * 5)
 
diff --git a/wqflask/base/data_set/datasetgroup.py b/wqflask/base/data_set/datasetgroup.py
index 72577f38..90c59a1e 100644
--- a/wqflask/base/data_set/datasetgroup.py
+++ b/wqflask/base/data_set/datasetgroup.py
@@ -3,6 +3,7 @@
 import os
 import json
 
+from flask import current_app as app
 
 from base import webqtlConfig
 from .markers import Markers, HumanMarkers
@@ -11,11 +12,11 @@ from utility import gen_geno_ob
 from db import webqtlDatabaseFunction
 from maintenance import get_group_samplelists
 from wqflask.database import database_connection
-from utility.tools import (
+from utility.configuration import (
     locate,
-    USE_REDIS,
     flat_files,
     flat_file_exists,
+    get_setting_bool,
     locate_ignore_error)
 
 class DatasetGroup:
@@ -87,8 +88,8 @@ class DatasetGroup:
 
     def get_markers(self):
         def check_plink_gemma():
-            if flat_file_exists("mapping"):
-                MAPPING_PATH = flat_files("mapping") + "/"
+            if flat_file_exists(app, "mapping"):
+                MAPPING_PATH = flat_files(app, "mapping") + "/"
                 if os.path.isfile(MAPPING_PATH + self.name + ".bed"):
                     return True
             return False
@@ -117,6 +118,7 @@ class DatasetGroup:
 
     def get_study_samplelists(self):
         study_sample_file = locate_ignore_error(
+            app,
             self.name + ".json", 'study_sample_lists')
         try:
             f = open(study_sample_file)
@@ -137,13 +139,15 @@ class DatasetGroup:
     def get_samplelist(self, redis_conn):
         result = None
         key = "samplelist:v3:" + self.name
+        USE_REDIS = get_setting_bool(app, "USE_REDIS")
         if USE_REDIS:
             result = redis_conn.get(key)
 
         if result is not None:
             self.samplelist = json.loads(result)
         else:
-            genotype_fn = locate_ignore_error(self.name + ".geno", 'genotype')
+            genotype_fn = locate_ignore_error(
+                app, self.name + ".geno", 'genotype')
             if genotype_fn:
                 self.samplelist = get_group_samplelists.get_samplelist(
                     "geno", genotype_fn)
@@ -168,12 +172,12 @@ class DatasetGroup:
         # reaper barfs on unicode filenames, so here we ensure it's a string
         if self.genofile:
             if "RData" in self.genofile:  # ZS: This is a temporary fix; I need to change the way the JSON files that point to multiple genotype files are structured to point to other file types like RData
-                full_filename = str(
-                    locate(self.genofile.split(".")[0] + ".geno", 'genotype'))
+                full_filename = str(locate(
+                    app, self.genofile.split(".")[0] + ".geno", 'genotype'))
             else:
-                full_filename = str(locate(self.genofile, 'genotype'))
+                full_filename = str(locate(app, self.genofile, 'genotype'))
         else:
-            full_filename = str(locate(self.name + '.geno', 'genotype'))
+            full_filename = str(locate(app, self.name + '.geno', 'genotype'))
         genotype_1 = gen_geno_ob.genotype(full_filename)
 
         if genotype_1.type == "group" and self.parlist:
diff --git a/wqflask/base/data_set/datasettype.py b/wqflask/base/data_set/datasettype.py
index 05f0f564..ab36a797 100644
--- a/wqflask/base/data_set/datasettype.py
+++ b/wqflask/base/data_set/datasettype.py
@@ -4,11 +4,10 @@ import json
 import requests
 from typing import Optional, Dict
 
-
 from redis import Redis
+from flask import current_app as app
 
-
-from utility.tools import GN2_BASE_URL
+from utility.configuration import get_setting
 from wqflask.database import database_connection
 
 
@@ -41,7 +40,7 @@ class DatasetType:
             # emptied
             try:
                 data = json.loads(requests.get(
-                    GN2_BASE_URL + "/api/v_pre1/gen_dropdown",
+                    get_setting(app, "GN2_BASE_URL") + "/api/v_pre1/gen_dropdown",
                     timeout=5).content)
                 for _species in data['datasets']:
                     for group in data['datasets'][_species]:
diff --git a/wqflask/base/data_set/markers.py b/wqflask/base/data_set/markers.py
index 6f56445e..2fa7cce0 100644
--- a/wqflask/base/data_set/markers.py
+++ b/wqflask/base/data_set/markers.py
@@ -2,16 +2,18 @@
 
 import math
 
-from utility.tools import locate, flat_files
+from flask import current_app as app
+
+from utility.configuration import locate, flat_files
 
 class Markers:
     """Todo: Build in cacheing so it saves us reading the same file more than once"""
 
     def __init__(self, name):
-        json_data_fh = open(locate(name + ".json", 'genotype/json'))
+        json_data_fh = open(locate(app, name + ".json", 'genotype/json'))
 
         markers = []
-        with open("%s/%s_snps.txt" % (flat_files('genotype/bimbam'), name), 'r') as bimbam_fh:
+        with open("%s/%s_snps.txt" % (flat_files(app, 'genotype/bimbam'), name), 'r') as bimbam_fh:
             if len(bimbam_fh.readline().split(", ")) > 2:
                 delimiter = ", "
             elif len(bimbam_fh.readline().split(",")) > 2:
@@ -73,7 +75,7 @@ class HumanMarkers(Markers):
     "Markers for humans ..."
 
     def __init__(self, name, specified_markers=[]):
-        marker_data_fh = open(flat_files('mapping') + '/' + name + '.bim')
+        marker_data_fh = open(flat_files(app, 'mapping') + '/' + name + '.bim')
         self.markers = []
         for line in marker_data_fh:
             splat = line.strip().split()
diff --git a/wqflask/base/data_set/utils.py b/wqflask/base/data_set/utils.py
index 703fee04..465538af 100644
--- a/wqflask/base/data_set/utils.py
+++ b/wqflask/base/data_set/utils.py
@@ -6,9 +6,9 @@ import json
 import hashlib
 from typing import List
 
+from flask import current_app as app
 
-from utility.tools import SQL_URI
-from base.webqtlConfig import TMPDIR
+from utility.configuration import get_setting
 from wqflask.database import parse_db_url, database_connection
 
 def geno_mrna_confidentiality(ob):
@@ -27,7 +27,7 @@ def query_table_timestamp(dataset_type: str):
 
     # computation data and actions
     with database_connection() as conn, conn.cursor() as cursor:
-        fetch_db_name = parse_db_url(SQL_URI)
+        fetch_db_name = parse_db_url(get_setting(app, "SQL_URI"))
         cursor.execute(
             "SELECT UPDATE_TIME FROM "
             "information_schema.tables "
@@ -57,7 +57,7 @@ def cache_dataset_results(dataset_name: str, dataset_type: str, samplelist: List
     samplelist_as_str = ",".join(samplelist)
 
     file_name = generate_hash_file(dataset_name, dataset_type, table_timestamp, samplelist_as_str)
-    file_path = os.path.join(TMPDIR, f"{file_name}.json")
+    file_path = os.path.join(app.config["WEBQTL_TMPDIR"], f"{file_name}.json")
 
     with open(file_path, "w") as file_handler:
         json.dump(query_results, file_handler)
@@ -70,7 +70,7 @@ def fetch_cached_results(dataset_name: str, dataset_type: str, samplelist: List)
     samplelist_as_str = ",".join(samplelist)
 
     file_name = generate_hash_file(dataset_name, dataset_type, table_timestamp, samplelist_as_str)
-    file_path = os.path.join(TMPDIR, f"{file_name}.json")
+    file_path = os.path.join(app.config["WEBQTL_TMPDIR"], f"{file_name}.json")
     try:
         with open(file_path, "r") as file_handler:
 
diff --git a/wqflask/base/trait.py b/wqflask/base/trait.py
index 37085448..70afa2cc 100644
--- a/wqflask/base/trait.py
+++ b/wqflask/base/trait.py
@@ -1,22 +1,21 @@
 import requests
 import simplejson as json
-from wqflask import app
+
+from flask import g, request, url_for, Blueprint, current_app as app
 
 import utility.hmac as hmac
 from base import webqtlConfig
 from base.webqtlCaseData import webqtlCaseData
 from base.data_set import create_dataset
 from utility.authentication_tools import check_resource_availability
-from utility.tools import GN2_BASE_URL
+from utility.configuration import get_setting
 from utility.redis_tools import get_redis_conn, get_resource_id
 
-from flask import g, request, url_for
-
 from wqflask.database import database_connection
 
 
 Redis = get_redis_conn()
-
+trait_bp = Blueprint("trait", __name__)
 
 def create_trait(**kw):
     assert bool(kw.get('dataset')) != bool(
@@ -173,11 +172,14 @@ class GeneralTrait:
         alias = 'Not available'
         if self.symbol:
             human_response = requests.get(
-                GN2_BASE_URL + "gn3/gene/aliases/" + self.symbol.upper())
+                get_setting(app, "GN2_BASE_URL") + "gn3/gene/aliases/" +
+                self.symbol.upper())
             mouse_response = requests.get(
-                GN2_BASE_URL + "gn3/gene/aliases/" + self.symbol.capitalize())
+                get_setting(app, "GN2_BASE_URL") + "gn3/gene/aliases/" +
+                self.symbol.capitalize())
             other_response = requests.get(
-                GN2_BASE_URL + "gn3/gene/aliases/" + self.symbol.lower())
+                get_setting(app, "GN2_BASE_URL") + "gn3/gene/aliases/" +
+                self.symbol.lower())
 
             if human_response and mouse_response and other_response:
                 alias_list = json.loads(human_response.content) + json.loads(
@@ -254,7 +256,7 @@ def retrieve_sample_data(trait, dataset, samplelist=None):
     return trait
 
 
-@app.route("/trait/get_sample_data")
+@trait_bp.route("/trait/get_sample_data")
 def get_sample_data():
     params = request.args
     trait = params['trait']
diff --git a/wqflask/base/webqtlCaseData.py b/wqflask/base/webqtlCaseData.py
index dd6fad04..d144a342 100644
--- a/wqflask/base/webqtlCaseData.py
+++ b/wqflask/base/webqtlCaseData.py
@@ -21,11 +21,6 @@
 # Created by GeneNetwork Core Team 2010/08/10
 
 
-import utility.tools
-
-utility.tools.show_settings()
-
-
 class webqtlCaseData:
     """one case data in one trait"""
 
diff --git a/wqflask/base/webqtlConfig.py b/wqflask/base/webqtlConfig.py
index a7dbed3d..296bd314 100644
--- a/wqflask/base/webqtlConfig.py
+++ b/wqflask/base/webqtlConfig.py
@@ -8,7 +8,14 @@
 #
 #########################################
 import os
-from utility.tools import valid_path, mk_dir, assert_dir, assert_writable_dir, flat_files, TEMPDIR
+from functools import partial
+
+from utility.configuration import (
+    mk_dir,
+    valid_path,
+    flat_files,
+    assert_dir,
+    assert_writable_dir)
 
 # Debug Level
 # 1 for debug, mod python will reload import each time
@@ -69,39 +76,48 @@ PHENOGEN_URL = "https://phenogen.org/gene.jsp?speciesCB=Rn&auto=Y&geneTxt=%s&gen
 RRID_MOUSE_URL = "https://www.jax.org/strain/%s"
 RRID_RAT_URL = "https://rgd.mcw.edu/rgdweb/report/strain/main.html?id=%s"
 
-# Temporary storage (note that this TMPDIR can be set as an
-# environment variable - use utility.tools.TEMPDIR when you
-# want to reach this base dir
-assert_writable_dir(TEMPDIR)
-
-TMPDIR = mk_dir(TEMPDIR + '/gn2/')
-assert_writable_dir(TMPDIR)
-
-CACHEDIR = mk_dir(TMPDIR + '/cache/')
-# We can no longer write into the git tree:
-GENERATED_IMAGE_DIR = mk_dir(TMPDIR + 'generated/')
-GENERATED_TEXT_DIR = mk_dir(TMPDIR + 'generated_text/')
-
-# Make sure we have permissions to access these
-assert_writable_dir(CACHEDIR)
-assert_writable_dir(GENERATED_IMAGE_DIR)
-assert_writable_dir(GENERATED_TEXT_DIR)
-
-# Flat file directories
-GENODIR = flat_files('genotype') + '/'
-assert_dir(GENODIR)
-# assert_dir(GENODIR+'bimbam') # for gemma
-
-# JSON genotypes are OBSOLETE
-JSON_GENODIR = flat_files('genotype/json') + '/'
-if not valid_path(JSON_GENODIR):
-    # fall back on old location (move the dir, FIXME)
-    JSON_GENODIR = flat_files('json')
-
-
-TEXTDIR = os.path.join(os.environ.get(
-    "GNSHARE", "/gnshare/gn/"), "web/ProbeSetFreeze_DataMatrix")
-# Are we using the following...?
-PORTADDR = "http://50.16.251.170"
-INFOPAGEHREF = '/dbdoc/%s.html'
-CGIDIR = '/webqtl/'  # XZ: The variable name 'CGIDIR' should be changed to 'PYTHONDIR'
+def mkdir_with_assert_writable(parent_dir, child_dir):
+    """
+    Make a directory `child_dir` as a child of `parent_dir` asserting that they
+    are both writable."""
+    return assert_writable_dir(mk_dir(
+        assert_writable_dir(parent_dir) + child_dir))
+
+def init_app(app):
+    """Initialise the application with configurations for webqtl."""
+    # Temporary storage (note that TMPDIR can be set as an
+    # environment variable - read app.config["TEMPDIR"] when you
+    # want to reach this base dir)
+    TEMPDIR = app.config["TEMPDIR"]
+    mkdir_with_temp_dir = lambda child: mkdir_with_assert_writable(
+        TEMPDIR, child)
+    WEBQTL_TMPDIR = mkdir_with_temp_dir("/gn2/")
+    app.config["WEBQTL_TMPDIR"] = WEBQTL_TMPDIR
+    app.config["WEBQTL_CACHEDIR"] = mkdir_with_temp_dir(
+        f"{WEBQTL_TMPDIR}cache/")
+
+    # We can no longer write into the git tree:
+    app.config["WEBQTL_GENERATED_IMAGE_DIR"] = mkdir_with_temp_dir(
+        f"{WEBQTL_TMPDIR}generated/")
+    app.config["WEBQTL_GENERATED_TEXT_DIR"] = mkdir_with_temp_dir(
+        f"{WEBQTL_TMPDIR}generated_text/")
+
+    # Flat file directories
+    app.config["WEBQTL_GENODIR"] = flat_files(app, 'genotype/')
+
+    # JSON genotypes are OBSOLETE
+    WEBQTL_JSON_GENODIR = flat_files(app, 'genotype/json/')
+    if not valid_path(WEBQTL_JSON_GENODIR):
+        # fall back on old location (move the dir, FIXME)
+        WEBQTL_JSON_GENODIR = flat_files(app, 'json')
+    app.config["WEBQTL_JSON_GENODIR"] = WEBQTL_JSON_GENODIR
+
+
+    app.config["WEBQTL_TEXTDIR"] = os.path.join(
+        app.config.get("GNSHARE", "/gnshare/gn/"),
+        "web/ProbeSetFreeze_DataMatrix")
+    # Are we using the following...?
+    app.config["WEBQTL_PORTADDR"] = "http://50.16.251.170"
+    app.config["WEBQTL_INFOPAGEHREF"] = '/dbdoc/%s.html'
+    app.config["WEBQTL_CGIDIR"] = '/webqtl/'  # XZ: The variable name 'CGIDIR' should be changed to 'PYTHONDIR'
+    return app
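
init_app() above replaces the old import-time directory setup with per-application configuration; wqflask/__init__.py later in this diff calls it as app = webqtlConfig.init_app(app) once TEMPDIR is set. A minimal standalone sketch of exercising it, assuming the wqflask packages are importable, and using scratch directories because init_app asserts that TEMPDIR is writable and that GENENETWORK_FILES contains genotype/ and genotype/json/:

    import os
    import tempfile

    from flask import Flask

    from base import webqtlConfig  # the module patched above

    app = Flask(__name__)

    # Scratch layout purely for the sketch; real deployments point these at GN data.
    gn_files = tempfile.mkdtemp()
    os.makedirs(os.path.join(gn_files, "genotype", "json"))
    app.config["GENENETWORK_FILES"] = gn_files
    app.config["TEMPDIR"] = tempfile.mkdtemp()

    app = webqtlConfig.init_app(app)
    print(app.config["WEBQTL_TMPDIR"])        # <TEMPDIR>/gn2/
    print(app.config["WEBQTL_GENODIR"])       # <GENENETWORK_FILES>/genotype/
    print(app.config["WEBQTL_JSON_GENODIR"])  # <GENENETWORK_FILES>/genotype/json/
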
diff --git a/wqflask/runserver.py b/wqflask/runserver.py
index fee29be1..6dedbd24 100644
--- a/wqflask/runserver.py
+++ b/wqflask/runserver.py
@@ -18,7 +18,7 @@ GREEN = '\033[92m'
 BOLD = '\033[1m'
 ENDC = '\033[0m'
 
-app_config()
+app_config(app)
 
 werkzeug_logger = logging.getLogger('werkzeug')
 
diff --git a/wqflask/utility/authentication_tools.py b/wqflask/utility/authentication_tools.py
index 7d80b3fb..3d732228 100644
--- a/wqflask/utility/authentication_tools.py
+++ b/wqflask/utility/authentication_tools.py
@@ -1,7 +1,7 @@
 import json
 import requests
 
-from flask import g
+from flask import g, current_app as app
 from wqflask.database import database_connection
 from base import webqtlConfig
 
@@ -9,7 +9,7 @@ from utility.redis_tools import (get_redis_conn,
                                  get_resource_info,
                                  get_resource_id,
                                  add_resource)
-from utility.tools import GN_PROXY_URL
+from utility.configuration import get_setting
 
 Redis = get_redis_conn()
 
@@ -37,7 +37,7 @@ def check_resource_availability(dataset, user_id, trait_id=None):
         return webqtlConfig.SUPER_PRIVILEGES
 
     response = None
-    the_url = f"{GN_PROXY_URL}available?resource={resource_id}&user={user_id}"
+    the_url = f"{get_setting('GN_PROXY_URL')}available?resource={resource_id}&user={user_id}"
     try:
         response = json.loads(requests.get(the_url).content)
     except:
@@ -93,8 +93,9 @@ def get_group_code(dataset):
 
 
 def check_admin(resource_id=None):
-    the_url = GN_PROXY_URL + "available?resource={}&user={}".format(
-        resource_id, g.user_session.user_id)
+    the_url = (
+        f"{get_setting('GN_PROXY_URL')}available?resource={resource_id}"
+        f"&user={g.user_session.user_id}")
     try:
         response = json.loads(requests.get(the_url).content)['admin']
     except:
diff --git a/wqflask/utility/configuration.py b/wqflask/utility/configuration.py
new file mode 100644
index 00000000..933d9626
--- /dev/null
+++ b/wqflask/utility/configuration.py
@@ -0,0 +1,165 @@
+"""Functions used in setting up configurations."""
+import os # replace os.path with pathlib.Path
+import sys
+import logging
+
+from flask import current_app
+
+logger = logging.getLogger(__name__)
+
+def override_from_envvars(app):
+    """
+    Override `app` configuration values with the environment variables of the same names.
+    """
+    configs = dict((key, value.strip()) for key,value in
+                   ((key, os.environ.get(key)) for key in app.config.keys())
+                   if value is not None and value != "")
+    app.config.update(**configs)
+    return app
+
+def get_setting(app, setting_id, guess=None):
+    """Resolve a setting from the `app`."""
+    setting = app.config.get(setting_id, guess or "")
+    if setting is None or setting == "":
+        raise Exception(
+            f"{setting_id} setting unknown or faulty "
+            "(update default_settings.py?).")
+    return setting
+
+def get_setting_bool(app, setting_id):
+    v = get_setting(app, setting_id)
+    if v not in [0, False, 'False', 'FALSE', None]:
+        return True
+    return False
+
+
+def get_setting_int(app, setting_id):
+    val = get_setting(app, setting_id)
+    if isinstance(val, str):
+        return int(val)
+    if val is None:
+        return 0
+    return val
+
+def valid_bin(path):
+    if os.path.islink(path) or valid_file(path):
+        return path
+    return None
+
+def valid_file(path):
+    if os.path.isfile(path):
+        return path
+    return None
+
+def valid_path(path):
+    if os.path.isdir(path):
+        return path
+    return None
+
+def flat_file_exists(app, subdir):
+    base = get_setting(app, "GENENETWORK_FILES")
+    return valid_path(base + "/" + subdir)
+
+def flat_files(app, subdir=None):
+    base = get_setting(app, "GENENETWORK_FILES")
+    if subdir:
+        return assert_dir(base + "/" + subdir)
+    return assert_dir(base)
+
+def assert_bin(fn):
+    if not valid_bin(fn):
+        raise Exception("ERROR: can not find binary " + fn)
+    return fn
+
+
+def assert_dir(the_dir):
+    if not valid_path(the_dir):
+        raise FileNotFoundError(f"ERROR: can not find directory '{the_dir}'")
+    return the_dir
+
+def assert_writable_dir(path):
+    try:
+        fn = path + "/test.txt"
+        fh = open(fn, 'w')
+        fh.write("I am writing this text to the file\n")
+        fh.close()
+        os.remove(fn)
+    except IOError:
+        raise Exception(f"Unable to write test.txt to directory {path}")
+    return path
+
+def assert_file(fn):
+    if not valid_file(fn):
+        raise FileNotFoundError(f"Unable to find file '{fn}'")
+    return fn
+
+def mk_dir(path):
+    if not valid_path(path):
+        os.makedirs(path)
+    return assert_dir(path)
+
+def locate(app, name, subdir=None):
+    """
+    Locate a static flat file in the GENENETWORK_FILES environment.
+
+    This function throws an error when the file is not found.
+    """
+    base = get_setting(app, "GENENETWORK_FILES")
+    if subdir:
+        base = base + "/" + subdir
+    if valid_path(base):
+        lookfor = base + "/" + name
+        if valid_file(lookfor):
+            return lookfor
+        else:
+            raise Exception("Can not locate " + lookfor)
+    if subdir:
+        sys.stderr.write(subdir)
+    raise Exception("Can not locate " + name + " in " + base)
+
+def locate_ignore_error(app, name, subdir=None):
+    """
+    Locate a static flat file in the GENENETWORK_FILES environment.
+
+    This function does not throw an error when the file is not found
+    but returns None.
+    """
+    base = get_setting(app, "GENENETWORK_FILES")
+    if subdir:
+        base = base + "/" + subdir
+    if valid_path(base):
+        lookfor = base + "/" + name
+        if valid_file(lookfor):
+            return lookfor
+    return None
+
+def tempdir(app):
+    """Retrieve the configured temporary directory or `/tmp`."""
+    return valid_path(get_setting(app, "TMPDIR", "/tmp"))
+
+def show_settings(app):
+    """Print out the application configurations."""
+    BLUE = '\033[94m'
+    GREEN = '\033[92m'
+    BOLD = '\033[1m'
+    ENDC = '\033[0m'
+    app = app or current_app
+    LOG_LEVEL = app.config.get("LOG_LEVEL")
+
+    print(("Set global log level to " + BLUE + LOG_LEVEL + ENDC),
+          file=sys.stderr)
+    log_level = getattr(logging, LOG_LEVEL.upper())
+    logging.basicConfig(level=log_level)
+
+    logger.info(BLUE + "Mr. Mojo Risin 2" + ENDC)
+    keylist = list(app.config.keys())
+    print("runserver.py: ****** Webserver configuration - k,v pairs from app.config ******",
+          file=sys.stderr)
+    keylist.sort()
+    for k in keylist:
+        try:
+            print(("%s: %s%s%s%s" % (k, BLUE, BOLD, get_setting(app, k), ENDC)),
+                  file=sys.stderr)
+        except:
+            print(("%s: %s%s%s%s" % (k, GREEN, BOLD, app.config[k], ENDC)),
+                  file=sys.stderr)
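
The helpers in the new utility/configuration.py read everything from app.config rather than module state, so each call takes the application explicitly (or flask.current_app at the call sites). A small usage sketch, assuming wqflask/ is on the import path and using made-up setting values:

    import os

    from flask import Flask

    from utility.configuration import (
        get_setting, get_setting_bool, get_setting_int, override_from_envvars)

    app = Flask(__name__)
    app.config.update(USE_REDIS="False", SERVER_PORT="5004", WEBSERVER_MODE="DEV")

    # Environment variables win over default_settings-style values of the same name.
    os.environ["WEBSERVER_MODE"] = "DEBUG"
    app = override_from_envvars(app)

    assert get_setting(app, "WEBSERVER_MODE") == "DEBUG"
    assert get_setting_bool(app, "USE_REDIS") is False  # the string "False" counts as falsy
    assert get_setting_int(app, "SERVER_PORT") == 5004
    try:
        get_setting(app, "NO_SUCH_SETTING")
    except Exception as exc:
        print(exc)  # "... setting unknown or faulty (update default_settings.py?)."
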
diff --git a/wqflask/utility/hmac.py b/wqflask/utility/hmac.py
index 29891677..2fb8b106 100644
--- a/wqflask/utility/hmac.py
+++ b/wqflask/utility/hmac.py
@@ -1,9 +1,7 @@
 import hmac
 import hashlib
 
-from flask import url_for
-
-from wqflask import app
+from flask import url_for, current_app as app
 
 
 def hmac_creation(stringy):
@@ -37,7 +35,3 @@ def url_for_hmac(endpoint, **values):
     else:
         combiner = "?"
     return url + combiner + "hm=" + hm
-
-
-app.jinja_env.globals.update(url_for_hmac=url_for_hmac,
-                             data_hmac=data_hmac)
diff --git a/wqflask/utility/startup_config.py b/wqflask/utility/startup_config.py
index 69cac124..5ab43b1a 100644
--- a/wqflask/utility/startup_config.py
+++ b/wqflask/utility/startup_config.py
@@ -1,11 +1,10 @@
 
 from wqflask import app
 
-from utility.tools import WEBSERVER_MODE
-from utility.tools import show_settings
-from utility.tools import get_setting_int
-from utility.tools import get_setting
-from utility.tools import get_setting_bool
+from utility.configuration import show_settings
+from utility.configuration import get_setting_int
+from utility.configuration import get_setting
+from utility.configuration import get_setting_bool
 
 
 BLUE = '\033[94m'
@@ -14,28 +13,28 @@ BOLD = '\033[1m'
 ENDC = '\033[0m'
 
 
-def app_config():
+def app_config(app):
     app.config['SESSION_TYPE'] = app.config.get('SESSION_TYPE', 'filesystem')
     if not app.config.get('SECRET_KEY'):
         import os
         app.config['SECRET_KEY'] = str(os.urandom(24))
-    mode = WEBSERVER_MODE
+    mode = get_setting(app, "WEBSERVER_MODE")
     if mode in ["DEV", "DEBUG"]:
         app.config['TEMPLATES_AUTO_RELOAD'] = True
         if mode == "DEBUG":
             app.debug = True
 
     print("==========================================")
-    show_settings()
+    show_settings(app)
 
-    port = get_setting_int("SERVER_PORT")
+    port = get_setting_int(app, "SERVER_PORT")
 
-    if get_setting_bool("USE_GN_SERVER"):
+    if get_setting_bool(app, "USE_GN_SERVER"):
         print(f"GN2 API server URL is [{BLUE}GN_SERVER_URL{ENDC}]")
         import requests
-        page = requests.get(get_setting("GN_SERVER_URL"))
+        page = requests.get(get_setting(app, "GN_SERVER_URL"))
         if page.status_code != 200:
             raise Exception("API server not found!")
     print(f"GN2 is running. Visit {BLUE}"
           f"[http://localhost:{str(port)}/{ENDC}]"
-          f"({get_setting('WEBSERVER_URL')})")
+          f"({get_setting(app, 'WEBSERVER_URL')})")
diff --git a/wqflask/utility/tools.py b/wqflask/utility/tools.py
index 5b3e9413..894bef76 100644
--- a/wqflask/utility/tools.py
+++ b/wqflask/utility/tools.py
@@ -7,6 +7,16 @@ import json
 
 from wqflask import app
 
+from .configuration import (
+    mk_dir,
+    valid_bin,
+    valid_file,
+    valid_path,
+    assert_bin,
+    assert_dir,
+    assert_file,
+    assert_writable_dir)
+
 # Use the standard logger here to avoid a circular dependency
 import logging
 logger = logging.getLogger(__name__)
@@ -86,24 +96,6 @@ def get_setting_int(id):
     return v
 
 
-def valid_bin(bin):
-    if os.path.islink(bin) or valid_file(bin):
-        return bin
-    return None
-
-
-def valid_file(fn):
-    if os.path.isfile(fn):
-        return fn
-    return None
-
-
-def valid_path(dir):
-    if os.path.isdir(dir):
-        return dir
-    return None
-
-
 def js_path(module=None):
     """
     Find the JS module in the two paths
@@ -146,42 +138,6 @@ def flat_files(subdir=None):
     return assert_dir(base)
 
 
-def assert_bin(fn):
-    if not valid_bin(fn):
-        raise Exception("ERROR: can not find binary " + fn)
-    return fn
-
-
-def assert_dir(the_dir):
-    if not valid_path(the_dir):
-        raise FileNotFoundError(f"ERROR: can not find directory '{the_dir}'")
-    return the_dir
-
-
-def assert_writable_dir(dir):
-    try:
-        fn = dir + "/test.txt"
-        fh = open(fn, 'w')
-        fh.write("I am writing this text to the file\n")
-        fh.close()
-        os.remove(fn)
-    except IOError:
-        raise Exception('Unable to write test.txt to directory ' + dir)
-    return dir
-
-
-def assert_file(fn):
-    if not valid_file(fn):
-        raise FileNotFoundError(f"Unable to find file '{fn}'")
-    return fn
-
-
-def mk_dir(dir):
-    if not valid_path(dir):
-        os.makedirs(dir)
-    return assert_dir(dir)
-
-
 def locate(name, subdir=None):
     """
     Locate a static flat file in the GENENETWORK_FILES environment.
@@ -230,35 +186,6 @@ def tempdir():
     return valid_path(get_setting("TMPDIR", "/tmp"))
 
 
-BLUE = '\033[94m'
-GREEN = '\033[92m'
-BOLD = '\033[1m'
-ENDC = '\033[0m'
-
-
-def show_settings():
-    from utility.tools import LOG_LEVEL
-
-    print(("Set global log level to " + BLUE + LOG_LEVEL + ENDC),
-          file=sys.stderr)
-    log_level = getattr(logging, LOG_LEVEL.upper())
-    logging.basicConfig(level=log_level)
-
-    logger.info(OVERRIDES)
-    logger.info(BLUE + "Mr. Mojo Risin 2" + ENDC)
-    keylist = list(app.config.keys())
-    print("runserver.py: ****** Webserver configuration - k,v pairs from app.config ******",
-          file=sys.stderr)
-    keylist.sort()
-    for k in keylist:
-        try:
-            print(("%s: %s%s%s%s" % (k, BLUE, BOLD, get_setting(k), ENDC)),
-                  file=sys.stderr)
-        except:
-            print(("%s: %s%s%s%s" % (k, GREEN, BOLD, app.config[k], ENDC)),
-                  file=sys.stderr)
-
-
 # Cached values
 GN_VERSION = get_setting('GN_VERSION')
 HOME = get_setting('HOME')
diff --git a/wqflask/wqflask/__init__.py b/wqflask/wqflask/__init__.py
index 654fe028..fe66d1b7 100644
--- a/wqflask/wqflask/__init__.py
+++ b/wqflask/wqflask/__init__.py
@@ -11,8 +11,10 @@ from flask_session import Session
 from authlib.integrations.requests_client import OAuth2Session
 from flask import g, Flask, flash, session, url_for, redirect, current_app
 
-
+from base import webqtlConfig
 from utility import formatting
+from utility.hmac import data_hmac, url_for_hmac
+from utility.configuration import tempdir, override_from_envvars
 
 from gn3.authentication import DataRole, AdminRole
 
@@ -22,6 +24,7 @@ from wqflask.group_manager import group_management
 from wqflask.resource_manager import resource_management
 from wqflask.metadata_edits import metadata_edit
 
+from wqflask.top_level_routes import toplevel
 from wqflask.api.markdown import glossary_blueprint
 from wqflask.api.markdown import references_blueprint
 from wqflask.api.markdown import links_blueprint
@@ -58,11 +61,19 @@ app.jinja_env.globals.update(
     logged_in=user_logged_in,
     authserver_authorise_uri=authserver_authorise_uri,
     user_details=user_details,
-    num_collections=num_collections)
+    num_collections=num_collections,
+    url_for_hmac=url_for_hmac,
+    data_hmac=data_hmac)
 
 app.config["SESSION_REDIS"] = redis.from_url(app.config["REDIS_URL"])
 
+# Override settings
+app = override_from_envvars(app)
+app.config["TEMPDIR"] = tempdir(app)
+app = webqtlConfig.init_app(app)
+
 # Registering blueprints
+app.register_blueprint(toplevel)
 app.register_blueprint(glossary_blueprint, url_prefix="/glossary")
 app.register_blueprint(references_blueprint, url_prefix="/references")
 app.register_blueprint(links_blueprint, url_prefix="/links")
diff --git a/wqflask/wqflask/collect.py b/wqflask/wqflask/collect.py
index 8f19b374..dd3f3330 100644
--- a/wqflask/wqflask/collect.py
+++ b/wqflask/wqflask/collect.py
@@ -11,12 +11,11 @@ from flask import url_for
 from flask import request
 from flask import redirect
 from flask import flash
+from flask import Blueprint
 from flask import current_app
 
-from wqflask import app
 from utility import hmac
 from utility.formatting import numify
-from utility.tools import GN_SERVER_URL, TEMPDIR
 from utility.redis_tools import get_redis_conn
 
 from base.trait import create_trait
@@ -35,7 +34,7 @@ from wqflask.oauth2.client import (
 
 
 Redis = get_redis_conn()
-
+collections_bp = Blueprint("collections", __name__)
 
 def process_traits(unprocessed_traits):
     if isinstance(unprocessed_traits, bytes):
@@ -60,7 +59,7 @@ def report_change(len_before, len_now):
             numify(new_length, 'new trait', 'new traits')))
 
 
-@app.route("/collections/store_trait_list", methods=('POST',))
+@collections_bp.route("/collections/store_trait_list", methods=('POST',))
 def store_traits_list():
     params = request.form
 
@@ -72,7 +71,7 @@ def store_traits_list():
     return hash
 
 
-@app.route("/collections/add", methods=["POST"])
+@collections_bp.route("/collections/add", methods=["POST"])
 def collections_add():
     anon_id = session_info()["anon_id"]
     traits = request.args.get("traits", request.form.get("traits"))
@@ -115,7 +114,7 @@ def __compute_traits__(params):
         unprocessed_traits = params['traits']
     return process_traits(unprocessed_traits)
 
-@app.route("/collections/new")
+@collections_bp.route("/collections/new")
 def collections_new():
     params = request.args
     anon_id = session_info()["anon_id"]
@@ -180,7 +179,7 @@ def create_new(collection_name):
     return redirect(url_for('view_collection', uc_id=uc_id))
 
 
-@app.route("/collections/list")
+@collections_bp.route("/collections/list")
 def list_collections():
     params = request.args
     anon_id = session.session_info()["anon_id"]
@@ -200,7 +199,7 @@ def list_collections():
                            **user_collections,
                            **anon_collections)
 
-@app.route("/collections/handle_anonymous", methods=["POST"])
+@collections_bp.route("/collections/handle_anonymous", methods=["POST"])
 def handle_anonymous_collections():
     """Handle any anonymous collection on logging in."""
     choice = request.form.get("anon_choice")
@@ -221,7 +220,7 @@ def handle_anonymous_collections():
             "anon_id": str(session_info()["anon_id"])
         }).either(__impdel_error__, __impdel_success__)
 
-@app.route("/collections/remove", methods=('POST',))
+@collections_bp.route("/collections/remove", methods=('POST',))
 def remove_traits():
     params = request.form
     uc_id = params['uc_id']
@@ -235,7 +234,7 @@ def remove_traits():
         }).either(with_flash_error(resp), with_flash_success(resp))
 
 
-@app.route("/collections/delete", methods=('POST',))
+@collections_bp.route("/collections/delete", methods=('POST',))
 def delete_collection():
     def __error__(err):
         error = process_error(err)
@@ -304,11 +303,12 @@ def trait_info_str(trait):
         trait.name, trait.dataset.name, __trait_desc(trait), __symbol(trait),
         __location(trait), __mean(trait), __lrs(trait), __lrs_location(trait))
 
-@app.route("/collections/import", methods=('POST',))
+@collections_bp.route("/collections/import", methods=('POST',))
 def import_collection():
     import_file = request.files['import_file']
     if import_file.filename != '':
-        file_path = os.path.join(TEMPDIR, import_file.filename)
+        file_path = os.path.join(
+            current_app.config["TEMPDIR"], import_file.filename)
         import_file.save(file_path)
         collection_csv = open(file_path, "r")
         traits = [row.strip() for row in collection_csv if row[0] != "#"]
@@ -319,7 +319,7 @@ def import_collection():
         return render_template(
             "collections/list.html")
 
-@app.route("/collections/view")
+@collections_bp.route("/collections/view")
 def view_collection():
     params = request.args
 
@@ -363,7 +363,8 @@ def view_collection():
         collection_info = dict(
             trait_obs=trait_obs,
             uc=uc,
-            heatmap_data_url=urljoin(GN_SERVER_URL, "heatmaps/clustered"))
+            heatmap_data_url=urljoin(
+                current_app.config["GN_SERVER_URL"], "heatmaps/clustered"))
 
         if "json" in params:
             return json.dumps(json_version)
@@ -381,7 +382,7 @@ def view_collection():
 
     return coll.either(__error__, __view__)
 
-@app.route("/collections/change_name", methods=('POST',))
+@collections_bp.route("/collections/change_name", methods=('POST',))
 def change_collection_name():
     collection_id = request.form['collection_id']
     resp = redirect(url_for("view_collection", uc_id=collection_id))
diff --git a/wqflask/wqflask/correlation/pre_computes.py b/wqflask/wqflask/correlation/pre_computes.py
index 2831bd39..308614f0 100644
--- a/wqflask/wqflask/correlation/pre_computes.py
+++ b/wqflask/wqflask/correlation/pre_computes.py
@@ -3,14 +3,14 @@ import json
 import os
 import hashlib
 import datetime
+from pathlib import Path
 
 import lmdb
 import pickle
-from pathlib import Path
+from flask import current_app as app
 
 from base.data_set import query_table_timestamp
-from base.webqtlConfig import TEXTDIR
-from base.webqtlConfig import TMPDIR
+from utility.configuration import get_setting
 
 from json.decoder import JSONDecodeError
 
@@ -18,7 +18,7 @@ def cache_trait_metadata(dataset_name, data):
 
 
     try:
-        with lmdb.open(os.path.join(TMPDIR,f"metadata_{dataset_name}"),map_size=20971520) as env:
+        with lmdb.open(os.path.join(get_setting(app, "TMPDIR"),f"metadata_{dataset_name}"),map_size=20971520) as env:
             with  env.begin(write=True) as  txn:
                 data_bytes = pickle.dumps(data)
                 txn.put(f"{dataset_name}".encode(), data_bytes)
@@ -31,7 +31,7 @@ def cache_trait_metadata(dataset_name, data):
 
 def read_trait_metadata(dataset_name):
     try:
-        with lmdb.open(os.path.join(TMPDIR,f"metadata_{dataset_name}"),
+        with lmdb.open(os.path.join(get_setting(app, "TMPDIR"),f"metadata_{dataset_name}"),
             readonly=True, lock=False) as env:
             with env.begin() as txn:
                 db_name = txn.get(dataset_name.encode())
@@ -44,7 +44,7 @@ def fetch_all_cached_metadata(dataset_name):
     """in a gvein dataset fetch all the traits metadata"""
     file_name = generate_filename(dataset_name, suffix="metadata")
 
-    file_path = Path(TMPDIR, file_name)
+    file_path = Path(get_setting(app, "TMPDIR"), file_name)
 
     try:
         with open(file_path, "r+") as file_handler:
@@ -84,8 +84,9 @@ def generate_filename(*args, suffix="", file_ext="json"):
 
 
 
-def fetch_text_file(dataset_name, conn, text_dir=TMPDIR):
+def fetch_text_file(dataset_name, conn, text_dir=None):
     """fetch textfiles with strain vals if exists"""
+    text_dir = text_dir or get_setting(app, "TMPDIR")
 
     def __file_scanner__(text_dir, target_file):
         for file in os.listdir(text_dir):
@@ -100,7 +101,8 @@ def fetch_text_file(dataset_name, conn, text_dir=TMPDIR):
         try:
             # checks first for recently generated textfiles if not use gn1 datamatrix
 
-            return __file_scanner__(text_dir, results[0]) or __file_scanner__(TEXTDIR, results[0])
+            return __file_scanner__(text_dir, results[0]) or __file_scanner__(
+                get_setting(app, "WEBQTL_TEXTDIR"), results[0])
 
         except Exception:
             pass
@@ -126,7 +128,8 @@ def read_text_file(sample_dict, file_path):
         return (sample_vals, [[line[i] for i in _posit] for line in csv_reader])
 
 
-def write_db_to_textfile(db_name, conn, text_dir=TMPDIR):
+def write_db_to_textfile(db_name, conn, text_dir=None):
+    text_dir = text_dir or get_setting(app, "TMPDIR")
 
     def __sanitise_filename__(filename):
         ttable = str.maketrans({" ": "_", "/": "_", "\\": "_"})
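
The pre_computes hunks above re-point the lmdb trait-metadata cache at the configured TMPDIR. A minimal round-trip of that lmdb pattern, with the directory, dataset name and payload all assumed for illustration (py-lmdb creates the environment directory on first open):

    import os
    import pickle
    import tempfile

    import lmdb

    TMPDIR = tempfile.mkdtemp()     # stands in for get_setting(app, "TMPDIR")
    dataset_name = "HC_M2_0606_P"   # an assumed dataset name

    # Write: one lmdb environment per dataset, keyed by the dataset name itself.
    with lmdb.open(os.path.join(TMPDIR, f"metadata_{dataset_name}"),
                   map_size=20971520) as env:
        with env.begin(write=True) as txn:
            txn.put(dataset_name.encode(),
                    pickle.dumps({"1427571_at": {"symbol": "Xkr4"}}))

    # Read back, mirroring read_trait_metadata() above.
    with lmdb.open(os.path.join(TMPDIR, f"metadata_{dataset_name}"),
                   readonly=True, lock=False) as env:
        with env.begin() as txn:
            cached = txn.get(dataset_name.encode())
            print(pickle.loads(cached) if cached else {})
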
diff --git a/wqflask/wqflask/correlation_matrix/show_corr_matrix.py b/wqflask/wqflask/correlation_matrix/show_corr_matrix.py
index 617f5c2e..5848a756 100644
--- a/wqflask/wqflask/correlation_matrix/show_corr_matrix.py
+++ b/wqflask/wqflask/correlation_matrix/show_corr_matrix.py
@@ -23,11 +23,11 @@ import random
 import string
 import numpy as np
 import scipy
+from flask import current_app as app
 
 from base.data_set import create_dataset
-from base.webqtlConfig import GENERATED_TEXT_DIR
-
 
+from utility.configuration import get_setting
 from utility.helper_functions import get_trait_db_obs
 from utility.corr_result_helpers import normalize_values
 from utility.redis_tools import get_redis_conn
@@ -225,7 +225,7 @@ def export_corr_matrix(corr_results):
         ''.join(random.choice(string.ascii_uppercase + string.digits)
                 for _ in range(6))
     matrix_export_path = "{}{}.csv".format(
-        GENERATED_TEXT_DIR, corr_matrix_filename)
+        get_setting(app, "WEBQTL_GENERATED_TEXT_DIR"), corr_matrix_filename)
     with open(matrix_export_path, "w+") as output_file:
         output_file.write(
             "Time/Date: " + datetime.datetime.now().strftime("%x / %X") + "\n")
diff --git a/wqflask/wqflask/marker_regression/display_mapping_results.py b/wqflask/wqflask/marker_regression/display_mapping_results.py
index bf89b0db..c544190a 100644
--- a/wqflask/wqflask/marker_regression/display_mapping_results.py
+++ b/wqflask/wqflask/marker_regression/display_mapping_results.py
@@ -35,13 +35,14 @@ import os
 import json
 
 import htmlgen as HT
+from flask import current_app as app
 
 from base import webqtlConfig
 from base.GeneralObject import GeneralObject
 from utility import webqtlUtil
 from utility import Plot
+from utility.configuration import get_setting
 from wqflask.interval_analyst import GeneUtil
-from base.webqtlConfig import GENERATED_IMAGE_DIR
 from utility.pillow_utils import draw_rotated_text, draw_open_polygon
 from wqflask.database import database_connection
 
@@ -607,7 +608,7 @@ class DisplayMappingResults:
         self.filename = webqtlUtil.genRandStr("Itvl_")
         intCanvas.save(
             "{}.png".format(
-                os.path.join(webqtlConfig.GENERATED_IMAGE_DIR, self.filename)),
+                os.path.join(get_setting(app, "WEBQTL_GENERATED_IMAGE_DIR"), self.filename)),
             format='png')
         intImg = HtmlGenWrapper.create_image_tag(
             src="/image/{}.png".format(self.filename),
@@ -622,7 +623,7 @@ class DisplayMappingResults:
                 intCanvasX2, startMb=self.startMb, endMb=self.endMb, showLocusForm=showLocusForm, zoom=2)
             intCanvasX2.save(
                 "{}.png".format(
-                    os.path.join(webqtlConfig.GENERATED_IMAGE_DIR,
+                    os.path.join(get_setting(app, "WEBQTL_GENERATED_IMAGE_DIR"),
                                  self.filename + "X2")),
                 format='png')
 
@@ -3050,7 +3051,7 @@ class DisplayMappingResults:
         filename = webqtlUtil.genRandStr("Reg_")
         Plot.plotBar(myCanvas, perm_output, XLabel=self.LRS_LOD,
                      YLabel='Frequency', title=' Histogram of Permutation Test')
-        myCanvas.save("{}.gif".format(GENERATED_IMAGE_DIR + filename),
+        myCanvas.save("{}.gif".format(get_setting(app, "WEBQTL_GENERATED_IMAGE_DIR") + filename),
                       format='gif')
 
         return filename
diff --git a/wqflask/wqflask/marker_regression/plink_mapping.py b/wqflask/wqflask/marker_regression/plink_mapping.py
index 75ee189e..b7a296b7 100644
--- a/wqflask/wqflask/marker_regression/plink_mapping.py
+++ b/wqflask/wqflask/marker_regression/plink_mapping.py
@@ -1,9 +1,11 @@
-import string
 import os
+import string
+
+from flask import current_app as app
 
-from base.webqtlConfig import TMPDIR
 from utility import webqtlUtil
-from utility.tools import flat_files, PLINK_COMMAND
+from utility.configuration import flat_files, get_setting
+from utility.tools import PLINK_COMMAND
 
 
 def run_plink(this_trait, dataset, species, vals, maf):
@@ -11,7 +13,7 @@ def run_plink(this_trait, dataset, species, vals, maf):
         f"{dataset.group.name}_{this_trait.name}_")
     gen_pheno_txt_file(dataset, vals)
 
-    plink_command = f"{PLINK_COMMAND}  --noweb --bfile {flat_files('mapping')}/{dataset.group.name} --no-pheno --no-fid --no-parents --no-sex --maf {maf} --out { TMPDIR}{plink_output_filename} --assoc "
+    plink_command = f"{get_setting(app, 'PLINK_COMMAND')}  --noweb --bfile {flat_files(app, 'mapping')}/{dataset.group.name} --no-pheno --no-fid --no-parents --no-sex --maf {maf} --out { get_setting(app, 'TMPDIR')}{plink_output_filename} --assoc "
 
     os.system(plink_command)
 
@@ -26,12 +28,12 @@ def gen_pheno_txt_file(this_dataset, vals):
     """Generates phenotype file for GEMMA/PLINK"""
 
     current_file_data = []
-    with open(f"{flat_files('mapping')}/{this_dataset.group.name}.fam", "r") as outfile:
+    with open(f"{flat_files(app, 'mapping')}/{this_dataset.group.name}.fam", "r") as outfile:
         for i, line in enumerate(outfile):
             split_line = line.split()
             current_file_data.append(split_line)
 
-    with open(f"{flat_files('mapping')}/{this_dataset.group.name}.fam", "w") as outfile:
+    with open(f"{flat_files(app, 'mapping')}/{this_dataset.group.name}.fam", "w") as outfile:
         for i, line in enumerate(current_file_data):
             if vals[i] == "x":
                 this_val = -9
@@ -43,7 +45,7 @@ def gen_pheno_txt_file(this_dataset, vals):
 
 def gen_pheno_txt_file_plink(this_trait, dataset, vals, pheno_filename=''):
     ped_sample_list = get_samples_from_ped_file(dataset)
-    output_file = open(f"{TMPDIR}{pheno_filename}.txt", "wb")
+    output_file = open(f"{get_setting(app, 'TMPDIR')}{pheno_filename}.txt", "wb")
     header = f"FID\tIID\t{this_trait.name}\n"
     output_file.write(header)
 
@@ -79,7 +81,7 @@ def gen_pheno_txt_file_plink(this_trait, dataset, vals, pheno_filename=''):
 
 
 def get_samples_from_ped_file(dataset):
-    ped_file = open(f"{flat_files('mapping')}{dataset.group.name}.ped", "r")
+    ped_file = open(f"{flat_files(app, 'mapping')}{dataset.group.name}.ped", "r")
     line = ped_file.readline()
     sample_list = []
 
@@ -100,7 +102,7 @@ def parse_plink_output(output_filename, species):
 
     threshold_p_value = 1
 
-    result_fp = open(f"{TMPDIR}{output_filename}.qassoc", "rb")
+    result_fp = open(f"{get_setting(app, 'TMPDIR')}{output_filename}.qassoc", "rb")
 
     line = result_fp.readline()
 
diff --git a/wqflask/wqflask/marker_regression/rqtl_mapping.py b/wqflask/wqflask/marker_regression/rqtl_mapping.py
index 9a42bc35..bd3fdbc8 100644
--- a/wqflask/wqflask/marker_regression/rqtl_mapping.py
+++ b/wqflask/wqflask/marker_regression/rqtl_mapping.py
@@ -10,11 +10,11 @@ from typing import Optional
 from typing import TextIO
 
 import numpy as np
+from flask import current_app as app
 
-from base.webqtlConfig import TMPDIR
 from base.trait import create_trait
 from utility.redis_tools import get_redis_conn
-from utility.tools import locate, GN3_LOCAL_URL
+from utility.configuration import get_setting, locate
 from wqflask.database import database_connection
 
 
@@ -23,9 +23,9 @@ def run_rqtl(trait_name, vals, samples, dataset, pair_scan, mapping_scale, model
 
     pheno_file = write_phenotype_file(trait_name, samples, vals, dataset, cofactors, perm_strata_list)
     if dataset.group.genofile:
-        geno_file = locate(dataset.group.genofile, "genotype")
+        geno_file = locate(app, dataset.group.genofile, "genotype")
     else:
-        geno_file = locate(dataset.group.name + ".geno", "genotype")
+        geno_file = locate(app, dataset.group.name + ".geno", "genotype")
 
     post_data = {
         "pheno_file": pheno_file,
@@ -54,7 +54,7 @@ def run_rqtl(trait_name, vals, samples, dataset, pair_scan, mapping_scale, model
     if perm_strata_list:
         post_data["pstrata"] = True
 
-    rqtl_output = requests.post(GN3_LOCAL_URL + "api/rqtl/compute", data=post_data).json()
+    rqtl_output = requests.post(get_setting(app, "GN3_LOCAL_URL") + "api/rqtl/compute", data=post_data).json()
     if num_perm > 0:
         return rqtl_output['perm_results'], rqtl_output['suggestive'], rqtl_output['significant'], rqtl_output['results']
     else:
@@ -90,7 +90,7 @@ def write_covarstruct_file(cofactors: str) -> str:
         writer.writerow([cofactor_name, datatype])
 
     hash_of_file = get_hash_of_textio(covar_struct_file)
-    file_path = TMPDIR + hash_of_file + ".csv"
+    file_path = get_setting(app, "WEBQTL_TMPDIR") + hash_of_file + ".csv"
 
     with open(file_path, "w") as fd:
         covar_struct_file.seek(0)
@@ -133,7 +133,7 @@ def write_phenotype_file(trait_name: str,
         writer.writerow(this_row)
 
     hash_of_file = get_hash_of_textio(pheno_file)
-    file_path = TMPDIR + hash_of_file + ".csv"
+    file_path = get_setting(app, "WEBQTL_TMPDIR") + hash_of_file + ".csv"
 
     with open(file_path, "w") as fd:
         pheno_file.seek(0)
diff --git a/wqflask/wqflask/marker_regression/run_mapping.py b/wqflask/wqflask/marker_regression/run_mapping.py
index 952d9749..a478f875 100644
--- a/wqflask/wqflask/marker_regression/run_mapping.py
+++ b/wqflask/wqflask/marker_regression/run_mapping.py
@@ -23,7 +23,7 @@ import simplejson as json
 from redis import Redis
 Redis = Redis()
 
-from flask import Flask, g
+from flask import g, Flask, current_app as app
 
 from base.trait import GeneralTrait
 from base import data_set
@@ -35,9 +35,7 @@ from wqflask.database import database_connection
 from wqflask.marker_regression import gemma_mapping, rqtl_mapping, qtlreaper_mapping, plink_mapping
 from wqflask.show_trait.SampleList import SampleList
 
-from utility.tools import locate, locate_ignore_error, GEMMA_COMMAND, PLINK_COMMAND, TEMPDIR
 from utility.external import shell
-from base.webqtlConfig import TMPDIR, GENERATED_TEXT_DIR
 
 Redis = get_redis_conn()
 
@@ -467,7 +465,7 @@ class RunMapping:
             self.this_trait, self.dataset, self.vals, pheno_filename=output_filename)
 
         rqtl_command = './plink --noweb --ped %s.ped --no-fid --no-parents --no-sex --no-pheno --map %s.map --pheno %s/%s.txt --pheno-name %s --maf %s --missing-phenotype -9999 --out %s%s --assoc ' % (
-            self.dataset.group.name, self.dataset.group.name, TMPDIR, plink_output_filename, self.this_trait.name, self.maf, TMPDIR, plink_output_filename)
+            self.dataset.group.name, self.dataset.group.name, get_setting(app, "WEBQTL_TMPDIR"), plink_output_filename, self.this_trait.name, self.maf, get_setting(app, "WEBQTL_TMPDIR"), plink_output_filename)
 
         os.system(rqtl_command)
 
@@ -646,8 +644,8 @@ def write_input_for_browser(this_dataset, gwas_results, annotations):
                 for _ in range(6))
     gwas_filename = file_base + "_GWAS"
     annot_filename = file_base + "_ANNOT"
-    gwas_path = "{}/gn2/".format(TEMPDIR) + gwas_filename
-    annot_path = "{}/gn2/".format(TEMPDIR) + annot_filename
+    gwas_path = "{}/gn2/".format(get_setting(app, "TEMPDIR")) + gwas_filename
+    annot_path = "{}/gn2/".format(get_setting(app, "TEMPDIR")) + annot_filename
 
     with open(gwas_path + ".json", "w") as gwas_file, open(annot_path + ".json", "w") as annot_file:
         gwas_file.write(json.dumps(gwas_results))
diff --git a/wqflask/wqflask/partial_correlations_views.py b/wqflask/wqflask/partial_correlations_views.py
index a11d902c..f5f88147 100644
--- a/wqflask/wqflask/partial_correlations_views.py
+++ b/wqflask/wqflask/partial_correlations_views.py
@@ -10,14 +10,16 @@ from flask import (
     request,
     url_for,
     redirect,
+    Blueprint,
     current_app,
     render_template)
 
-from wqflask import app
-from utility.tools import GN_SERVER_URL
+from utility.configuration import get_setting
 from wqflask.database import database_connection
 from gn3.db.partial_correlations import traits_info
 
+pcorrs_bp = Blueprint("partial_correlations", __name__)
+
 def publish_target_databases(conn, groups, threshold):
     query = (
         "SELECT PublishFreeze.FullName,PublishFreeze.Name "
@@ -266,7 +268,7 @@ def handle_response(response):
             message = response_error_message(response))
     return handle_200_response(response.json())
 
-@app.route("/partial_correlations", methods=["POST"])
+@pcorrs_bp.route("/partial_correlations", methods=["POST"])
 def partial_correlations():
     form = request.form
     traits = tuple(
@@ -288,7 +290,8 @@ def partial_correlations():
                 "with_target_db": args["with_target_db"]
             }
             return handle_response(requests.post(
-                url=urljoin(GN_SERVER_URL, "correlation/partial"),
+                url=urljoin(get_setting(current_app, "GN_SERVER_URL"),
+                            "correlation/partial"),
                 json=post_data))
 
         for error in args["errors"]:
@@ -303,7 +306,8 @@ def partial_correlations():
                 "with_target_db": args["with_target_db"]
             }
             return handle_response(requests.post(
-                url=urljoin(GN_SERVER_URL, "correlation/partial"),
+                url=urljoin(get_setting(current_app, "GN_SERVER_URL"),
+                            "correlation/partial"),
                 json=post_data))
 
         for error in args["errors"]:
@@ -345,10 +349,11 @@ def process_pcorrs_command_output(result):
         return render_error(
             f"({result['error_type']}: {result['message']})")
 
-@app.route("/partial_correlations/<command_id>", methods=["GET"])
+@pcorrs_bp.route("/partial_correlations/<command_id>", methods=["GET"])
 def poll_partial_correlation_results(command_id):
     response = requests.get(
-        url=urljoin(GN_SERVER_URL, f"async_commands/state/{command_id}"))
+        url=urljoin(get_setting(current_app, "GN_SERVER_URL"),
+                    f"async_commands/state/{command_id}"))
 
     if response.status_code == 200:
         data = response.json()
diff --git a/wqflask/wqflask/search_results.py b/wqflask/wqflask/search_results.py
index 6222dd88..d057ee59 100644
--- a/wqflask/wqflask/search_results.py
+++ b/wqflask/wqflask/search_results.py
@@ -6,7 +6,7 @@ import re
 
 import json
 
-from flask import g
+from flask import g, current_app
 
 from base.data_set import create_dataset
 from base.webqtlConfig import PUBMEDLINK_URL
@@ -17,7 +17,6 @@ from wqflask.database import database_connection
 
 from utility import hmac
 from utility.authentication_tools import check_resource_availability
-from utility.tools import GN2_BASE_URL
 from utility.type_checking import is_str
 
 
@@ -411,7 +410,8 @@ def get_alias_terms(symbol, species):
 
     filtered_aliases = []
     response = requests.get(
-        GN2_BASE_URL + "/gn3/gene/aliases/" + symbol_string)
+        current_app.config["GN2_BASE_URL"] + "/gn3/gene/aliases/" +
+        symbol_string)
     if response:
         alias_list = json.loads(response.content)
 
diff --git a/wqflask/wqflask/top_level_routes.py b/wqflask/wqflask/top_level_routes.py
new file mode 100644
index 00000000..73a91dfc
--- /dev/null
+++ b/wqflask/wqflask/top_level_routes.py
@@ -0,0 +1,16 @@
+"""Top-Level routes."""
+from flask import Blueprint
+
+# from .views import main_views
+from base.trait import trait_bp
+from .collect import collections_bp
+from .partial_correlations_views import pcorrs_bp
+
+# oauth2 = Blueprint("oauth2", __name__, template_folder="templates/oauth2")
+
+toplevel = Blueprint("toplevel", __name__)
+
+toplevel.register_blueprint(trait_bp)
+toplevel.register_blueprint(pcorrs_bp)
+# toplevel.register_blueprint(main_views)
+toplevel.register_blueprint(collections_bp)
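
The route decorators in this series move from the module-level `app` onto blueprints that are nested under a single `toplevel` blueprint, which is what wqflask/__init__.py registers; Blueprint.register_blueprint requires Flask 2.x. A minimal sketch of the same composition with throwaway blueprints and routes (all names assumed):

    from flask import Blueprint, Flask

    # Stand-ins for trait_bp, collections_bp and pcorrs_bp above.
    child_a = Blueprint("child_a", __name__)
    child_b = Blueprint("child_b", __name__)

    @child_a.route("/trait/get_sample_data")
    def get_sample_data():
        return "sample data"

    @child_b.route("/collections/list")
    def list_collections():
        return "collections"

    toplevel = Blueprint("toplevel", __name__)
    toplevel.register_blueprint(child_a)  # Blueprint.register_blueprint needs Flask >= 2.0
    toplevel.register_blueprint(child_b)

    app = Flask(__name__)
    app.register_blueprint(toplevel)      # mirrors wqflask/__init__.py above

    # Endpoints are namespaced by blueprint: "toplevel.child_a.get_sample_data", etc.
    print(sorted(rule.endpoint for rule in app.url_map.iter_rules()))

One side effect worth noting: endpoint names pick up the blueprint prefixes, so bare lookups such as url_for('view_collection') left in collect.py would need the qualified name (or a relative '.view_collection') once those views live on collections_bp.
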
diff --git a/wqflask/wqflask/views.py b/wqflask/wqflask/views.py
index 7cbcb3b8..bb0e408f 100644
--- a/wqflask/wqflask/views.py
+++ b/wqflask/wqflask/views.py
@@ -25,10 +25,8 @@ from uuid import UUID
 
 from urllib.parse import urljoin
 
-from wqflask import app
-
 from gn3.computations.gemma import generate_hash_of_string
-from flask import current_app
+from flask import current_app as app
 from flask import g
 from flask import Response
 from flask import request
@@ -40,10 +38,10 @@ from flask import send_file
 from flask import url_for
 from flask import flash
 from flask import session
+from flask import Blueprint
 
 # Some of these (like collect) might contain endpoints, so they're still used.
 # Blueprints should probably be used instead.
-from wqflask import collect
 from wqflask import search_results
 from wqflask import server_side
 from base.data_set import create_dataset  # Used by YAML in marker_regression
@@ -95,9 +93,6 @@ from utility.redis_tools import get_redis_conn
 
 import utility.hmac as hmac
 
-from base.webqtlConfig import TMPDIR
-from base.webqtlConfig import GENERATED_IMAGE_DIR
-
 from wqflask.database import database_connection
 
 import jobs.jobs as jobs
@@ -107,13 +102,15 @@ from wqflask.oauth2.checks import user_logged_in
 
 Redis = get_redis_conn()
 
+main_views = Blueprint("main_views", __name__)
+
 
-@app.route("/authentication_needed")
+@main_views.route("/authentication_needed")
 def no_access_page():
     return render_template("new_security/not_authenticated.html")
 
 
-@app.route("/")
+@main_views.route("/")
 def index_page():
     anon_id = session_info()["anon_id"]
     def __render__(colls):
@@ -129,10 +126,10 @@ def index_page():
             __render__)
 
 
-@app.route("/tmp/<img_path>")
+@main_views.route("/tmp/<img_path>")
 def tmp_page(img_path):
     initial_start_vars = request.form
-    imgfile = open(GENERATED_IMAGE_DIR + img_path, 'rb')
+    imgfile = open(get_setting(app, "WEBQTL_GENERATED_IMAGE_DIR") + img_path, 'rb')
     imgdata = imgfile.read()
     imgB64 = base64.b64encode(imgdata)
     bytesarray = array.array('B', imgB64)
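Editor's note: from this hunk onward, module-level path constants (GENERATED_IMAGE_DIR, TMPDIR) are replaced by get_setting(app, ...) lookups, where app is current_app aliased in the import hunk above. The helper's body is not part of this diff; a minimal sketch of what such a lookup could look like, assuming it simply proxies the Flask config:

    # Hypothetical shape of the helper; the real utility.configuration
    # implementation used by this commit may differ (for example, by
    # consulting environment variables or raising on missing keys).
    def get_setting(app, setting_name, default=None):
        return app.config.get(setting_name, default)

    # Usage inside a request, mirroring the tmp_page change above:
    #   image_dir = get_setting(app, "WEBQTL_GENERATED_IMAGE_DIR")
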
@@ -140,7 +137,7 @@ def tmp_page(img_path):
                            img_base64=bytesarray)
 
 
-@app.route("/js/<path:filename>")
+@main_views.route("/js/<path:filename>")
 def js(filename):
     js_path = JS_GUIX_PATH
     name = filename
@@ -150,7 +147,7 @@ def js(filename):
     return send_from_directory(js_path, name)
 
 
-@app.route("/css/<path:filename>")
+@main_views.route("/css/<path:filename>")
 def css(filename):
     js_path = JS_GUIX_PATH
     name = filename
@@ -160,12 +157,12 @@ def css(filename):
     return send_from_directory(js_path, name)
 
 
-@app.route("/twitter/<path:filename>")
+@main_views.route("/twitter/<path:filename>")
 def twitter(filename):
     return send_from_directory(JS_TWITTER_POST_FETCHER_PATH, filename)
 
 
-@app.route("/search", methods=('GET',))
+@main_views.route("/search", methods=('GET',))
 def search_page():
     result = None
     if USE_REDIS:
@@ -186,7 +183,7 @@ def search_page():
         return render_template("search_error.html")
 
 
-@app.route("/search_table", methods=('GET',))
+@main_views.route("/search_table", methods=('GET',))
 def search_page_table():
     the_search = search_results.SearchResultPage(request.args)
     current_page = server_side.ServerSideTable(
@@ -199,7 +196,7 @@ def search_page_table():
     return flask.jsonify(current_page)
 
 
-@app.route("/gsearch", methods=('GET',))
+@main_views.route("/gsearch", methods=('GET',))
 def gsearchact():
     result = GSearch(request.args).__dict__
     type = request.args['type']
@@ -209,7 +206,7 @@ def gsearchact():
         return render_template("gsearch_pheno.html", **result)
 
 
-@app.route("/gsearch_table", methods=('GET',))
+@main_views.route("/gsearch_table", methods=('GET',))
 def gsearchtable():
     gsearch_table_data = GSearch(request.args)
     current_page = server_side.ServerSideTable(
@@ -222,13 +219,13 @@ def gsearchtable():
     return flask.jsonify(current_page)
 
 
-@app.route("/gsearch_updating", methods=('POST',))
+@main_views.route("/gsearch_updating", methods=('POST',))
 def gsearch_updating():
     result = UpdateGSearch(request.args).__dict__
     return result['results']
 
 
-@app.route("/docedit")
+@main_views.route("/docedit")
 def docedit():
     try:
         if g.user_session.record['user_email_address'] == "zachary.a.sloan@gmail.com" or g.user_session.record['user_email_address'] == "labwilliams@gmail.com":
@@ -240,45 +237,45 @@ def docedit():
         return "You shouldn't be here!"
 
 
-@app.route('/generated/<filename>')
+@main_views.route('/generated/<filename>')
 def generated_file(filename):
-    return send_from_directory(GENERATED_IMAGE_DIR, filename)
+    return send_from_directory(get_setting(app, "WEBQTL_GENERATED_IMAGE_DIR"), filename)
 
 
-@app.route("/help")
+@main_views.route("/help")
 def help():
     doc = Docs("help", request.args)
     return render_template("docs.html", **doc.__dict__)
 
 
-@app.route("/wgcna_setup", methods=('POST',))
+@main_views.route("/wgcna_setup", methods=('POST',))
 def wcgna_setup():
     # We are going to get additional user input for the analysis
     # Display them using the template
     return render_template("wgcna_setup.html", **request.form)
 
 
-@app.route("/wgcna_results", methods=('POST',))
+@main_views.route("/wgcna_results", methods=('POST',))
 def wcgna_results():
     """call the gn3 api to get wgcna response data"""
     results = run_wgcna(dict(request.form))
     return render_template("gn3_wgcna_results.html", **results)
 
 
-@app.route("/ctl_setup", methods=('POST',))
+@main_views.route("/ctl_setup", methods=('POST',))
 def ctl_setup():
     # We are going to get additional user input for the analysis
     # Display them using the template
     return render_template("ctl_setup.html", **request.form)
 
 
-@app.route("/ctl_results", methods=["POST"])
+@main_views.route("/ctl_results", methods=["POST"])
 def ctl_results():
     ctl_results = run_ctl(request.form)
     return render_template("gn3_ctl_results.html", **ctl_results)
 
 
-@app.route("/ctl_network_files/<file_name>/<file_type>")
+@main_views.route("/ctl_network_files/<file_name>/<file_type>")
 def fetch_network_files(file_name, file_type):
     file_path = f"{file_name}.{file_type}"
 
@@ -287,30 +284,30 @@ def fetch_network_files(file_name, file_type):
     return send_file(file_path)
 
 
-@app.route("/intro")
+@main_views.route("/intro")
 def intro():
     doc = Docs("intro", request.args)
     return render_template("docs.html", **doc.__dict__)
 
 
-@app.route("/tutorials")
+@main_views.route("/tutorials")
 def tutorials():
     return render_template("tutorials.html")
 
 
-@app.route("/credits")
+@main_views.route("/credits")
 def credits():
     return render_template("credits.html")
 
 
-@app.route("/update_text", methods=('POST',))
+@main_views.route("/update_text", methods=('POST',))
 def update_page():
     update_text(request.form)
     doc = Docs(request.form['entry_type'], request.form)
     return render_template("docs.html", **doc.__dict__)
 
 
-@app.route("/submit_trait")
+@main_views.route("/submit_trait")
 def submit_trait_form():
     species_and_groups = get_species_groups()
     return render_template(
@@ -320,13 +317,13 @@ def submit_trait_form():
         version=GN_VERSION)
 
 
-@app.route("/create_temp_trait", methods=('POST',))
+@main_views.route("/create_temp_trait", methods=('POST',))
 def create_temp_trait():
     doc = Docs("links")
     return render_template("links.html", **doc.__dict__)
 
 
-@app.route('/export_trait_excel', methods=('POST',))
+@main_views.route('/export_trait_excel', methods=('POST',))
 def export_trait_excel():
     """Excel file consisting of the sample data from the trait data and analysis page"""
     trait_name, sample_data = export_trait_data.export_sample_table(
@@ -347,7 +344,7 @@ def export_trait_excel():
                     headers={"Content-Disposition": "attachment;filename=" + trait_name + ".xlsx"})
 
 
-@app.route('/export_trait_csv', methods=('POST',))
+@main_views.route('/export_trait_csv', methods=('POST',))
 def export_trait_csv():
     """CSV file consisting of the sample data from the trait data and analysis page"""
     trait_name, sample_data = export_trait_data.export_sample_table(
@@ -365,7 +362,7 @@ def export_trait_csv():
                     headers={"Content-Disposition": "attachment;filename=" + trait_name + ".csv"})
 
 
-@app.route('/export_traits_csv', methods=('POST',))
+@main_views.route('/export_traits_csv', methods=('POST',))
 def export_traits_csv():
     """CSV file consisting of the traits from the search result page"""
     file_list = export_traits(request.form, "metadata")
@@ -388,7 +385,7 @@ def export_traits_csv():
                         headers={"Content-Disposition": "attachment;filename=" + file_list[0][0]})
 
 
-@app.route('/export_collection', methods=('POST',))
+@main_views.route('/export_collection', methods=('POST',))
 def export_collection_csv():
     """CSV file consisting of trait list so collections can be exported/shared"""
     out_file = export_traits(request.form, "collection")
@@ -397,7 +394,7 @@ def export_collection_csv():
                     headers={"Content-Disposition": "attachment;filename=" + out_file[0] + ".csv"})
 
 
-@app.route('/export_perm_data', methods=('POST',))
+@main_views.route('/export_perm_data', methods=('POST',))
 def export_perm_data():
     """CSV file consisting of the permutation data for the mapping results"""
     perm_info = json.loads(request.form['perm_info'])
@@ -444,7 +441,7 @@ def export_perm_data():
                     headers={"Content-Disposition": "attachment;filename=" + file_name + ".csv"})
 
 
-@app.route("/show_temp_trait", methods=('POST',))
+@main_views.route("/show_temp_trait", methods=('POST',))
 def show_temp_trait_page():
     with database_connection() as conn, conn.cursor() as cursor:
         user_id = ((g.user_session.record.get(b"user_id") or b"").decode("utf-8")
@@ -458,7 +455,7 @@ def show_temp_trait_page():
         return render_template("show_trait.html", **template_vars.__dict__)
 
 
-@app.route("/show_trait")
+@main_views.route("/show_trait")
 def show_trait_page():
     def __show_trait__(privileges_data):
         assert len(privileges_data) == 1
@@ -496,7 +493,7 @@ def show_trait_page():
         }).either(__failure__, __show_trait__)
 
 
-@app.route("/heatmap", methods=('POST',))
+@main_views.route("/heatmap", methods=('POST',))
 def heatmap_page():
     start_vars = request.form
     temp_uuid = uuid.uuid4()
@@ -532,7 +529,7 @@ def heatmap_page():
     return rendered_template
 
 
-@app.route("/bnw_page", methods=('POST',))
+@main_views.route("/bnw_page", methods=('POST',))
 def bnw_page():
     start_vars = request.form
 
@@ -549,7 +546,7 @@ def bnw_page():
     return rendered_template
 
 
-@app.route("/webgestalt_page", methods=('POST',))
+@main_views.route("/webgestalt_page", methods=('POST',))
 def webgestalt_page():
     start_vars = request.form
 
@@ -566,7 +563,7 @@ def webgestalt_page():
     return rendered_template
 
 
-@app.route("/geneweaver_page", methods=('POST',))
+@main_views.route("/geneweaver_page", methods=('POST',))
 def geneweaver_page():
     start_vars = request.form
 
@@ -583,7 +580,7 @@ def geneweaver_page():
     return rendered_template
 
 
-@app.route("/comparison_bar_chart", methods=('POST',))
+@main_views.route("/comparison_bar_chart", methods=('POST',))
 def comp_bar_chart_page():
     start_vars = request.form
 
@@ -604,12 +601,12 @@ def comp_bar_chart_page():
     return rendered_template
 
 
-@app.route("/mapping_results_container")
+@main_views.route("/mapping_results_container")
 def mapping_results_container_page():
     return render_template("mapping_results_container.html")
 
 
-@app.route("/loading", methods=('POST',))
+@main_views.route("/loading", methods=('POST',))
 def loading_page():
     initial_start_vars = request.form
     start_vars_container = {}
@@ -666,7 +663,7 @@ def loading_page():
     return rendered_template
 
 
-@app.route("/run_mapping", methods=('POST','GET'))
+@main_views.route("/run_mapping", methods=('POST','GET'))
 def mapping_results_page():
     if request.method == "GET" and (hash_of_inputs := request.args.get("hash")):
         hash_of_inputs = request.args.get("hash")
@@ -791,7 +788,7 @@ def mapping_results_page():
 
     return rendered_template
 
-@app.route("/cache_mapping_inputs", methods=('POST',))
+@main_views.route("/cache_mapping_inputs", methods=('POST',))
 def cache_mapping_inputs():
     ONE_MONTH = 60 * 60 * 24 * 30
     cache_id = request.form.get("inputs_hash")
@@ -800,7 +797,7 @@ def cache_mapping_inputs():
 
     return "Success"
 
-@app.route("/export_mapping_results", methods=('POST',))
+@main_views.route("/export_mapping_results", methods=('POST',))
 def export_mapping_results():
     file_path = request.form.get("results_path")
     results_csv = open(file_path, "r").read()
@@ -811,7 +808,7 @@ def export_mapping_results():
     return response
 
 
-@app.route("/export_corr_matrix", methods=('POST',))
+@main_views.route("/export_corr_matrix", methods=('POST',))
 def export_corr_matrix():
     file_path = request.form.get("export_filepath")
     file_name = request.form.get("export_filename")
@@ -823,7 +820,7 @@ def export_corr_matrix():
     return response
 
 
-@app.route("/export", methods=('POST',))
+@main_views.route("/export", methods=('POST',))
 def export():
     svg_xml = request.form.get("data", "Invalid data")
     filename = request.form.get("filename", "manhattan_plot_snp")
@@ -832,7 +829,7 @@ def export():
     return response
 
 
-@app.route("/export_pdf", methods=('POST',))
+@main_views.route("/export_pdf", methods=('POST',))
 def export_pdf():
     import cairosvg
     svg_xml = request.form.get("data", "Invalid data")
@@ -843,7 +840,7 @@ def export_pdf():
     return response
 
 
-@app.route("/network_graph", methods=('POST',))
+@main_views.route("/network_graph", methods=('POST',))
 def network_graph_page():
     start_vars = request.form
     traits = [trait.strip() for trait in start_vars['trait_list'].split(',')]
@@ -867,13 +864,13 @@ def __handle_correlation_error__(exc):
             "error-message": exc.args[0]
         })
 
-@app.route("/corr_compute", methods=('POST', 'GET'))
+@main_views.route("/corr_compute", methods=('POST', 'GET'))
 def corr_compute_page():
     with Redis.from_url(REDIS_URL, decode_responses=True) as rconn:
         if request.method == "POST":
             request_received = datetime.datetime.utcnow()
             filename=hmac.hmac_creation(f"request_form_{request_received.isoformat()}")
-            filepath = f"{TMPDIR}{filename}"
+            filepath = f"{get_setting(app, 'TMPDIR')}{filename}"
             with open(filepath, "wb") as pfile:
                 pickle.dump(request.form, pfile, protocol=pickle.HIGHEST_PROTOCOL)
                 job_id = jobs.queue(
@@ -912,7 +909,7 @@ def corr_compute_page():
         return render_template("loading_corrs.html")
 
 
-@app.route("/corr_matrix", methods=('POST',))
+@main_views.route("/corr_matrix", methods=('POST',))
 def corr_matrix_page():
     start_vars = request.form
     traits = [trait.strip() for trait in start_vars['trait_list'].split(',')]
@@ -927,7 +924,7 @@ def corr_matrix_page():
         return render_template("empty_collection.html", **{'tool': 'Correlation Matrix'})
 
 
-@app.route("/corr_scatter_plot")
+@main_views.route("/corr_scatter_plot")
 def corr_scatter_plot_page():
     template_vars = corr_scatter_plot.CorrScatterPlot(request.args)
     template_vars.js_data = json.dumps(template_vars.js_data,
@@ -936,21 +933,21 @@ def corr_scatter_plot_page():
     return render_template("corr_scatterplot.html", **template_vars.__dict__)
 
 
-@app.route("/snp_browser", methods=('GET',))
+@main_views.route("/snp_browser", methods=('GET',))
 def snp_browser_page():
     with database_connection() as conn, conn.cursor() as cursor:
         template_vars = snp_browser.SnpBrowser(cursor, request.args)
         return render_template("snp_browser.html", **template_vars.__dict__)
 
 
-@app.route("/db_info", methods=('GET',))
+@main_views.route("/db_info", methods=('GET',))
 def db_info_page():
     template_vars = InfoPage(request.args)
 
     return render_template("info_page.html", **template_vars.__dict__)
 
 
-@app.route("/snp_browser_table", methods=('GET',))
+@main_views.route("/snp_browser_table", methods=('GET',))
 def snp_browser_table():
     with database_connection() as conn, conn.cursor() as cursor:
         snp_table_data = snp_browser.SnpBrowser(cursor, request.args)
@@ -964,32 +961,32 @@ def snp_browser_table():
         return flask.jsonify(current_page)
 
 
-@app.route("/tutorial/WebQTLTour", methods=('GET',))
+@main_views.route("/tutorial/WebQTLTour", methods=('GET',))
 def tutorial_page():
     # ZS: Currently just links to GN1
     return redirect("http://gn1.genenetwork.org/tutorial/WebQTLTour/")
 
 
-@app.route("/tutorial/security", methods=('GET',))
+@main_views.route("/tutorial/security", methods=('GET',))
 def security_tutorial_page():
     # ZS: Currently just links to GN1
     return render_template("admin/security_help.html")
 
 
-@app.route("/submit_bnw", methods=('POST',))
+@main_views.route("/submit_bnw", methods=('POST',))
 def submit_bnw():
     return render_template("empty_collection.html", **{'tool': 'Correlation Matrix'})
 
 # Take this out or secure it before putting into production
 
 
-@app.route("/get_temp_data")
+@main_views.route("/get_temp_data")
 def get_temp_data():
     temp_uuid = request.args['key']
     return flask.jsonify(temp_data.TempData(temp_uuid).get_all())
 
 
-@app.route("/browser_input", methods=('GET',))
+@main_views.route("/browser_input", methods=('GET',))
 def browser_inputs():
     """  Returns JSON from tmp directory for the purescript genome browser"""
 
@@ -1017,10 +1014,10 @@ def json_default_handler(obj):
             type(obj), repr(obj)))
 
 
-@app.route("/admin/data-sample/diffs/")
+@main_views.route("/admin/data-sample/diffs/")
 @edit_access_required
 def display_diffs_admin():
-    TMPDIR = current_app.config.get("TMPDIR")
+    TMPDIR = app.config.get("TMPDIR")
     DIFF_DIR = f"{TMPDIR}/sample-data/diffs"
     files = []
     if os.path.exists(DIFF_DIR):
@@ -1031,9 +1028,9 @@ def display_diffs_admin():
                            files=files)
 
 
-@app.route("/user/data-sample/diffs/")
+@main_views.route("/user/data-sample/diffs/")
 def display_diffs_users():
-    TMPDIR = current_app.config.get("TMPDIR")
+    TMPDIR = app.config.get("TMPDIR")
     DIFF_DIR = f"{TMPDIR}/sample-data/diffs"
     files = []
     author = g.user_session.record.get(b'user_name').decode("utf-8")
@@ -1046,7 +1043,7 @@ def display_diffs_users():
                            files=files)
 
 
-@app.route("/genewiki/<symbol>")
+@main_views.route("/genewiki/<symbol>")
 def display_generif_page(symbol):
     """Fetch GeneRIF metadata from GN3 and display it"""
     entries = requests.get(
@@ -1062,7 +1059,7 @@ def display_generif_page(symbol):
     )
 
 
-@app.route("/dataset/<name>", methods=('GET',))
+@main_views.route("/dataset/<name>", methods=('GET',))
 def get_dataset(name):
     metadata = requests.get(
         urljoin(
@@ -1085,7 +1082,7 @@ def get_dataset(name):
     )
 
 
-@app.route("/publication/<name>", methods=('GET',))
+@main_views.route("/publication/<name>", methods=('GET',))
 def get_publication(name):
     metadata = requests.get(
         urljoin(
@@ -1098,7 +1095,7 @@ def get_publication(name):
     )
 
 
-@app.route("/phenotype/<name>", methods=('GET',))
+@main_views.route("/phenotype/<name>", methods=('GET',))
 def get_phenotype(name):
     metadata = requests.get(
         urljoin(