Diffstat (limited to 'wqflask/base/data_set')
 wqflask/base/data_set/__init__.py     | 10
 wqflask/base/data_set/datasetgroup.py | 22
 wqflask/base/data_set/datasettype.py  |  7
 wqflask/base/data_set/markers.py      | 10
 wqflask/base/data_set/utils.py        | 10
 5 files changed, 33 insertions(+), 26 deletions(-)
diff --git a/wqflask/base/data_set/__init__.py b/wqflask/base/data_set/__init__.py
index e49c6a93..ad51e47e 100644
--- a/wqflask/base/data_set/__init__.py
+++ b/wqflask/base/data_set/__init__.py
@@ -6,11 +6,14 @@ import pickle as pickle
 
 # 3rd-party imports
 from redis import Redis
+from flask import current_app as app
 
 # local imports
-from .dataset import DataSet
 from base import webqtlConfig
-from utility.tools import USE_REDIS
+from wqflask.database import database_connection
+from utility.configuration import get_setting_bool
+
+from .dataset import DataSet
 from .datasettype import DatasetType
 from .tempdataset import TempDataSet
 from .datasetgroup import DatasetGroup
@@ -18,7 +21,6 @@ from .utils import query_table_timestamp
 from .genotypedataset import GenotypeDataSet
 from .phenotypedataset import PhenotypeDataSet
 from .mrnaassaydataset import MrnaAssayDataSet
-from wqflask.database import database_connection
 
 # Used by create_database to instantiate objects
 # Each subclass will add to this
@@ -113,7 +115,7 @@ def datasets(group_name, this_group=None, redis_conn=Redis()):
                 dataset_menu.append(dict(tissue=tissue_name,
                                          datasets=[(dataset, dataset_short)]))
 
-    if USE_REDIS:
+    if get_setting_bool("USE_REDIS"):
         redis_conn.set(key, pickle.dumps(dataset_menu, pickle.HIGHEST_PROTOCOL))
         redis_conn.expire(key, 60 * 5)
 
diff --git a/wqflask/base/data_set/datasetgroup.py b/wqflask/base/data_set/datasetgroup.py
index 72577f38..90c59a1e 100644
--- a/wqflask/base/data_set/datasetgroup.py
+++ b/wqflask/base/data_set/datasetgroup.py
@@ -3,6 +3,7 @@
 import os
 import json
 
+from flask import current_app as app
 
 from base import webqtlConfig
 from .markers import Markers, HumanMarkers
@@ -11,11 +12,11 @@ from utility import gen_geno_ob
 from db import webqtlDatabaseFunction
 from maintenance import get_group_samplelists
 from wqflask.database import database_connection
-from utility.tools import (
+from utility.configuration import (
     locate,
-    USE_REDIS,
     flat_files,
     flat_file_exists,
+    get_setting_bool,
     locate_ignore_error)
 
 class DatasetGroup:
@@ -87,8 +88,8 @@ class DatasetGroup:
 
     def get_markers(self):
         def check_plink_gemma():
-            if flat_file_exists("mapping"):
-                MAPPING_PATH = flat_files("mapping") + "/"
+            if flat_file_exists(app, "mapping"):
+                MAPPING_PATH = flat_files(app, "mapping") + "/"
                 if os.path.isfile(MAPPING_PATH + self.name + ".bed"):
                     return True
             return False
@@ -117,6 +118,7 @@ class DatasetGroup:
 
     def get_study_samplelists(self):
         study_sample_file = locate_ignore_error(
+            app,
             self.name + ".json", 'study_sample_lists')
         try:
             f = open(study_sample_file)
@@ -137,13 +139,15 @@ class DatasetGroup:
     def get_samplelist(self, redis_conn):
         result = None
         key = "samplelist:v3:" + self.name
+        USE_REDIS = get_setting_bool(app, "USE_REDIS")
         if USE_REDIS:
             result = redis_conn.get(key)
 
         if result is not None:
             self.samplelist = json.loads(result)
         else:
-            genotype_fn = locate_ignore_error(self.name + ".geno", 'genotype')
+            genotype_fn = locate_ignore_error(
+                app, self.name + ".geno", 'genotype')
             if genotype_fn:
                 self.samplelist = get_group_samplelists.get_samplelist(
                     "geno", genotype_fn)
@@ -168,12 +172,12 @@ class DatasetGroup:
         # reaper barfs on unicode filenames, so here we ensure it's a string
         if self.genofile:
             if "RData" in self.genofile:  # ZS: This is a temporary fix; I need to change the way the JSON files that point to multiple genotype files are structured to point to other file types like RData
-                full_filename = str(
-                    locate(self.genofile.split(".")[0] + ".geno", 'genotype'))
+                full_filename = str(locate(
+                    app, self.genofile.split(".")[0] + ".geno", 'genotype'))
             else:
-                full_filename = str(locate(self.genofile, 'genotype'))
+                full_filename = str(locate(app, self.genofile, 'genotype'))
         else:
-            full_filename = str(locate(self.name + '.geno', 'genotype'))
+            full_filename = str(locate(app, self.name + '.geno', 'genotype'))
         genotype_1 = gen_geno_ob.genotype(full_filename)
 
         if genotype_1.type == "group" and self.parlist:
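
Throughout datasetgroup.py the path helpers (locate, flat_files, flat_file_exists, locate_ignore_error) now take the Flask app so that file locations come from application config rather than module-level globals. A rough sketch of what such an app-aware helper can look like, assuming a GENENETWORK_FILES config key:

    import os
    from flask import Flask

    def flat_files(app: Flask, subdir: str = "") -> str:
        # Resolve a data directory from app config; the config key is an assumption.
        base = app.config["GENENETWORK_FILES"]
        return os.path.join(base, subdir) if subdir else base

    def flat_file_exists(app: Flask, subdir: str) -> bool:
        # Directory-existence check built on the resolver above.
        return os.path.isdir(flat_files(app, subdir))
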
diff --git a/wqflask/base/data_set/datasettype.py b/wqflask/base/data_set/datasettype.py
index 05f0f564..ab36a797 100644
--- a/wqflask/base/data_set/datasettype.py
+++ b/wqflask/base/data_set/datasettype.py
@@ -4,11 +4,10 @@ import json
 import requests
 from typing import Optional, Dict
 
-
 from redis import Redis
+from flask import current_app as app
 
-
-from utility.tools import GN2_BASE_URL
+from utility.configuration import get_setting
 from wqflask.database import database_connection
 
 
@@ -41,7 +40,7 @@ class DatasetType:
             # emptied
             try:
                 data = json.loads(requests.get(
-                    GN2_BASE_URL + "/api/v_pre1/gen_dropdown",
+                    get_setting(app, "GN2_BASE_URL") + "/api/v_pre1/gen_dropdown",
                     timeout=5).content)
                 for _species in data['datasets']:
                     for group in data['datasets'][_species]:
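
datasettype.py now reads GN2_BASE_URL through get_setting(app, ...) before hitting the gen_dropdown API. A hedged sketch of that fetch, with the default value and error handling assumed rather than taken from the codebase:

    import json
    import requests
    from flask import current_app as app

    def fetch_gen_dropdown():
        # Base URL comes from Flask config; the fallback behaviour is an assumption.
        base_url = app.config.get("GN2_BASE_URL", "")
        try:
            response = requests.get(
                base_url + "/api/v_pre1/gen_dropdown", timeout=5)
            response.raise_for_status()
            return json.loads(response.content)
        except requests.RequestException:
            return None  # caller can fall back to querying the database directly
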
diff --git a/wqflask/base/data_set/markers.py b/wqflask/base/data_set/markers.py
index 6f56445e..2fa7cce0 100644
--- a/wqflask/base/data_set/markers.py
+++ b/wqflask/base/data_set/markers.py
@@ -2,16 +2,18 @@
 
 import math
 
-from utility.tools import locate, flat_files
+from flask import current_app as app
+
+from utility.configuration import locate, flat_files
 
 class Markers:
     """Todo: Build in cacheing so it saves us reading the same file more than once"""
 
     def __init__(self, name):
-        json_data_fh = open(locate(name + ".json", 'genotype/json'))
+        json_data_fh = open(locate(app, name + ".json", 'genotype/json'))
 
         markers = []
-        with open("%s/%s_snps.txt" % (flat_files('genotype/bimbam'), name), 'r') as bimbam_fh:
+        with open("%s/%s_snps.txt" % (flat_files(app, 'genotype/bimbam'), name), 'r') as bimbam_fh:
             if len(bimbam_fh.readline().split(", ")) > 2:
                 delimiter = ", "
             elif len(bimbam_fh.readline().split(",")) > 2:
@@ -73,7 +75,7 @@ class HumanMarkers(Markers):
     "Markers for humans ..."
 
     def __init__(self, name, specified_markers=[]):
-        marker_data_fh = open(flat_files('mapping') + '/' + name + '.bim')
+        marker_data_fh = open(flat_files(app, 'mapping') + '/' + name + '.bim')
         self.markers = []
         for line in marker_data_fh:
             splat = line.strip().split()
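
markers.py now resolves both the JSON marker files and the BIMBAM/PLINK flat files through the app-aware locate/flat_files helpers. As an illustration of the .bim parsing done in HumanMarkers, here is a small sketch; the dict keys mirror the ones used elsewhere in the class, but the exact fields kept are an assumption:

    def parse_bim_line(line: str) -> dict:
        # PLINK .bim columns: chromosome, marker name, genetic distance (cM),
        # base-pair position, allele 1, allele 2.
        chrom, name, _genetic_distance, position, *_alleles = line.strip().split()
        return {"chr": chrom, "name": name, "Mb": int(position) / 1_000_000}
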
diff --git a/wqflask/base/data_set/utils.py b/wqflask/base/data_set/utils.py
index 703fee04..465538af 100644
--- a/wqflask/base/data_set/utils.py
+++ b/wqflask/base/data_set/utils.py
@@ -6,9 +6,9 @@ import json
 import hashlib
 from typing import List
 
+from flask import current_app as app
 
-from utility.tools import SQL_URI
-from base.webqtlConfig import TMPDIR
+from utility.configuration import get_setting
 from wqflask.database import parse_db_url, database_connection
 
 def geno_mrna_confidentiality(ob):
@@ -27,7 +27,7 @@ def query_table_timestamp(dataset_type: str):
 
     # computation data and actions
     with database_connection() as conn, conn.cursor() as cursor:
-        fetch_db_name = parse_db_url(SQL_URI)
+        fetch_db_name = parse_db_url(get_setting("SQL_URI"))
         cursor.execute(
             "SELECT UPDATE_TIME FROM "
             "information_schema.tables "
@@ -57,7 +57,7 @@ def cache_dataset_results(dataset_name: str, dataset_type: str, samplelist: List
     samplelist_as_str = ",".join(samplelist)
 
     file_name = generate_hash_file(dataset_name, dataset_type, table_timestamp, samplelist_as_str)
-    file_path = os.path.join(TMPDIR, f"{file_name}.json")
+    file_path = os.path.join(app.config["WEBQTL_TMPDIR"], f"{file_name}.json")
 
     with open(file_path, "w") as file_handler:
         json.dump(query_results, file_handler)
@@ -70,7 +70,7 @@ def fetch_cached_results(dataset_name: str, dataset_type: str, samplelist: List)
     samplelist_as_str = ",".join(samplelist)
 
     file_name = generate_hash_file(dataset_name, dataset_type, table_timestamp, samplelist_as_str)
-    file_path = os.path.join(TMPDIR, f"{file_name}.json")
+    file_path = os.path.join(app.config["WEBQTL_TMPDIR"], f"{file_name}.json")
     try:
         with open(file_path, "r") as file_handler:
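
utils.py now takes the cache directory from app.config["WEBQTL_TMPDIR"] instead of the webqtlConfig TMPDIR constant. A minimal sketch of the hash-and-write caching pattern used by cache_dataset_results and fetch_cached_results; the hash recipe and helper names here are assumptions:

    import os
    import json
    import hashlib
    from flask import current_app as app

    def cache_file_path(*parts: str) -> str:
        # Derive a stable file name from the identifying parts
        # (dataset name, type, table timestamp, sample list).
        digest = hashlib.md5("".join(parts).encode()).hexdigest()
        return os.path.join(app.config["WEBQTL_TMPDIR"], f"{digest}.json")

    def write_cached_results(results, *parts: str) -> str:
        path = cache_file_path(*parts)
        with open(path, "w") as file_handler:
            json.dump(results, file_handler)
        return path

    def read_cached_results(*parts: str):
        try:
            with open(cache_file_path(*parts), "r") as file_handler:
                return json.load(file_handler)
        except FileNotFoundError:
            return None
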