author  Frederick Muriuki Muriithi  2023-06-22 12:12:26 +0300
committer  Frederick Muriuki Muriithi  2023-06-22 12:12:26 +0300
commit  7d669eed50a0e39eaa2b4a4769e5d9bbefdb997a (patch)
tree  585943056bd7df17e312da3a13a2d87ac12d259c
parent  490b0bf8cc5891a23c8850185d21987b5476ba4f (diff)
download  genenetwork2-7d669eed50a0e39eaa2b4a4769e5d9bbefdb997a.tar.gz
Fetch configs from app object not modules (decouple_tools_and_wqflask_app)
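Read configuration through the Flask app object at call time instead of importing
module-level constants from utility.tools. This decouples utility.tools from the
wqflask app module and means settings reflect the live app.config rather than
values frozen when utility.tools was first imported. Call sites now pass the app
(or flask.current_app) to get_setting, get_setting_bool and get_setting_int.

Illustrative sketch of the calling convention this change converges on (the
function name below is hypothetical; only the helper names and setting keys come
from the code in this patch):

    from flask import current_app as app
    from utility.tools import get_setting, get_setting_bool, get_setting_int

    def example_lookup():
        # Each lookup reads the live app config instead of a constant
        # captured at module import time.
        tmpdir = get_setting(app, "TMPDIR")
        use_redis = get_setting_bool(app, "USE_REDIS")
        port = get_setting_int(app, "SERVER_PORT")
        return tmpdir, use_redis, port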
-rw-r--r--  wqflask/base/data_set/__init__.py  5
-rw-r--r--  wqflask/base/data_set/datasetgroup.py  25
-rw-r--r--  wqflask/base/data_set/datasettype.py  6
-rw-r--r--  wqflask/base/data_set/utils.py  11
-rw-r--r--  wqflask/base/trait.py  8
-rw-r--r--  wqflask/base/webqtlCaseData.py  5
-rw-r--r--  wqflask/run_gunicorn.py  2
-rw-r--r--  wqflask/runserver.py  6
-rw-r--r--  wqflask/scripts/profile_corrs.py  3
-rw-r--r--  wqflask/utility/authentication_tools.py  8
-rw-r--r--  wqflask/utility/pillow_utils.py  5
-rw-r--r--  wqflask/utility/startup_config.py  18
-rw-r--r--  wqflask/wqflask/collect.py  6
-rw-r--r--  wqflask/wqflask/correlation/correlation_gn3_api.py  6
-rw-r--r--  wqflask/wqflask/correlation/pre_computes.py  16
-rw-r--r--  wqflask/wqflask/correlation/rust_correlation.py  18
-rw-r--r--  wqflask/wqflask/correlation_matrix/show_corr_matrix.py  6
-rw-r--r--  wqflask/wqflask/ctl/gn3_ctl_analysis.py  9
-rw-r--r--  wqflask/wqflask/do_search.py  1
-rw-r--r--  wqflask/wqflask/gsearch.py  5
-rw-r--r--  wqflask/wqflask/heatmap/heatmap.py  8
-rw-r--r--  wqflask/wqflask/marker_regression/display_mapping_results.py  7
-rw-r--r--  wqflask/wqflask/marker_regression/gemma_mapping.py  76
-rw-r--r--  wqflask/wqflask/marker_regression/plink_mapping.py  17
-rw-r--r--  wqflask/wqflask/marker_regression/qtlreaper_mapping.py  10
-rw-r--r--  wqflask/wqflask/marker_regression/rqtl_mapping.py  14
-rw-r--r--  wqflask/wqflask/marker_regression/run_mapping.py  11
-rw-r--r--  wqflask/wqflask/network_graph/network_graph.py  5
-rw-r--r--  wqflask/wqflask/oauth2/client.py  31
-rw-r--r--  wqflask/wqflask/oauth2/request_utils.py  7
-rw-r--r--  wqflask/wqflask/partial_correlations_views.py  8
-rw-r--r--  wqflask/wqflask/search_results.py  6
-rw-r--r--  wqflask/wqflask/show_trait/show_trait.py  4
-rw-r--r--  wqflask/wqflask/user_login.py  12
-rw-r--r--  wqflask/wqflask/views.py  54
-rw-r--r--  wqflask/wqflask/wgcna/gn3_wgcna.py  6
36 files changed, 220 insertions, 225 deletions
diff --git a/wqflask/base/data_set/__init__.py b/wqflask/base/data_set/__init__.py
index e49c6a93..27955f8a 100644
--- a/wqflask/base/data_set/__init__.py
+++ b/wqflask/base/data_set/__init__.py
@@ -6,11 +6,12 @@ import pickle as pickle
# 3rd-party imports
from redis import Redis
+from flask import current_app as app
# local imports
from .dataset import DataSet
from base import webqtlConfig
-from utility.tools import USE_REDIS
+from utility.tools import get_setting_bool
from .datasettype import DatasetType
from .tempdataset import TempDataSet
from .datasetgroup import DatasetGroup
@@ -113,7 +114,7 @@ def datasets(group_name, this_group=None, redis_conn=Redis()):
dataset_menu.append(dict(tissue=tissue_name,
datasets=[(dataset, dataset_short)]))
- if USE_REDIS:
+ if get_setting_bool(app, "USE_REDIS"):
redis_conn.set(key, pickle.dumps(dataset_menu, pickle.HIGHEST_PROTOCOL))
redis_conn.expire(key, 60 * 5)
diff --git a/wqflask/base/data_set/datasetgroup.py b/wqflask/base/data_set/datasetgroup.py
index 72577f38..10556dbf 100644
--- a/wqflask/base/data_set/datasetgroup.py
+++ b/wqflask/base/data_set/datasetgroup.py
@@ -3,6 +3,8 @@
import os
import json
+from flask import current_app as app
+
from base import webqtlConfig
from .markers import Markers, HumanMarkers
@@ -13,9 +15,10 @@ from maintenance import get_group_samplelists
from wqflask.database import database_connection
from utility.tools import (
locate,
- USE_REDIS,
flat_files,
+ get_setting,
flat_file_exists,
+ get_setting_bool,
locate_ignore_error)
class DatasetGroup:
@@ -87,8 +90,8 @@ class DatasetGroup:
def get_markers(self):
def check_plink_gemma():
- if flat_file_exists("mapping"):
- MAPPING_PATH = flat_files("mapping") + "/"
+ if flat_file_exists(app, "mapping"):
+ MAPPING_PATH = flat_files(app, "mapping") + "/"
if os.path.isfile(MAPPING_PATH + self.name + ".bed"):
return True
return False
@@ -117,7 +120,7 @@ class DatasetGroup:
def get_study_samplelists(self):
study_sample_file = locate_ignore_error(
- self.name + ".json", 'study_sample_lists')
+ app, self.name + ".json", 'study_sample_lists')
try:
f = open(study_sample_file)
except:
@@ -126,7 +129,7 @@ class DatasetGroup:
return study_samples
def get_genofiles(self):
- jsonfile = "%s/%s.json" % (webqtlConfig.GENODIR, self.name)
+ jsonfile = "%s/%s.json" % (get_setting(app, 'GENODIR'), self.name)
try:
f = open(jsonfile)
except:
@@ -137,20 +140,20 @@ class DatasetGroup:
def get_samplelist(self, redis_conn):
result = None
key = "samplelist:v3:" + self.name
- if USE_REDIS:
+ if get_setting_bool(app, "USE_REDIS"):
result = redis_conn.get(key)
if result is not None:
self.samplelist = json.loads(result)
else:
- genotype_fn = locate_ignore_error(self.name + ".geno", 'genotype')
+ genotype_fn = locate_ignore_error(app, self.name + ".geno", 'genotype')
if genotype_fn:
self.samplelist = get_group_samplelists.get_samplelist(
"geno", genotype_fn)
else:
self.samplelist = None
- if USE_REDIS:
+ if get_setting_bool(app, "USE_REDIS"):
redis_conn.set(key, json.dumps(self.samplelist))
redis_conn.expire(key, 60 * 5)
@@ -169,11 +172,11 @@ class DatasetGroup:
if self.genofile:
if "RData" in self.genofile: # ZS: This is a temporary fix; I need to change the way the JSON files that point to multiple genotype files are structured to point to other file types like RData
full_filename = str(
- locate(self.genofile.split(".")[0] + ".geno", 'genotype'))
+ locate(app, self.genofile.split(".")[0] + ".geno", 'genotype'))
else:
- full_filename = str(locate(self.genofile, 'genotype'))
+ full_filename = str(locate(app, self.genofile, 'genotype'))
else:
- full_filename = str(locate(self.name + '.geno', 'genotype'))
+ full_filename = str(locate(app, self.name + '.geno', 'genotype'))
genotype_1 = gen_geno_ob.genotype(full_filename)
if genotype_1.type == "group" and self.parlist:
diff --git a/wqflask/base/data_set/datasettype.py b/wqflask/base/data_set/datasettype.py
index 05f0f564..c8b78a43 100644
--- a/wqflask/base/data_set/datasettype.py
+++ b/wqflask/base/data_set/datasettype.py
@@ -6,9 +6,9 @@ from typing import Optional, Dict
from redis import Redis
+from flask import current_app as app
-
-from utility.tools import GN2_BASE_URL
+from utility.tools import get_setting
from wqflask.database import database_connection
@@ -41,7 +41,7 @@ class DatasetType:
# emptied
try:
data = json.loads(requests.get(
- GN2_BASE_URL + "/api/v_pre1/gen_dropdown",
+ get_setting(app, "GN2_BASE_URL") + "/api/v_pre1/gen_dropdown",
timeout=5).content)
for _species in data['datasets']:
for group in data['datasets'][_species]:
diff --git a/wqflask/base/data_set/utils.py b/wqflask/base/data_set/utils.py
index 703fee04..bb17a6c7 100644
--- a/wqflask/base/data_set/utils.py
+++ b/wqflask/base/data_set/utils.py
@@ -6,9 +6,10 @@ import json
import hashlib
from typing import List
+from flask import current_app as app
-from utility.tools import SQL_URI
-from base.webqtlConfig import TMPDIR
+
+from utility.tools import get_setting
from wqflask.database import parse_db_url, database_connection
def geno_mrna_confidentiality(ob):
@@ -27,7 +28,7 @@ def query_table_timestamp(dataset_type: str):
# computation data and actions
with database_connection() as conn, conn.cursor() as cursor:
- fetch_db_name = parse_db_url(SQL_URI)
+ fetch_db_name = parse_db_url(get_setting(app, "SQL_URI"))
cursor.execute(
"SELECT UPDATE_TIME FROM "
"information_schema.tables "
@@ -57,7 +58,7 @@ def cache_dataset_results(dataset_name: str, dataset_type: str, samplelist: List
samplelist_as_str = ",".join(samplelist)
file_name = generate_hash_file(dataset_name, dataset_type, table_timestamp, samplelist_as_str)
- file_path = os.path.join(TMPDIR, f"{file_name}.json")
+ file_path = os.path.join(get_setting(app, "TMPDIR"), f"{file_name}.json")
with open(file_path, "w") as file_handler:
json.dump(query_results, file_handler)
@@ -70,7 +71,7 @@ def fetch_cached_results(dataset_name: str, dataset_type: str, samplelist: List)
samplelist_as_str = ",".join(samplelist)
file_name = generate_hash_file(dataset_name, dataset_type, table_timestamp, samplelist_as_str)
- file_path = os.path.join(TMPDIR, f"{file_name}.json")
+ file_path = os.path.join(get_setting(app, "TMPDIR"), f"{file_name}.json")
try:
with open(file_path, "r") as file_handler:
diff --git a/wqflask/base/trait.py b/wqflask/base/trait.py
index 37085448..21b2a716 100644
--- a/wqflask/base/trait.py
+++ b/wqflask/base/trait.py
@@ -7,7 +7,7 @@ from base import webqtlConfig
from base.webqtlCaseData import webqtlCaseData
from base.data_set import create_dataset
from utility.authentication_tools import check_resource_availability
-from utility.tools import GN2_BASE_URL
+from utility.tools import get_setting
from utility.redis_tools import get_redis_conn, get_resource_id
-from flask import g, request, url_for
+from flask import g, request, url_for, current_app as app
@@ -173,11 +173,11 @@ class GeneralTrait:
alias = 'Not available'
if self.symbol:
human_response = requests.get(
- GN2_BASE_URL + "gn3/gene/aliases/" + self.symbol.upper())
+ get_setting(app, "GN2_BASE_URL") + "gn3/gene/aliases/" + self.symbol.upper())
mouse_response = requests.get(
- GN2_BASE_URL + "gn3/gene/aliases/" + self.symbol.capitalize())
+ get_setting(app, "GN2_BASE_URL") + "gn3/gene/aliases/" + self.symbol.capitalize())
other_response = requests.get(
- GN2_BASE_URL + "gn3/gene/aliases/" + self.symbol.lower())
+ get_setting(app, "GN2_BASE_URL") + "gn3/gene/aliases/" + self.symbol.lower())
if human_response and mouse_response and other_response:
alias_list = json.loads(human_response.content) + json.loads(
diff --git a/wqflask/base/webqtlCaseData.py b/wqflask/base/webqtlCaseData.py
index dd6fad04..d144a342 100644
--- a/wqflask/base/webqtlCaseData.py
+++ b/wqflask/base/webqtlCaseData.py
@@ -21,11 +21,6 @@
# Created by GeneNetwork Core Team 2010/08/10
-import utility.tools
-
-utility.tools.show_settings()
-
-
class webqtlCaseData:
"""one case data in one trait"""
diff --git a/wqflask/run_gunicorn.py b/wqflask/run_gunicorn.py
index 03f310eb..df2849cb 100644
--- a/wqflask/run_gunicorn.py
+++ b/wqflask/run_gunicorn.py
@@ -12,7 +12,7 @@ print("===> Starting up Gunicorn process")
from wqflask import app
from utility.startup_config import app_config
-app_config()
+app_config(app)
@app.route("/gunicorn")
diff --git a/wqflask/runserver.py b/wqflask/runserver.py
index fee29be1..02e525bb 100644
--- a/wqflask/runserver.py
+++ b/wqflask/runserver.py
@@ -9,7 +9,7 @@
from wqflask import app
from utility.startup_config import app_config
-from utility.tools import WEBSERVER_MODE, SERVER_PORT
+from utility.tools import get_setting, get_setting_int
import logging
@@ -18,10 +18,12 @@ GREEN = '\033[92m'
BOLD = '\033[1m'
ENDC = '\033[0m'
-app_config()
+app_config(app)
werkzeug_logger = logging.getLogger('werkzeug')
+WEBSERVER_MODE = get_setting(app, "WEBSERVER_MODE")
+SERVER_PORT = get_setting_int(app, "SERVER_PORT")
if WEBSERVER_MODE == 'DEBUG':
app.debug = True
app.run(host='0.0.0.0',
diff --git a/wqflask/scripts/profile_corrs.py b/wqflask/scripts/profile_corrs.py
index a42e2935..531fc8f7 100644
--- a/wqflask/scripts/profile_corrs.py
+++ b/wqflask/scripts/profile_corrs.py
@@ -5,8 +5,6 @@ import cProfile
from flask import g, request
-from utility.startup_config import app_config
-
from wqflask import app
from wqflask.user_session import UserSession
from wqflask.correlation.correlation_gn3_api import compute_correlation
@@ -60,7 +58,6 @@ if __name__ == "__main__":
"Entry point for profiler script"
return dump_stats(profile_corrs())
- app_config()
with app.app_context():
with app.test_request_context("/corr_compute", data=simulated_form()):
g.user_session = UserSession()
diff --git a/wqflask/utility/authentication_tools.py b/wqflask/utility/authentication_tools.py
index 7d80b3fb..c42f5414 100644
--- a/wqflask/utility/authentication_tools.py
+++ b/wqflask/utility/authentication_tools.py
@@ -1,7 +1,7 @@
import json
import requests
-from flask import g
+from flask import g, current_app as app
from wqflask.database import database_connection
from base import webqtlConfig
@@ -9,7 +9,7 @@ from utility.redis_tools import (get_redis_conn,
get_resource_info,
get_resource_id,
add_resource)
-from utility.tools import GN_PROXY_URL
+from utility.tools import get_setting
Redis = get_redis_conn()
@@ -37,7 +37,7 @@ def check_resource_availability(dataset, user_id, trait_id=None):
return webqtlConfig.SUPER_PRIVILEGES
response = None
- the_url = f"{GN_PROXY_URL}available?resource={resource_id}&user={user_id}"
+ the_url = f"{get_setting(app, 'GN_PROXY_URL')}available?resource={resource_id}&user={user_id}"
try:
response = json.loads(requests.get(the_url).content)
except:
@@ -93,7 +93,7 @@ def get_group_code(dataset):
def check_admin(resource_id=None):
- the_url = GN_PROXY_URL + "available?resource={}&user={}".format(
+    the_url = get_setting(app, 'GN_PROXY_URL') + "available?resource={}&user={}".format(
resource_id, g.user_session.user_id)
try:
response = json.loads(requests.get(the_url).content)['admin']
diff --git a/wqflask/utility/pillow_utils.py b/wqflask/utility/pillow_utils.py
index e302df18..524a2be1 100644
--- a/wqflask/utility/pillow_utils.py
+++ b/wqflask/utility/pillow_utils.py
@@ -1,6 +1,7 @@
+from flask import current_app as app
from PIL import Image, ImageColor, ImageDraw, ImageFont
-from utility.tools import TEMPDIR
+from utility.tools import get_setting
BLACK = ImageColor.getrgb("black")
WHITE = ImageColor.getrgb("white")
@@ -15,7 +16,7 @@ def draw_rotated_text(canvas, text, font, xy, fill=BLACK, angle=-90):
draw_text = ImageDraw.Draw(tmp_img)
draw_text.text(text=text, xy=(0, 0), font=font, fill=fill)
tmp_img2 = tmp_img.rotate(angle, expand=1)
- tmp_img2.save("/{0}/{1}.png".format(TEMPDIR, text), format="png")
+ tmp_img2.save("/{0}/{1}.png".format(get_setting(app, 'TEMPDIR'), text), format="png")
canvas.paste(im=tmp_img2, box=tuple([int(i) for i in xy]))
# def draw_open_polygon(canvas: Image, xy: tuple, fill: ImageColor=WHITE, outline: ImageColor=BLACK):
diff --git a/wqflask/utility/startup_config.py b/wqflask/utility/startup_config.py
index 69cac124..3ff72518 100644
--- a/wqflask/utility/startup_config.py
+++ b/wqflask/utility/startup_config.py
@@ -1,7 +1,3 @@
-
-from wqflask import app
-
-from utility.tools import WEBSERVER_MODE
from utility.tools import show_settings
from utility.tools import get_setting_int
from utility.tools import get_setting
@@ -14,28 +10,28 @@ BOLD = '\033[1m'
ENDC = '\033[0m'
-def app_config():
+def app_config(app):
app.config['SESSION_TYPE'] = app.config.get('SESSION_TYPE', 'filesystem')
if not app.config.get('SECRET_KEY'):
import os
app.config['SECRET_KEY'] = str(os.urandom(24))
- mode = WEBSERVER_MODE
+ mode = get_setting(app, "WEBSERVER_MODE")
if mode in ["DEV", "DEBUG"]:
app.config['TEMPLATES_AUTO_RELOAD'] = True
if mode == "DEBUG":
app.debug = True
print("==========================================")
- show_settings()
+ show_settings(app)
- port = get_setting_int("SERVER_PORT")
+ port = get_setting_int(app, "SERVER_PORT")
- if get_setting_bool("USE_GN_SERVER"):
+ if get_setting_bool(app, "USE_GN_SERVER"):
print(f"GN2 API server URL is [{BLUE}GN_SERVER_URL{ENDC}]")
import requests
- page = requests.get(get_setting("GN_SERVER_URL"))
+ page = requests.get(get_setting(app, "GN_SERVER_URL"))
if page.status_code != 200:
raise Exception("API server not found!")
print(f"GN2 is running. Visit {BLUE}"
f"[http://localhost:{str(port)}/{ENDC}]"
- f"({get_setting('WEBSERVER_URL')})")
+ f"({get_setting(app, 'WEBSERVER_URL')})")
diff --git a/wqflask/wqflask/collect.py b/wqflask/wqflask/collect.py
index 8f19b374..8849bd77 100644
--- a/wqflask/wqflask/collect.py
+++ b/wqflask/wqflask/collect.py
@@ -16,7 +16,7 @@ from flask import current_app
from wqflask import app
from utility import hmac
from utility.formatting import numify
-from utility.tools import GN_SERVER_URL, TEMPDIR
+from utility.tools import get_setting
from utility.redis_tools import get_redis_conn
from base.trait import create_trait
@@ -308,7 +308,7 @@ def trait_info_str(trait):
def import_collection():
import_file = request.files['import_file']
if import_file.filename != '':
- file_path = os.path.join(TEMPDIR, import_file.filename)
+ file_path = os.path.join(get_setting(app, "TEMPDIR"), import_file.filename)
import_file.save(file_path)
collection_csv = open(file_path, "r")
traits = [row.strip() for row in collection_csv if row[0] != "#"]
@@ -363,7 +363,7 @@ def view_collection():
collection_info = dict(
trait_obs=trait_obs,
uc=uc,
- heatmap_data_url=urljoin(GN_SERVER_URL, "heatmaps/clustered"))
+ heatmap_data_url=urljoin(get_setting(app, "GN_SERVER_URL"), "heatmaps/clustered"))
if "json" in params:
return json.dumps(json_version)
diff --git a/wqflask/wqflask/correlation/correlation_gn3_api.py b/wqflask/wqflask/correlation/correlation_gn3_api.py
index 64a17548..84f3697b 100644
--- a/wqflask/wqflask/correlation/correlation_gn3_api.py
+++ b/wqflask/wqflask/correlation/correlation_gn3_api.py
@@ -3,7 +3,9 @@ import json
import time
from functools import wraps
-from utility.tools import SQL_URI
+from flask import current_app as app
+
+from utility.tools import get_setting
from wqflask.correlation import correlation_functions
from base import data_set
@@ -146,7 +148,7 @@ def lit_for_trait_list(corr_results, this_dataset, this_trait):
geneid_dict = {trait_name: geneid for (trait_name, geneid) in geneid_dict.items() if
trait_lists.get(trait_name)}
- with database_connection(SQL_URI) as conn:
+ with database_connection(get_setting(app, "SQL_URI")) as conn:
correlation_results = compute_all_lit_correlation(
conn=conn, trait_lists=list(geneid_dict.items()),
species=species, gene_id=this_trait_geneid)
diff --git a/wqflask/wqflask/correlation/pre_computes.py b/wqflask/wqflask/correlation/pre_computes.py
index 2831bd39..6115e09b 100644
--- a/wqflask/wqflask/correlation/pre_computes.py
+++ b/wqflask/wqflask/correlation/pre_computes.py
@@ -8,9 +8,9 @@ import lmdb
import pickle
from pathlib import Path
+from wqflask import app
+from utility.tools import get_setting
from base.data_set import query_table_timestamp
-from base.webqtlConfig import TEXTDIR
-from base.webqtlConfig import TMPDIR
from json.decoder import JSONDecodeError
@@ -18,7 +18,7 @@ def cache_trait_metadata(dataset_name, data):
try:
- with lmdb.open(os.path.join(TMPDIR,f"metadata_{dataset_name}"),map_size=20971520) as env:
+ with lmdb.open(os.path.join(get_setting(app, 'TMPDIR'),f"metadata_{dataset_name}"),map_size=20971520) as env:
with env.begin(write=True) as txn:
data_bytes = pickle.dumps(data)
txn.put(f"{dataset_name}".encode(), data_bytes)
@@ -31,7 +31,7 @@ def cache_trait_metadata(dataset_name, data):
def read_trait_metadata(dataset_name):
try:
- with lmdb.open(os.path.join(TMPDIR,f"metadata_{dataset_name}"),
+ with lmdb.open(os.path.join(get_setting(app, 'TMPDIR'),f"metadata_{dataset_name}"),
readonly=True, lock=False) as env:
with env.begin() as txn:
db_name = txn.get(dataset_name.encode())
@@ -44,7 +44,7 @@ def fetch_all_cached_metadata(dataset_name):
"""in a gvein dataset fetch all the traits metadata"""
file_name = generate_filename(dataset_name, suffix="metadata")
- file_path = Path(TMPDIR, file_name)
+ file_path = Path(get_setting(app, 'TMPDIR'), file_name)
try:
with open(file_path, "r+") as file_handler:
@@ -84,7 +84,7 @@ def generate_filename(*args, suffix="", file_ext="json"):
-def fetch_text_file(dataset_name, conn, text_dir=TMPDIR):
+def fetch_text_file(dataset_name, conn, text_dir=get_setting(app, 'TMPDIR')):
"""fetch textfiles with strain vals if exists"""
def __file_scanner__(text_dir, target_file):
@@ -100,7 +100,7 @@ def fetch_text_file(dataset_name, conn, text_dir=TMPDIR):
try:
# checks first for recently generated textfiles if not use gn1 datamatrix
- return __file_scanner__(text_dir, results[0]) or __file_scanner__(TEXTDIR, results[0])
+ return __file_scanner__(text_dir, results[0]) or __file_scanner__(get_setting(app, 'TEXTDIR'), results[0])
except Exception:
pass
@@ -126,7 +126,7 @@ def read_text_file(sample_dict, file_path):
return (sample_vals, [[line[i] for i in _posit] for line in csv_reader])
-def write_db_to_textfile(db_name, conn, text_dir=TMPDIR):
+def write_db_to_textfile(db_name, conn, text_dir=get_setting(app, 'TMPDIR')):
def __sanitise_filename__(filename):
ttable = str.maketrans({" ": "_", "/": "_", "\\": "_"})
diff --git a/wqflask/wqflask/correlation/rust_correlation.py b/wqflask/wqflask/correlation/rust_correlation.py
index 41dd77a1..492a4360 100644
--- a/wqflask/wqflask/correlation/rust_correlation.py
+++ b/wqflask/wqflask/correlation/rust_correlation.py
@@ -2,7 +2,9 @@
import json
from functools import reduce
-from utility.tools import SQL_URI
+from flask import current_app as app
+
+from utility.tools import get_setting
from utility.db_tools import mescape
from utility.db_tools import create_in_clause
from wqflask.correlation.correlation_functions\
@@ -30,7 +32,7 @@ def query_probes_metadata(dataset, trait_list):
if not bool(trait_list) or dataset.type != "ProbeSet":
return []
- with database_connection(SQL_URI) as conn:
+ with database_connection(get_setting(app, "SQL_URI")) as conn:
with conn.cursor() as cursor:
query = """
@@ -106,7 +108,7 @@ def chunk_dataset(dataset, steps, name):
ProbeSetXRef.ProbeSetId = ProbeSet.Id
""".format(name)
- with database_connection(SQL_URI) as conn:
+ with database_connection(get_setting(app, "SQL_URI")) as conn:
with conn.cursor() as curr:
curr.execute(query)
traits_name_dict = dict(curr.fetchall())
@@ -130,7 +132,7 @@ def compute_top_n_sample(start_vars, dataset, trait_list):
sample_data=json.loads(samples_vals),
dataset_samples=dataset.group.all_samples_ordered())
- with database_connection(SQL_URI) as conn:
+ with database_connection(get_setting(app, "SQL_URI")) as conn:
with conn.cursor() as curr:
curr.execute(
"""
@@ -148,7 +150,7 @@ def compute_top_n_sample(start_vars, dataset, trait_list):
if len(trait_list) == 0:
return {}
- with database_connection(SQL_URI) as conn:
+ with database_connection(get_setting(app, "SQL_URI")) as conn:
with conn.cursor() as curr:
# fetching strain data in bulk
query = (
@@ -184,7 +186,7 @@ def compute_top_n_lit(corr_results, target_dataset, this_trait) -> dict:
geneid_dict = {trait_name: geneid for (trait_name, geneid)
in geneid_dict.items() if
corr_results.get(trait_name)}
- with database_connection(SQL_URI) as conn:
+ with database_connection(get_setting(app, "SQL_URI")) as conn:
return reduce(
lambda acc, corr: {**acc, **corr},
compute_all_lit_correlation(
@@ -258,7 +260,7 @@ def __compute_sample_corr__(
return {}
if target_dataset.type == "ProbeSet" and start_vars.get("use_cache") == "true":
- with database_connection(SQL_URI) as conn:
+ with database_connection(get_setting(app, "SQL_URI")) as conn:
file_path = fetch_text_file(target_dataset.name, conn)
if file_path:
(sample_vals, target_data) = read_text_file(
@@ -339,7 +341,7 @@ def __compute_lit_corr__(
(this_trait_geneid, geneid_dict, species) = do_lit_correlation(
this_trait, target_dataset)
- with database_connection(SQL_URI) as conn:
+ with database_connection(get_setting(app, "SQL_URI")) as conn:
return reduce(
lambda acc, lit: {**acc, **lit},
compute_all_lit_correlation(
diff --git a/wqflask/wqflask/correlation_matrix/show_corr_matrix.py b/wqflask/wqflask/correlation_matrix/show_corr_matrix.py
index 617f5c2e..b40a5897 100644
--- a/wqflask/wqflask/correlation_matrix/show_corr_matrix.py
+++ b/wqflask/wqflask/correlation_matrix/show_corr_matrix.py
@@ -24,10 +24,12 @@ import string
import numpy as np
import scipy
+from flask import current_app as app
+
from base.data_set import create_dataset
-from base.webqtlConfig import GENERATED_TEXT_DIR
+from utility.tools import get_setting
from utility.helper_functions import get_trait_db_obs
from utility.corr_result_helpers import normalize_values
from utility.redis_tools import get_redis_conn
@@ -225,7 +227,7 @@ def export_corr_matrix(corr_results):
''.join(random.choice(string.ascii_uppercase + string.digits)
for _ in range(6))
matrix_export_path = "{}{}.csv".format(
- GENERATED_TEXT_DIR, corr_matrix_filename)
+ get_setting(app, 'GENERATED_TEXT_DIR'), corr_matrix_filename)
with open(matrix_export_path, "w+") as output_file:
output_file.write(
"Time/Date: " + datetime.datetime.now().strftime("%x / %X") + "\n")
diff --git a/wqflask/wqflask/ctl/gn3_ctl_analysis.py b/wqflask/wqflask/ctl/gn3_ctl_analysis.py
index 8f790597..637b1136 100644
--- a/wqflask/wqflask/ctl/gn3_ctl_analysis.py
+++ b/wqflask/wqflask/ctl/gn3_ctl_analysis.py
@@ -1,9 +1,10 @@
import requests
import itertools
+from flask import current_app as app
+
from utility import genofile_parser
-from utility.tools import GN3_LOCAL_URL
-from utility.tools import locate
+from utility.tools import locate, get_setting
from base.trait import create_trait
from base.trait import retrieve_sample_data
@@ -33,7 +34,7 @@ def parse_geno_data(dataset_group_name) -> dict:
@returns : dict with keys genotypes,markernames & individuals
"""
- genofile_location = locate(dataset_group_name + ".geno", "genotype")
+ genofile_location = locate(app, dataset_group_name + ".geno", "genotype")
parser = genofile_parser.ConvertGenoFile(genofile_location)
parser.process_csv()
markers = []
@@ -100,7 +101,7 @@ def parse_form_data(form_data: dict):
def run_ctl(requestform):
"""function to make an api call
to gn3 and run ctl"""
- ctl_api = f"{GN3_LOCAL_URL}/api/ctl/run_ctl"
+ ctl_api = f"{get_setting(app, 'GN3_LOCAL_URL')}/api/ctl/run_ctl"
form_data = parse_form_data(requestform.to_dict())
trait_db_list = form_data["trait_db_list"]
diff --git a/wqflask/wqflask/do_search.py b/wqflask/wqflask/do_search.py
index fbeb7a49..b5ddb1dc 100644
--- a/wqflask/wqflask/do_search.py
+++ b/wqflask/wqflask/do_search.py
@@ -11,7 +11,6 @@ from pprint import pformat as pf
import sys
from db import webqtlDatabaseFunction
-from utility.tools import GN2_BASE_URL
class DoSearch:
diff --git a/wqflask/wqflask/gsearch.py b/wqflask/wqflask/gsearch.py
index 72c55153..a9972ac0 100644
--- a/wqflask/wqflask/gsearch.py
+++ b/wqflask/wqflask/gsearch.py
@@ -3,10 +3,11 @@ from urllib.parse import urlencode, urljoin
from pymonad.maybe import Just, Maybe
from pymonad.tools import curry
import requests
+from flask import current_app as app
from gn3.monads import MonadicDict
from utility.hmac import hmac_creation
-from utility.tools import GN3_LOCAL_URL
+from utility.tools import get_setting
from base import webqtlConfig
# KLUDGE: Due to the lack of pagination, we hard-limit the maximum
@@ -29,7 +30,7 @@ class GSearch:
convert_lod = lambda x: x / 4.61
self.trait_list = []
for i, trait in enumerate(requests.get(
- urljoin(GN3_LOCAL_URL, "/api/search?" + urlencode({"query": self.terms,
+ urljoin(get_setting(app, "GN3_LOCAL_URL"), "/api/search?" + urlencode({"query": self.terms,
"type": self.type,
"per_page": MAX_SEARCH_RESULTS}))).json()):
trait = MonadicDict(trait)
diff --git a/wqflask/wqflask/heatmap/heatmap.py b/wqflask/wqflask/heatmap/heatmap.py
index 8ef85d3c..441e00b4 100644
--- a/wqflask/wqflask/heatmap/heatmap.py
+++ b/wqflask/wqflask/heatmap/heatmap.py
@@ -5,7 +5,7 @@ from base import species
from base import webqtlConfig
from utility import helper_functions
-from utility.tools import flat_files, REAPER_COMMAND, TEMPDIR
+from utility.tools import flat_files, get_setting
from redis import Redis
-from flask import Flask, g
+from flask import Flask, g, current_app as app
@@ -119,9 +119,9 @@ class Heatmap:
''.join(random.choice(string.ascii_uppercase + string.digits)
for _ in range(6))
- reaper_command = REAPER_COMMAND + ' --geno {0}/{1}.geno --traits {2}/gn2/{3}.txt -n 1000 -o {4}{5}.txt'.format(flat_files('genotype'),
+ reaper_command = get_setting(app, 'REAPER_COMMAND') + ' --geno {0}/{1}.geno --traits {2}/gn2/{3}.txt -n 1000 -o {4}{5}.txt'.format(flat_files(app, 'genotype'),
genofile_name,
- TEMPDIR,
+ get_setting(app, 'TEMPDIR'),
trait_filename,
webqtlConfig.GENERATED_IMAGE_DIR,
output_filename)
@@ -145,7 +145,7 @@ class Heatmap:
def gen_pheno_txt_file(samples, vals, filename):
"""Generates phenotype file for GEMMA"""
- with open("{0}/gn2/{1}.txt".format(TEMPDIR, filename), "w") as outfile:
+ with open("{0}/gn2/{1}.txt".format(get_setting(app, 'TEMPDIR'), filename), "w") as outfile:
outfile.write("Trait\t")
filtered_sample_list = []
diff --git a/wqflask/wqflask/marker_regression/display_mapping_results.py b/wqflask/wqflask/marker_regression/display_mapping_results.py
index bf89b0db..69432677 100644
--- a/wqflask/wqflask/marker_regression/display_mapping_results.py
+++ b/wqflask/wqflask/marker_regression/display_mapping_results.py
@@ -35,13 +35,14 @@ import os
import json
import htmlgen as HT
+from flask import current_app as app
from base import webqtlConfig
from base.GeneralObject import GeneralObject
from utility import webqtlUtil
from utility import Plot
from wqflask.interval_analyst import GeneUtil
-from base.webqtlConfig import GENERATED_IMAGE_DIR
+from utility.tools import get_setting
from utility.pillow_utils import draw_rotated_text, draw_open_polygon
from wqflask.database import database_connection
@@ -607,7 +608,7 @@ class DisplayMappingResults:
self.filename = webqtlUtil.genRandStr("Itvl_")
intCanvas.save(
"{}.png".format(
- os.path.join(webqtlConfig.GENERATED_IMAGE_DIR, self.filename)),
+ os.path.join(get_setting(app, 'GENERATED_IMAGE_DIR'), self.filename)),
format='png')
intImg = HtmlGenWrapper.create_image_tag(
src="/image/{}.png".format(self.filename),
@@ -622,7 +623,7 @@ class DisplayMappingResults:
intCanvasX2, startMb=self.startMb, endMb=self.endMb, showLocusForm=showLocusForm, zoom=2)
intCanvasX2.save(
"{}.png".format(
- os.path.join(webqtlConfig.GENERATED_IMAGE_DIR,
+ os.path.join(get_setting(app, 'GENERATED_IMAGE_DIR'),
self.filename + "X2")),
format='png')
diff --git a/wqflask/wqflask/marker_regression/gemma_mapping.py b/wqflask/wqflask/marker_regression/gemma_mapping.py
index 4420796c..c90b62b8 100644
--- a/wqflask/wqflask/marker_regression/gemma_mapping.py
+++ b/wqflask/wqflask/marker_regression/gemma_mapping.py
@@ -4,21 +4,23 @@ import string
import random
import json
+from flask import current_app as app
+
from base import webqtlConfig
from base.trait import create_trait
from base.data_set import create_dataset
from utility.redis_tools import get_redis_conn
-from utility.tools import flat_files, assert_file
-from utility.tools import GEMMA_WRAPPER_COMMAND
-from utility.tools import TEMPDIR
-from utility.tools import WEBSERVER_MODE
+from utility.tools import flat_files, assert_file, get_setting
from wqflask.database import database_connection
from gn3.computations.gemma import generate_hash_of_string
-GEMMAOPTS = "-debug"
-if WEBSERVER_MODE == 'PROD':
- GEMMAOPTS = "-no-check"
+def gemma_opts(app) -> str:
+ """Retrieve the gemma options."""
+ GEMMAOPTS = "-debug"
+ if get_setting(app, "WEBSERVER_MODE") == 'PROD':
+ GEMMAOPTS = "-no-check"
+ return GEMMAOPTS
def generate_random_n_string(n):
@@ -59,66 +61,66 @@ def run_gemma(this_trait, this_dataset, samples, vals, covariates, use_loco,
if covariates != "":
covar_filename = gen_covariates_file(this_dataset, covariates, samples)
if str(use_loco).lower() == "true":
- bimbam_dir = flat_files('genotype/bimbam')
+ bimbam_dir = flat_files(app, 'genotype/bimbam')
geno_filepath = assert_file(
f"{bimbam_dir}/{genofile_name}_geno.txt")
- pheno_filepath = f"{TEMPDIR}/gn2/{pheno_filename}.txt"
+ pheno_filepath = f"{get_setting(app, 'TEMPDIR')}/gn2/{pheno_filename}.txt"
snps_filepath = assert_file(
f"{bimbam_dir}/{genofile_name}_snps.txt")
- k_json_output_filepath = f"{TEMPDIR}/gn2/{k_output_filename}.json"
- generate_k_command = (f"{GEMMA_WRAPPER_COMMAND} --json --loco "
- f"{chr_list_string} -- {GEMMAOPTS} "
+ k_json_output_filepath = f"{get_setting(app, 'TEMPDIR')}/gn2/{k_output_filename}.json"
+ generate_k_command = (f"{get_setting(app, 'GEMMA_WRAPPER_COMMAND')} --json --loco "
+ f"{chr_list_string} -- {gemma_opts(app)} "
f"-g {geno_filepath} -p "
f"{pheno_filepath} -a "
f"{snps_filepath} -gk > "
f"{k_json_output_filepath}")
os.system(generate_k_command)
- gemma_command = (f"{GEMMA_WRAPPER_COMMAND} --json --loco "
+ gemma_command = (f"{get_setting(app, 'GEMMA_WRAPPER_COMMAND')} --json --loco "
f"--input {k_json_output_filepath} "
- f"-- {GEMMAOPTS} "
+ f"-- {gemma_opts(app)} "
f"-g {geno_filepath} "
f"-p {pheno_filepath} ")
if covariates != "":
- gemma_command += (f"-c {flat_files('mapping')}/"
+ gemma_command += (f"-c {flat_files(app, 'mapping')}/"
f"{covar_filename}.txt "
- f"-a {flat_files('genotype/bimbam')}/"
+ f"-a {flat_files(app, 'genotype/bimbam')}/"
f"{genofile_name}_snps.txt "
- f"-lmm 9 -maf {maf} > {TEMPDIR}/gn2/"
+ f"-lmm 9 -maf {maf} > {get_setting(app, 'TEMPDIR')}/gn2/"
f"{gwa_output_filename}.json")
else:
- gemma_command += (f"-a {flat_files('genotype/bimbam')}/"
+ gemma_command += (f"-a {flat_files(app, 'genotype/bimbam')}/"
f"{genofile_name}_snps.txt -lmm 9 -maf "
f"{maf} > "
- f"{TEMPDIR}/gn2/{gwa_output_filename}.json")
+ f"{get_setting(app, 'TEMPDIR')}/gn2/{gwa_output_filename}.json")
else:
- generate_k_command = (f"{GEMMA_WRAPPER_COMMAND} --json -- "
- f"{GEMMAOPTS} "
- f" -g {flat_files('genotype/bimbam')}/"
+ generate_k_command = (f"{get_setting(app, 'GEMMA_WRAPPER_COMMAND')} --json -- "
+ f"{gemma_opts(app)} "
+ f" -g {flat_files(app, 'genotype/bimbam')}/"
f"{genofile_name}_geno.txt -p "
- f"{TEMPDIR}/gn2/{pheno_filename}.txt -a "
- f"{flat_files('genotype/bimbam')}/"
+ f"{get_setting(app, 'TEMPDIR')}/gn2/{pheno_filename}.txt -a "
+ f"{flat_files(app, 'genotype/bimbam')}/"
f"{genofile_name}_snps.txt -gk > "
- f"{TEMPDIR}/gn2/{k_output_filename}.json")
+ f"{get_setting(app, 'TEMPDIR')}/gn2/{k_output_filename}.json")
os.system(generate_k_command)
- gemma_command = (f"{GEMMA_WRAPPER_COMMAND} --json --input "
- f"{TEMPDIR}/gn2/{k_output_filename}.json -- "
- f"{GEMMAOPTS} "
- f"-a {flat_files('genotype/bimbam')}/"
+ gemma_command = (f"{get_setting(app, 'GEMMA_WRAPPER_COMMAND')} --json --input "
+ f"{get_setting(app, 'TEMPDIR')}/gn2/{k_output_filename}.json -- "
+ f"{gemma_opts(app)} "
+ f"-a {flat_files(app, 'genotype/bimbam')}/"
f"{genofile_name}_snps.txt "
- f"-lmm 9 -g {flat_files('genotype/bimbam')}/"
+ f"-lmm 9 -g {flat_files(app, 'genotype/bimbam')}/"
f"{genofile_name}_geno.txt -p "
- f"{TEMPDIR}/gn2/{pheno_filename}.txt ")
+ f"{get_setting(app, 'TEMPDIR')}/gn2/{pheno_filename}.txt ")
if covariates != "":
- gemma_command += (f" -c {flat_files('mapping')}/"
+ gemma_command += (f" -c {flat_files(app, 'mapping')}/"
f"{covar_filename}.txt > "
- f"{TEMPDIR}/gn2/{gwa_output_filename}.json")
+ f"{get_setting(app, 'TEMPDIR')}/gn2/{gwa_output_filename}.json")
else:
- gemma_command += f" > {TEMPDIR}/gn2/{gwa_output_filename}.json"
+ gemma_command += f" > {get_setting(app, 'TEMPDIR')}/gn2/{gwa_output_filename}.json"
os.system(gemma_command)
else:
@@ -138,7 +140,7 @@ def gen_pheno_txt_file(this_dataset, genofile_name, vals):
filename = "PHENO_" + generate_hash_of_string(this_dataset.name + str(vals)).replace("/", "_")
- with open(f"{TEMPDIR}/gn2/{filename}.txt", "w") as outfile:
+ with open(f"{get_setting(app, 'TEMPDIR')}/gn2/{filename}.txt", "w") as outfile:
for value in vals:
if value == "x":
outfile.write("NA\n")
@@ -178,7 +180,7 @@ def gen_covariates_file(this_dataset, covariates, samples):
filename = "COVAR_" + generate_hash_of_string(this_dataset.name + str(covariate_data_object)).replace("/", "_")
- with open((f"{flat_files('mapping')}/"
+ with open((f"{flat_files(app, 'mapping')}/"
f"{filename}.txt"),
"w") as outfile:
for i in range(len(covariate_data_object[0])):
@@ -191,7 +193,7 @@ def gen_covariates_file(this_dataset, covariates, samples):
def parse_loco_output(this_dataset, gwa_output_filename, loco="True"):
- output_filename = f"{TEMPDIR}/gn2/{gwa_output_filename}.json"
+ output_filename = f"{get_setting(app, 'TEMPDIR')}/gn2/{gwa_output_filename}.json"
if os.stat(output_filename).st_size == 0:
return {}
diff --git a/wqflask/wqflask/marker_regression/plink_mapping.py b/wqflask/wqflask/marker_regression/plink_mapping.py
index 75ee189e..f597c3a8 100644
--- a/wqflask/wqflask/marker_regression/plink_mapping.py
+++ b/wqflask/wqflask/marker_regression/plink_mapping.py
@@ -1,9 +1,10 @@
import string
import os
-from base.webqtlConfig import TMPDIR
+from flask import current_app as app
+
from utility import webqtlUtil
-from utility.tools import flat_files, PLINK_COMMAND
+from utility.tools import flat_files, get_setting
def run_plink(this_trait, dataset, species, vals, maf):
@@ -11,7 +12,7 @@ def run_plink(this_trait, dataset, species, vals, maf):
f"{dataset.group.name}_{this_trait.name}_")
gen_pheno_txt_file(dataset, vals)
- plink_command = f"{PLINK_COMMAND} --noweb --bfile {flat_files('mapping')}/{dataset.group.name} --no-pheno --no-fid --no-parents --no-sex --maf {maf} --out { TMPDIR}{plink_output_filename} --assoc "
+ plink_command = f"{get_setting(app, 'PLINK_COMMAND')} --noweb --bfile {flat_files(app, 'mapping')}/{dataset.group.name} --no-pheno --no-fid --no-parents --no-sex --maf {maf} --out { get_setting(app, 'TMPDIR')}{plink_output_filename} --assoc "
os.system(plink_command)
@@ -26,12 +27,12 @@ def gen_pheno_txt_file(this_dataset, vals):
"""Generates phenotype file for GEMMA/PLINK"""
current_file_data = []
- with open(f"{flat_files('mapping')}/{this_dataset.group.name}.fam", "r") as outfile:
+ with open(f"{flat_files(app, 'mapping')}/{this_dataset.group.name}.fam", "r") as outfile:
for i, line in enumerate(outfile):
split_line = line.split()
current_file_data.append(split_line)
- with open(f"{flat_files('mapping')}/{this_dataset.group.name}.fam", "w") as outfile:
+ with open(f"{flat_files(app, 'mapping')}/{this_dataset.group.name}.fam", "w") as outfile:
for i, line in enumerate(current_file_data):
if vals[i] == "x":
this_val = -9
@@ -43,7 +44,7 @@ def gen_pheno_txt_file(this_dataset, vals):
def gen_pheno_txt_file_plink(this_trait, dataset, vals, pheno_filename=''):
ped_sample_list = get_samples_from_ped_file(dataset)
- output_file = open(f"{TMPDIR}{pheno_filename}.txt", "wb")
+ output_file = open(f"{get_setting(app, 'TMPDIR')}{pheno_filename}.txt", "wb")
header = f"FID\tIID\t{this_trait.name}\n"
output_file.write(header)
@@ -79,7 +80,7 @@ def gen_pheno_txt_file_plink(this_trait, dataset, vals, pheno_filename=''):
def get_samples_from_ped_file(dataset):
- ped_file = open(f"{flat_files('mapping')}{dataset.group.name}.ped", "r")
+ ped_file = open(f"{flat_files(app, 'mapping')}{dataset.group.name}.ped", "r")
line = ped_file.readline()
sample_list = []
@@ -100,7 +101,7 @@ def parse_plink_output(output_filename, species):
threshold_p_value = 1
- result_fp = open(f"{TMPDIR}{output_filename}.qassoc", "rb")
+ result_fp = open(f"{get_setting(app, 'TMPDIR')}{output_filename}.qassoc", "rb")
line = result_fp.readline()
diff --git a/wqflask/wqflask/marker_regression/qtlreaper_mapping.py b/wqflask/wqflask/marker_regression/qtlreaper_mapping.py
index 4d5db2ee..3db2f484 100644
--- a/wqflask/wqflask/marker_regression/qtlreaper_mapping.py
+++ b/wqflask/wqflask/marker_regression/qtlreaper_mapping.py
@@ -8,7 +8,7 @@ import re
from base import webqtlConfig
from base.trait import GeneralTrait
from base.data_set import create_dataset
-from utility.tools import flat_files, REAPER_COMMAND, TEMPDIR
+from flask import current_app as app
+from utility.tools import flat_files, get_setting
def run_reaper(this_trait, this_dataset, samples, vals, json_data, num_perm, boot_check, num_bootstrap, do_control, control_marker, manhattan_plot, first_run=True, output_files=None):
@@ -54,11 +54,11 @@ def run_reaper(this_trait, this_dataset, samples, vals, json_data, num_perm, boo
if manhattan_plot != True:
opt_list.append("--interval 1")
- reaper_command = (REAPER_COMMAND +
- ' --geno {0}/{1}.geno --traits {2}/gn2/{3}.txt {4} -o {5}{6}.txt'.format(flat_files('genotype'),
+ reaper_command = (get_setting(app, 'REAPER_COMMAND') +
+ ' --geno {0}/{1}.geno --traits {2}/gn2/{3}.txt {4} -o {5}{6}.txt'.format(flat_files(app, 'genotype'),
genofile_name,
- TEMPDIR,
+ get_setting(app, 'TEMPDIR'),
trait_filename,
" ".join(
opt_list),
@@ -84,7 +84,7 @@ def run_reaper(this_trait, this_dataset, samples, vals, json_data, num_perm, boo
def gen_pheno_txt_file(samples, vals, trait_filename):
"""Generates phenotype file for GEMMA"""
- with open(f"{TEMPDIR}/gn2/{trait_filename}.txt", "w") as outfile:
+ with open(f"{get_setting(app, 'TEMPDIR')}/gn2/{trait_filename}.txt", "w") as outfile:
outfile.write("Trait\t")
filtered_sample_list = []
diff --git a/wqflask/wqflask/marker_regression/rqtl_mapping.py b/wqflask/wqflask/marker_regression/rqtl_mapping.py
index 9a42bc35..3a452664 100644
--- a/wqflask/wqflask/marker_regression/rqtl_mapping.py
+++ b/wqflask/wqflask/marker_regression/rqtl_mapping.py
@@ -10,11 +10,11 @@ from typing import Optional
from typing import TextIO
import numpy as np
+from flask import current_app as app
-from base.webqtlConfig import TMPDIR
from base.trait import create_trait
from utility.redis_tools import get_redis_conn
-from utility.tools import locate, GN3_LOCAL_URL
+from utility.tools import locate, get_setting
from wqflask.database import database_connection
@@ -23,9 +23,9 @@ def run_rqtl(trait_name, vals, samples, dataset, pair_scan, mapping_scale, model
pheno_file = write_phenotype_file(trait_name, samples, vals, dataset, cofactors, perm_strata_list)
if dataset.group.genofile:
- geno_file = locate(dataset.group.genofile, "genotype")
+ geno_file = locate(app, dataset.group.genofile, "genotype")
else:
- geno_file = locate(dataset.group.name + ".geno", "genotype")
+ geno_file = locate(app, dataset.group.name + ".geno", "genotype")
post_data = {
"pheno_file": pheno_file,
@@ -54,7 +54,7 @@ def run_rqtl(trait_name, vals, samples, dataset, pair_scan, mapping_scale, model
if perm_strata_list:
post_data["pstrata"] = True
- rqtl_output = requests.post(GN3_LOCAL_URL + "api/rqtl/compute", data=post_data).json()
+ rqtl_output = requests.post(get_setting(app, "GN3_LOCAL_URL") + "api/rqtl/compute", data=post_data).json()
if num_perm > 0:
return rqtl_output['perm_results'], rqtl_output['suggestive'], rqtl_output['significant'], rqtl_output['results']
else:
@@ -90,7 +90,7 @@ def write_covarstruct_file(cofactors: str) -> str:
writer.writerow([cofactor_name, datatype])
hash_of_file = get_hash_of_textio(covar_struct_file)
- file_path = TMPDIR + hash_of_file + ".csv"
+ file_path = get_setting(app, 'TMPDIR') + hash_of_file + ".csv"
with open(file_path, "w") as fd:
covar_struct_file.seek(0)
@@ -133,7 +133,7 @@ def write_phenotype_file(trait_name: str,
writer.writerow(this_row)
hash_of_file = get_hash_of_textio(pheno_file)
- file_path = TMPDIR + hash_of_file + ".csv"
+ file_path = get_setting(app, 'TMPDIR') + hash_of_file + ".csv"
with open(file_path, "w") as fd:
pheno_file.seek(0)
diff --git a/wqflask/wqflask/marker_regression/run_mapping.py b/wqflask/wqflask/marker_regression/run_mapping.py
index 952d9749..b7c381c5 100644
--- a/wqflask/wqflask/marker_regression/run_mapping.py
+++ b/wqflask/wqflask/marker_regression/run_mapping.py
@@ -23,7 +23,7 @@ import simplejson as json
from redis import Redis
Redis = Redis()
-from flask import Flask, g
+from flask import Flask, g, current_app as app
from base.trait import GeneralTrait
from base import data_set
@@ -35,9 +35,8 @@ from wqflask.database import database_connection
from wqflask.marker_regression import gemma_mapping, rqtl_mapping, qtlreaper_mapping, plink_mapping
from wqflask.show_trait.SampleList import SampleList
-from utility.tools import locate, locate_ignore_error, GEMMA_COMMAND, PLINK_COMMAND, TEMPDIR
+from utility.tools import get_setting
from utility.external import shell
-from base.webqtlConfig import TMPDIR, GENERATED_TEXT_DIR
Redis = get_redis_conn()
@@ -467,7 +466,7 @@ class RunMapping:
self.this_trait, self.dataset, self.vals, pheno_filename=output_filename)
rqtl_command = './plink --noweb --ped %s.ped --no-fid --no-parents --no-sex --no-pheno --map %s.map --pheno %s/%s.txt --pheno-name %s --maf %s --missing-phenotype -9999 --out %s%s --assoc ' % (
- self.dataset.group.name, self.dataset.group.name, TMPDIR, plink_output_filename, self.this_trait.name, self.maf, TMPDIR, plink_output_filename)
+ self.dataset.group.name, self.dataset.group.name, get_setting(app, 'TMPDIR'), plink_output_filename, self.this_trait.name, self.maf, get_setting(app, 'TMPDIR'), plink_output_filename)
os.system(rqtl_command)
@@ -646,8 +645,8 @@ def write_input_for_browser(this_dataset, gwas_results, annotations):
for _ in range(6))
gwas_filename = file_base + "_GWAS"
annot_filename = file_base + "_ANNOT"
- gwas_path = "{}/gn2/".format(TEMPDIR) + gwas_filename
- annot_path = "{}/gn2/".format(TEMPDIR) + annot_filename
+ gwas_path = "{}/gn2/".format(get_setting(app, 'TEMPDIR')) + gwas_filename
+ annot_path = "{}/gn2/".format(get_setting(app, 'TEMPDIR')) + annot_filename
with open(gwas_path + ".json", "w") as gwas_file, open(annot_path + ".json", "w") as annot_file:
gwas_file.write(json.dumps(gwas_results))
diff --git a/wqflask/wqflask/network_graph/network_graph.py b/wqflask/wqflask/network_graph/network_graph.py
index 9b70f03d..13d00ed5 100644
--- a/wqflask/wqflask/network_graph/network_graph.py
+++ b/wqflask/wqflask/network_graph/network_graph.py
@@ -20,12 +20,13 @@
import scipy
import simplejson as json
+from flask import current_app as app
from base.trait import create_trait
from base import data_set
from utility import helper_functions
from utility import corr_result_helpers
-from utility.tools import GN2_BRANCH_URL
+from utility.tools import get_setting
class NetworkGraph:
@@ -173,7 +174,7 @@ class NetworkGraph:
self.nodes_list.append(node_dict)
self.elements = json.dumps(self.nodes_list + self.edges_list)
- self.gn2_url = GN2_BRANCH_URL
+ self.gn2_url = get_setting(app, "GN2_BRANCH_URL")
groups = []
for sample in self.all_sample_list:
diff --git a/wqflask/wqflask/oauth2/client.py b/wqflask/wqflask/oauth2/client.py
index 2a06b156..b0131a8f 100644
--- a/wqflask/wqflask/oauth2/client.py
+++ b/wqflask/wqflask/oauth2/client.py
@@ -12,15 +12,15 @@ from authlib.integrations.requests_client import OAuth2Session
from wqflask.oauth2 import session
from wqflask.oauth2.checks import user_logged_in
+from flask import current_app as app
+from utility.tools import get_setting
+
SCOPE = ("profile group role resource register-client user masquerade "
"introspect migrate-data")
def oauth2_client():
def __client__(token) -> OAuth2Session:
- from utility.tools import (
- GN_SERVER_URL, OAUTH2_CLIENT_ID, OAUTH2_CLIENT_SECRET)
return OAuth2Session(
- OAUTH2_CLIENT_ID, OAUTH2_CLIENT_SECRET,
+ get_setting(app, "OAUTH2_CLIENT_ID"), get_setting(app, "OAUTH2_CLIENT_SECRET"),
scope=SCOPE, token_endpoint_auth_method="client_secret_post",
token=token)
return session.user_token().either(
@@ -39,13 +39,12 @@ def __no_token__(_err) -> Left:
def oauth2_get(uri_path: str, data: dict = {}, **kwargs) -> Either:
def __get__(token) -> Either:
- from utility.tools import (
- GN_SERVER_URL, OAUTH2_CLIENT_ID, OAUTH2_CLIENT_SECRET)
client = OAuth2Session(
- OAUTH2_CLIENT_ID, OAUTH2_CLIENT_SECRET,
+ get_setting(app, "OAUTH2_CLIENT_ID"),
+ get_setting(app, "OAUTH2_CLIENT_SECRET"),
token=token, scope=SCOPE)
resp = client.get(
- urljoin(GN_SERVER_URL, uri_path),
+ urljoin(get_setting(app, 'GN_SERVER_URL'), uri_path),
data=data,
**kwargs)
if resp.status_code == 200:
@@ -59,13 +58,12 @@ def oauth2_post(
uri_path: str, data: Optional[dict] = None, json: Optional[dict] = None,
**kwargs) -> Either:
def __post__(token) -> Either:
- from utility.tools import (
- GN_SERVER_URL, OAUTH2_CLIENT_ID, OAUTH2_CLIENT_SECRET)
client = OAuth2Session(
- OAUTH2_CLIENT_ID, OAUTH2_CLIENT_SECRET,
+ get_setting(app, "OAUTH2_CLIENT_ID"),
+ get_setting(app, "OAUTH2_CLIENT_SECRET"),
token=token, scope=SCOPE)
resp = client.post(
- urljoin(GN_SERVER_URL, uri_path), data=data, json=json,
+ urljoin(get_setting(app, 'GN_SERVER_URL'), uri_path), data=data, json=json,
**kwargs)
if resp.status_code == 200:
return Right(resp.json())
@@ -75,22 +73,19 @@ def oauth2_post(
return session.user_token().either(__no_token__, __post__)
def no_token_get(uri_path: str, **kwargs) -> Either:
- from utility.tools import GN_SERVER_URL
- resp = requests.get(urljoin(GN_SERVER_URL, uri_path), **kwargs)
+ resp = requests.get(urljoin(get_setting(app, 'GN_SERVER_URL'), uri_path), **kwargs)
if resp.status_code == 200:
return Right(resp.json())
return Left(resp)
def no_token_post(uri_path: str, **kwargs) -> Either:
- from utility.tools import (
- GN_SERVER_URL, OAUTH2_CLIENT_ID, OAUTH2_CLIENT_SECRET)
data = kwargs.get("data", {})
the_json = kwargs.get("json", {})
request_data = {
**data,
**the_json,
- "client_id": OAUTH2_CLIENT_ID,
- "client_secret": OAUTH2_CLIENT_SECRET
+ "client_id": get_setting(app, "OAUTH2_CLIENT_ID"),
+ "client_secret": get_setting(app, "OAUTH2_CLIENT_SECRET")
}
new_kwargs = {
**{
@@ -99,7 +94,7 @@ def no_token_post(uri_path: str, **kwargs) -> Either:
},
("data" if bool(data) else "json"): request_data
}
- resp = requests.post(urljoin(GN_SERVER_URL, uri_path),
+ resp = requests.post(urljoin(get_setting(app, 'GN_SERVER_URL'), uri_path),
**new_kwargs)
if resp.status_code == 200:
return Right(resp.json())
diff --git a/wqflask/wqflask/oauth2/request_utils.py b/wqflask/wqflask/oauth2/request_utils.py
index 992720f1..efaa8a8a 100644
--- a/wqflask/wqflask/oauth2/request_utils.py
+++ b/wqflask/wqflask/oauth2/request_utils.py
@@ -7,16 +7,17 @@ from flask import (
flash, request, session, url_for, redirect, Response, render_template,
current_app as app)
+from utility.tools import get_setting
+
from .client import SCOPE, oauth2_get
def authserver_authorise_uri():
- from utility.tools import GN_SERVER_URL, OAUTH2_CLIENT_ID
req_baseurl = urlparse(request.base_url)
host_uri = f"{req_baseurl.scheme}://{req_baseurl.netloc}/"
return urljoin(
- GN_SERVER_URL,
+ get_setting(app, "GN_SERVER_URL"),
"oauth2/authorise?response_type=code"
- f"&client_id={OAUTH2_CLIENT_ID}"
+ f"&client_id={get_setting(app, 'OAUTH2_CLIENT_ID')}"
f"&redirect_uri={urljoin(host_uri, 'oauth2/code')}")
def raise_unimplemented():
diff --git a/wqflask/wqflask/partial_correlations_views.py b/wqflask/wqflask/partial_correlations_views.py
index a11d902c..a62410d0 100644
--- a/wqflask/wqflask/partial_correlations_views.py
+++ b/wqflask/wqflask/partial_correlations_views.py
@@ -14,7 +14,7 @@ from flask import (
render_template)
from wqflask import app
-from utility.tools import GN_SERVER_URL
+from utility.tools import get_setting
from wqflask.database import database_connection
from gn3.db.partial_correlations import traits_info
@@ -288,7 +288,7 @@ def partial_correlations():
"with_target_db": args["with_target_db"]
}
return handle_response(requests.post(
- url=urljoin(GN_SERVER_URL, "correlation/partial"),
+ url=urljoin(get_setting(current_app, 'GN_SERVER_URL'), "correlation/partial"),
json=post_data))
for error in args["errors"]:
@@ -303,7 +303,7 @@ def partial_correlations():
"with_target_db": args["with_target_db"]
}
return handle_response(requests.post(
- url=urljoin(GN_SERVER_URL, "correlation/partial"),
+ url=urljoin(get_setting(current_app, 'GN_SERVER_URL'), "correlation/partial"),
json=post_data))
for error in args["errors"]:
@@ -348,7 +348,7 @@ def process_pcorrs_command_output(result):
@app.route("/partial_correlations/<command_id>", methods=["GET"])
def poll_partial_correlation_results(command_id):
response = requests.get(
- url=urljoin(GN_SERVER_URL, f"async_commands/state/{command_id}"))
+ url=urljoin(get_setting(current_app, 'GN_SERVER_URL'), f"async_commands/state/{command_id}"))
if response.status_code == 200:
data = response.json()
diff --git a/wqflask/wqflask/search_results.py b/wqflask/wqflask/search_results.py
index 6222dd88..1ec9daf9 100644
--- a/wqflask/wqflask/search_results.py
+++ b/wqflask/wqflask/search_results.py
@@ -6,7 +6,7 @@ import re
import json
-from flask import g
+from flask import g, current_app as app
from base.data_set import create_dataset
from base.webqtlConfig import PUBMEDLINK_URL
@@ -17,7 +17,7 @@ from wqflask.database import database_connection
from utility import hmac
from utility.authentication_tools import check_resource_availability
-from utility.tools import GN2_BASE_URL
+from utility.tools import get_setting
from utility.type_checking import is_str
@@ -411,7 +411,7 @@ def get_alias_terms(symbol, species):
filtered_aliases = []
response = requests.get(
- GN2_BASE_URL + "/gn3/gene/aliases/" + symbol_string)
+ get_setting(app, "GN2_BASE_URL") + "/gn3/gene/aliases/" + symbol_string)
if response:
alias_list = json.loads(response.content)
diff --git a/wqflask/wqflask/show_trait/show_trait.py b/wqflask/wqflask/show_trait/show_trait.py
index ae5e1ebb..c8b4d199 100644
--- a/wqflask/wqflask/show_trait/show_trait.py
+++ b/wqflask/wqflask/show_trait/show_trait.py
@@ -10,6 +10,7 @@ from collections import OrderedDict
import numpy as np
import scipy.stats as ss
+from flask import current_app as app
from wqflask.database import database_connection
@@ -19,7 +20,6 @@ from base.trait import create_trait
from base import data_set
from utility import helper_functions
from utility.tools import locate_ignore_error
-from utility.tools import GN_PROXY_URL
from utility.redis_tools import get_redis_conn, get_resource_id
from gn3.authentication import get_highest_user_access_role
@@ -753,7 +753,7 @@ def get_genotype_scales(genofiles):
def get_scales_from_genofile(file_location):
- geno_path = locate_ignore_error(file_location, 'genotype')
+ geno_path = locate_ignore_error(app, file_location, 'genotype')
# ZS: This is just to allow the code to run when
if not geno_path:
return [["physic", "Mb"]]
diff --git a/wqflask/wqflask/user_login.py b/wqflask/wqflask/user_login.py
index ae61edb0..4f92723c 100644
--- a/wqflask/wqflask/user_login.py
+++ b/wqflask/wqflask/user_login.py
@@ -21,7 +21,7 @@ from utility.redis_tools import is_redis_available, get_redis_conn, get_user_id,
Redis = get_redis_conn()
from smtplib import SMTP
-from utility.tools import SMTP_CONNECT, SMTP_USERNAME, SMTP_PASSWORD, LOG_SQL_ALCHEMY, GN2_BRANCH_URL
+from utility.tools import get_setting, get_setting_bool
THREE_DAYS = 60 * 60 * 24 * 3
@@ -116,13 +116,13 @@ def send_email(toaddr, msg, fromaddr="no-reply@genenetwork.org"):
'UNKNOWN' TLS is used
"""
- if SMTP_USERNAME == 'UNKNOWN':
- server = SMTP(SMTP_CONNECT)
+ if get_setting(app, "SMTP_USERNAME") == 'UNKNOWN':
+ server = SMTP(get_setting(app, "SMTP_CONNECT"))
server.sendmail(fromaddr, toaddr, msg)
else:
- server = SMTP(SMTP_CONNECT)
+ server = SMTP(get_setting(app, "SMTP_CONNECT"))
server.starttls()
- server.login(SMTP_USERNAME, SMTP_PASSWORD)
+ server.login(get_setting(app, "SMTP_USERNAME"), get_setting(app, "SMTP_PASSWORD"))
server.sendmail(fromaddr, toaddr, msg)
server.quit()
@@ -304,7 +304,7 @@ def orcid_oauth2():
"client_id": ORCID_CLIENT_ID,
"client_secret": ORCID_CLIENT_SECRET,
"grant_type": "authorization_code",
- "redirect_uri": GN2_BRANCH_URL + "n/login/orcid_oauth2",
+ "redirect_uri": get_setting(app, "GN2_BRANCH_URL") + "n/login/orcid_oauth2",
"code": code
}
diff --git a/wqflask/wqflask/views.py b/wqflask/wqflask/views.py
index e7a32d34..6282a4b5 100644
--- a/wqflask/wqflask/views.py
+++ b/wqflask/wqflask/views.py
@@ -82,22 +82,12 @@ from wqflask.oauth2.client import no_token_get
from wqflask.oauth2.request_utils import process_error
from utility import temp_data
-from utility.tools import TEMPDIR
-from utility.tools import USE_REDIS
-from utility.tools import REDIS_URL
-from utility.tools import GN_SERVER_URL
-from utility.tools import GN3_LOCAL_URL
-from utility.tools import GN_VERSION
-from utility.tools import JS_TWITTER_POST_FETCHER_PATH
-from utility.tools import JS_GUIX_PATH
+from utility.tools import get_setting, get_setting_bool
from utility.helper_functions import get_species_groups
from utility.redis_tools import get_redis_conn
import utility.hmac as hmac
-from base.webqtlConfig import TMPDIR
-from base.webqtlConfig import GENERATED_IMAGE_DIR
-
from wqflask.database import database_connection
import jobs.jobs as jobs
@@ -138,7 +128,7 @@ def handle_generic_exceptions(e):
resp = make_response(render_template("error.html", message=err_msg,
stack={formatted_lines},
error_image=animation,
- version=GN_VERSION))
+ version=get_setting(app, 'GN_VERSION')))
resp.set_cookie(err_msg[:32], animation)
return resp
@@ -152,8 +142,8 @@ def no_access_page():
def index_page():
anon_id = session_info()["anon_id"]
def __render__(colls):
- return render_template("index_page.html", version=GN_VERSION,
- gn_server_url=GN_SERVER_URL,
+ return render_template("index_page.html", version=get_setting(app, 'GN_VERSION'),
+ gn_server_url=get_setting(app, 'GN_SERVER_URL'),
anon_collections=(
colls if user_logged_in() else []),
anon_id=anon_id)
@@ -167,7 +157,7 @@ def index_page():
@app.route("/tmp/<img_path>")
def tmp_page(img_path):
initial_start_vars = request.form
- imgfile = open(GENERATED_IMAGE_DIR + img_path, 'rb')
+ imgfile = open(get_setting(app, 'GENERATED_IMAGE_DIR') + img_path, 'rb')
imgdata = imgfile.read()
imgB64 = base64.b64encode(imgdata)
bytesarray = array.array('B', imgB64)
@@ -177,7 +167,7 @@ def tmp_page(img_path):
@app.route("/js/<path:filename>")
def js(filename):
- js_path = JS_GUIX_PATH
+ js_path = get_setting(app, 'JS_GUIX_PATH')
name = filename
if 'js_alt/' in filename:
js_path = js_path.replace('genenetwork2/javascript', 'javascript')
@@ -187,7 +177,7 @@ def js(filename):
@app.route("/css/<path:filename>")
def css(filename):
- js_path = JS_GUIX_PATH
+ js_path = get_setting(app, 'JS_GUIX_PATH')
name = filename
if 'js_alt/' in filename:
js_path = js_path.replace('genenetwork2/javascript', 'javascript')
@@ -197,13 +187,13 @@ def css(filename):
@app.route("/twitter/<path:filename>")
def twitter(filename):
- return send_from_directory(JS_TWITTER_POST_FETCHER_PATH, filename)
+ return send_from_directory(get_setting(app, 'JS_TWITTER_POST_FETCHER_PATH'), filename)
@app.route("/search", methods=('GET',))
def search_page():
result = None
- if USE_REDIS:
+ if get_setting_bool(app, 'USE_REDIS'):
key = "search_results:v1:" + \
json.dumps(request.args, sort_keys=True)
result = Redis.get(key)
@@ -211,7 +201,7 @@ def search_page():
result = pickle.loads(result)
result = SearchResultPage(request.args).__dict__
valid_search = result['search_term_exists']
- if USE_REDIS and valid_search:
+ if get_setting_bool(app, 'USE_REDIS') and valid_search:
# Redis.set(key, pickle.dumps(result, pickle.HIGHEST_PROTOCOL))
Redis.expire(key, 60 * 60)
@@ -277,7 +267,7 @@ def docedit():
@app.route('/generated/<filename>')
def generated_file(filename):
- return send_from_directory(GENERATED_IMAGE_DIR, filename)
+ return send_from_directory(get_setting(app, 'GENERATED_IMAGE_DIR'), filename)
@app.route("/help")
@@ -351,8 +341,8 @@ def submit_trait_form():
return render_template(
"submit_trait.html",
species_and_groups=species_and_groups,
- gn_server_url=GN_SERVER_URL,
- version=GN_VERSION)
+ gn_server_url=get_setting(app, 'GN_SERVER_URL'),
+ version=get_setting(app, 'GN_VERSION'))
@app.route("/create_temp_trait", methods=('POST',))
@@ -904,11 +894,11 @@ def __handle_correlation_error__(exc):
@app.route("/corr_compute", methods=('POST', 'GET'))
def corr_compute_page():
- with Redis.from_url(REDIS_URL, decode_responses=True) as rconn:
+ with Redis.from_url(get_setting(app, 'REDIS_URL'), decode_responses=True) as rconn:
if request.method == "POST":
request_received = datetime.datetime.utcnow()
filename=hmac.hmac_creation(f"request_form_{request_received.isoformat()}")
- filepath = f"{TMPDIR}{filename}"
+ filepath = f"{get_setting(app, 'TMPDIR')}{filename}"
with open(filepath, "wb") as pfile:
pickle.dump(request.form, pfile, protocol=pickle.HIGHEST_PROTOCOL)
job_id = jobs.queue(
@@ -919,7 +909,7 @@ def corr_compute_page():
"request_received_time": request_received.isoformat(),
"status": "queued"
})
- jobs.run(job_id, REDIS_URL)
+ jobs.run(job_id, get_setting(app, 'REDIS_URL'))
return redirect(url_for("corr_compute_page", job_id=str(job_id)))
@@ -1030,7 +1020,7 @@ def browser_inputs():
filename = request.args['filename']
- with open("{}/gn2/".format(TEMPDIR) + filename + ".json", "r") as the_file:
+ with open("{}/gn2/".format(get_setting(app, 'TEMPDIR')) + filename + ".json", "r") as the_file:
file_contents = json.load(the_file)
return flask.jsonify(file_contents)
@@ -1086,7 +1076,7 @@ def display_generif_page(symbol):
"""Fetch GeneRIF metadata from GN3 and display it"""
entries = requests.get(
urljoin(
- GN3_LOCAL_URL,
+ get_setting(app, 'GN3_LOCAL_URL'),
f"/api/metadata/genewiki/{symbol}"
)
).json()
@@ -1101,7 +1091,7 @@ def display_generif_page(symbol):
def get_dataset(name):
metadata = requests.get(
urljoin(
- GN3_LOCAL_URL,
+ get_setting(app, 'GN3_LOCAL_URL'),
f"/api/metadata/dataset/{name}")
).json()
float_p = ""
@@ -1124,7 +1114,7 @@ def get_dataset(name):
def get_publication(name):
metadata = requests.get(
urljoin(
- GN3_LOCAL_URL,
+ get_setting(app, 'GN3_LOCAL_URL'),
f"/api/metadata/publication/{name}")
).json()
return render_template(
@@ -1137,7 +1127,7 @@ def get_publication(name):
def get_phenotype(name):
metadata = requests.get(
urljoin(
- GN3_LOCAL_URL,
+ get_setting(app, 'GN3_LOCAL_URL'),
f"/api/metadata/phenotype/{name}")
).json()
return render_template(
@@ -1150,7 +1140,7 @@ def get_phenotype(name):
def get_genotype(name):
metadata = requests.get(
urljoin(
- GN3_LOCAL_URL,
+ get_setting(app, 'GN3_LOCAL_URL'),
f"/api/metadata/genotype/{name}")
).json()
return render_template(
diff --git a/wqflask/wqflask/wgcna/gn3_wgcna.py b/wqflask/wqflask/wgcna/gn3_wgcna.py
index ab7fe46c..736dfd9f 100644
--- a/wqflask/wqflask/wgcna/gn3_wgcna.py
+++ b/wqflask/wqflask/wgcna/gn3_wgcna.py
@@ -5,8 +5,10 @@ and process data to be rendered by datatables
import requests
from types import SimpleNamespace
+from flask import current_app as app
+
+from utility.tools import get_setting
from utility.helper_functions import get_trait_db_obs
-from utility.tools import GN3_LOCAL_URL
def fetch_trait_data(requestform):
@@ -74,7 +76,7 @@ def process_image(response):
def run_wgcna(form_data):
"""function to run wgcna"""
- wgcna_api = f"{GN3_LOCAL_URL}/api/wgcna/run_wgcna"
+ wgcna_api = f"{get_setting(app, 'GN3_LOCAL_URL')}/api/wgcna/run_wgcna"
trait_dataset = fetch_trait_data(form_data)
form_data["minModuleSize"] = int(form_data["MinModuleSize"])