Diffstat (limited to 'wqflask')
-rw-r--r--  wqflask/base/data_set.py  42
-rw-r--r--  wqflask/db/call.py        12
-rw-r--r--  wqflask/wqflask/views.py   1
3 files changed, 36 insertions, 19 deletions
diff --git a/wqflask/base/data_set.py b/wqflask/base/data_set.py
index 321dbf3f..a75b517f 100644
--- a/wqflask/base/data_set.py
+++ b/wqflask/base/data_set.py
@@ -51,8 +51,9 @@ from maintenance import get_group_samplelists
 
 from MySQLdb import escape_string as escape
 from pprint import pformat as pf
 from db.gn_server import menu_main
+from db.call import fetchall
 
-from utility.tools import USE_GN_SERVER
+from utility.tools import USE_GN_SERVER, USE_REDIS
 from utility.logger import getLogger
 logger = getLogger(__name__ )
@@ -115,14 +116,15 @@ Publish or ProbeSet. E.g.
 Dataset_Getter = Dataset_Types()
 
 def create_datasets_list():
-    key = "all_datasets"
-    result = Redis.get(key)
+    if USE_REDIS:
+        key = "all_datasets"
+        result = Redis.get(key)
 
-    if result:
-        logger.debug("Cache hit!!!")
-        datasets = pickle.loads(result)
+        if result:
+            logger.debug("Redis cache hit")
+            datasets = pickle.loads(result)
 
-    else:
+    if result is None:
         datasets = list()
         with Bench("Creating DataSets object"):
             type_dict = {'Publish': 'PublishFreeze',
@@ -131,7 +133,8 @@ def create_datasets_list():
             for dataset_type in type_dict:
                 query = "SELECT Name FROM {}".format(type_dict[dataset_type])
-                for result in g.db.execute(query).fetchall():
+                raise Exception("HELL")
+                for result in fetchall(query):
                     #The query at the beginning of this function isn't
                     #necessary here, but still would rather just reuse
                     #it
                     logger.debug("type: {}\tname: {}".format(dataset_type, result.Name))
@@ -139,8 +142,9 @@ def create_datasets_list():
                     dataset = create_dataset(result.Name, dataset_type)
                     datasets.append(dataset)
 
-        Redis.set(key, pickle.dumps(datasets, pickle.HIGHEST_PROTOCOL))
-        Redis.expire(key, 60*60)
+    if USE_REDIS:
+        Redis.set(key, pickle.dumps(datasets, pickle.HIGHEST_PROTOCOL))
+        Redis.expire(key, 60*60)
 
     return datasets
 
@@ -345,8 +349,9 @@ class DatasetGroup(object):
                 dataset_menu.append(dict(tissue=tissue_name,
                                     datasets=[(dataset, dataset_short)]))
 
-        Redis.set(key, pickle.dumps(dataset_menu, pickle.HIGHEST_PROTOCOL))
-        Redis.expire(key, 60*5)
+        if USE_REDIS:
+            Redis.set(key, pickle.dumps(dataset_menu, pickle.HIGHEST_PROTOCOL))
+            Redis.expire(key, 60*5)
 
         self._datasets = dataset_menu
         return self._datasets
@@ -364,12 +369,12 @@ class DatasetGroup(object):
         self.parlist = [maternal, paternal]
 
     def get_samplelist(self):
+        result = None
         key = "samplelist:v2:" + self.name
-        #logger.debug("key is:", key)
-        #with Bench("Loading cache"):
-        result = Redis.get(key)
+        if USE_REDIS:
+            result = Redis.get(key)
 
-        if result:
+        if result is not None:
             #logger.debug("Sample List Cache hit!!!")
             #logger.debug("Before unjsonifying {}: {}".format(type(result), result))
             self.samplelist = json.loads(result)
@@ -387,8 +392,9 @@ class DatasetGroup(object):
             else:
                 self.samplelist = None
             logger.debug("Sample list: ",self.samplelist)
-            Redis.set(key, json.dumps(self.samplelist))
-            Redis.expire(key, 60*5)
+            if USE_REDIS:
+                Redis.set(key, json.dumps(self.samplelist))
+                Redis.expire(key, 60*5)
 
     def all_samples_ordered(self):
         result = []
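All of the data_set.py hunks above apply the same guard: touch Redis only when USE_REDIS is set, and recompute the value otherwise. Below is a minimal sketch of that cache-aside pattern, assuming a redis-py style client; the helper name, parameters and defaults are illustrative and not part of this commit.

import json


def cached_json(client, key, compute, use_redis=True, ttl=60 * 5):
    """Illustrative cache-aside helper (not from this commit).

    `client` is assumed to be a redis-py style object with get/set/expire;
    `use_redis` plays the role of utility.tools.USE_REDIS in the hunks above.
    """
    result = None
    if use_redis:
        result = client.get(key)           # cache lookup only when Redis is on
    if result is not None:
        return json.loads(result)          # cache hit
    value = compute()                      # cache miss, or Redis disabled
    if use_redis:
        client.set(key, json.dumps(value))
        client.expire(key, ttl)            # same 5-minute expiry as the menu and sample-list caches
    return value

The same get/guard/set sequence appears in create_datasets_list, DatasetGroup.datasets and DatasetGroup.get_samplelist above; with the flag off, the Redis round trip is skipped entirely.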
"""Return JSON record by calling GN_SERVER diff --git a/wqflask/wqflask/views.py b/wqflask/wqflask/views.py index bce1f755..1b2419d8 100644 --- a/wqflask/wqflask/views.py +++ b/wqflask/wqflask/views.py @@ -36,7 +36,6 @@ from wqflask import update_search_results from wqflask import docs from wqflask import news from base.data_set import DataSet # Used by YAML in marker_regression -from base.data_set import create_datasets_list from wqflask.show_trait import show_trait from wqflask.show_trait import export_trait_data from wqflask.heatmap import heatmap |