Diffstat (limited to 'wqflask')
 wqflask/base/data_set.py                         |  6 ++----
 wqflask/wqflask/correlation/show_corr_results.py | 21 +--------------------
 2 files changed, 3 insertions(+), 24 deletions(-)
diff --git a/wqflask/base/data_set.py b/wqflask/base/data_set.py
index 70c58136..768ad49b 100644
--- a/wqflask/base/data_set.py
+++ b/wqflask/base/data_set.py
@@ -754,8 +754,8 @@ class DataSet:
         # Postgres doesn't have that limit, so we can get rid of this after we transition
         chunk_size = 50
         number_chunks = int(math.ceil(len(sample_ids) / chunk_size))
-        cached_results = fetch_cached_results(self.name, self.type)
-        # cached_results = None
+        # cached_results = fetch_cached_results(self.name, self.type)
+        cached_results = None
         if cached_results is None:
             trait_sample_data = []
             for sample_ids_step in chunks.divide_into_chunks(sample_ids, number_chunks):
@@ -800,8 +800,6 @@ class DataSet:
                 results = g.db.execute(query).fetchall()
                 trait_sample_data.append([list(result) for result in results])
-            cache_dataset_results(
-                self.name, self.type, trait_sample_data)
         else:
             trait_sample_data = cached_results
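
The hunks above disable the dataset-level result cache: the fetch_cached_results call is commented out, cached_results is pinned to None, and the cache_dataset_results call after the chunked queries is removed, so every request now takes the SQL path. For orientation, a minimal, hypothetical sketch of the kind of file-backed helpers being bypassed (the real implementations live elsewhere in the codebase and may differ):

import json
import os
import tempfile

# Hypothetical stand-ins for fetch_cached_results / cache_dataset_results;
# they only illustrate a cache keyed on (dataset name, dataset type).
CACHE_DIR = tempfile.gettempdir()


def _cache_path(dataset_name, dataset_type):
    # One JSON file per (dataset name, dataset type) pair.
    return os.path.join(CACHE_DIR, f"{dataset_name}_{dataset_type}.json")


def fetch_cached_results(dataset_name, dataset_type):
    # Return previously cached trait sample data, or None on a cache miss.
    try:
        with open(_cache_path(dataset_name, dataset_type)) as handle:
            return json.load(handle)
    except (FileNotFoundError, json.JSONDecodeError):
        return None


def cache_dataset_results(dataset_name, dataset_type, trait_sample_data):
    # Persist trait sample data so a later request could skip the SQL queries.
    with open(_cache_path(dataset_name, dataset_type), "w") as handle:
        json.dump(trait_sample_data, handle)

With cached_results forced to None, neither helper is reached, which is what the change encodes.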
diff --git a/wqflask/wqflask/correlation/show_corr_results.py b/wqflask/wqflask/correlation/show_corr_results.py
index 42010a1e..55915a74 100644
--- a/wqflask/wqflask/correlation/show_corr_results.py
+++ b/wqflask/wqflask/correlation/show_corr_results.py
@@ -86,30 +86,17 @@ def correlation_json_for_table(correlation_data, this_trait, this_dataset, targe
     corr_results = correlation_data['correlation_results']
     results_list = []
-    file_name = f"{target_dataset['name']}_metadata.json"
-
-    file_path = os.path.join(TMPDIR, file_name)
-    new_traits_metadata = {}
-
-    try:
-        with open(file_path,"r+") as file_handler:
-            dataset_metadata = json.load(file_handler)
-
-    except FileNotFoundError:
-        Path(file_path).touch(exist_ok=True)
-        dataset_metadata = {}
     for i, trait_dict in enumerate(corr_results):
         trait_name = list(trait_dict.keys())[0]
         trait = trait_dict[trait_name]
-        target_trait = dataset_metadata.get(trait_name)
+        target_trait = None
         if target_trait is None:
             target_trait_ob = create_trait(dataset=target_dataset_ob,
                                            name=trait_name,
                                            get_qtl_info=True)
             target_trait = jsonable(target_trait_ob, target_dataset_ob)
-            new_traits_metadata[trait_name] = target_trait
         if target_trait['view'] == False:
             continue
         results_dict = {}
@@ -184,12 +171,6 @@ def correlation_json_for_table(correlation_data, this_trait, this_dataset, targe
         results_list.append(results_dict)
-
-    if bool(new_traits_metadata):
-        # that means new traits exists
-        dataset_metadata.update(new_traits_metadata)
-        with open(file_path,"w+") as file_handler:
-            json.dump(dataset_metadata, file_handler)
     return json.dumps(results_list)
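
These hunks drop the per-dataset {name}_metadata.json cache kept in TMPDIR: the json.load read, the dataset_metadata.get(trait_name) lookup, and the final json.dump write-back are all removed, so each correlated trait is rebuilt with create_trait and jsonable on every request. A minimal sketch of the resulting loop, with stub stand-ins for create_trait and jsonable so it runs on its own (the real functions hit the database):

import json


def create_trait(dataset, name, get_qtl_info):
    # Stub stand-in; the real create_trait loads the trait from the database.
    return {"name": name, "dataset": dataset}


def jsonable(trait_ob, dataset_ob):
    # Stub stand-in; the real jsonable serialises a trait object to a dict.
    return {"view": True, "symbol": trait_ob["name"]}


def correlation_rows(corr_results, target_dataset_ob):
    # Simplified version of the loop after the metadata cache was removed:
    # no cache lookup, every trait is re-fetched and serialised per request.
    results_list = []
    for trait_dict in corr_results:
        trait_name = list(trait_dict.keys())[0]
        target_trait = jsonable(
            create_trait(dataset=target_dataset_ob,
                         name=trait_name,
                         get_qtl_info=True),
            target_dataset_ob)
        if target_trait['view'] is False:
            continue  # hidden traits are skipped, as in the original loop
        results_list.append({"trait_name": trait_name, **target_trait})
    return json.dumps(results_list)


# Toy usage with a placeholder dataset name:
print(correlation_rows([{"Trait1": {"corr": 0.9}}], "ExampleDataset"))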