author     zsloan  2023-01-20 22:20:43 +0000
committer  zsloan  2023-02-28 14:18:09 -0600
commit     db30ba7d61b2fac4dabe4664da3e8288cd80b3b8 (patch)
tree       60d84fe62340b463cd3638814e865ff05e60d85a /wqflask
parent     0d2d14f7d4b21c24126f59fb2cda02314134041b (diff)
download   genenetwork2-db30ba7d61b2fac4dabe4664da3e8288cd80b3b8.tar.gz
Move the code that caches mapping inputs into views.py so it can more easily include the temp_uuid (plus it's more consistent with how caching is handled elsewhere)
Also changed the cache expiration used when sharing results from two months to one month
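In effect, the POST branch of mapping_results_page() now does what RunMapping.__init__() used to do: serialize start_vars to JSON, key it by its MD5 digest, and cache it in Redis for one hour. Below is a minimal, standalone sketch of that pattern; the real code uses the project's get_redis_conn() helper and request.form, while redis.Redis() and the cache_inputs name here are placeholders for illustration.

import hashlib
import json

import redis  # stand-in for the connection returned by get_redis_conn()

ONE_HOUR = 60 * 60

def cache_inputs(redis_conn, start_vars: dict) -> str:
    """Serialize the mapping inputs, key them by their MD5 digest, and
    cache them for one hour; the digest doubles as the shareable ID."""
    inputs_json = json.dumps(start_vars, sort_keys=True)
    hash_of_inputs = hashlib.md5(inputs_json.encode()).hexdigest()
    redis_conn.set(hash_of_inputs, inputs_json, ex=ONE_HOUR)
    return hash_of_inputs

# Usage (hypothetical inputs):
# hash_of_inputs = cache_inputs(redis.Redis(), {"trait_id": "...", "temp_uuid": "..."})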
Diffstat (limited to 'wqflask')
-rw-r--r--  wqflask/wqflask/marker_regression/run_mapping.py  13
-rw-r--r--  wqflask/wqflask/views.py  20
2 files changed, 19 insertions(+), 14 deletions(-)
diff --git a/wqflask/wqflask/marker_regression/run_mapping.py b/wqflask/wqflask/marker_regression/run_mapping.py
index 6edf964e..9161f283 100644
--- a/wqflask/wqflask/marker_regression/run_mapping.py
+++ b/wqflask/wqflask/marker_regression/run_mapping.py
@@ -3,7 +3,6 @@ from base import data_set # import create_dataset
from pprint import pformat as pf
-import hashlib
import string
import math
from decimal import Decimal
@@ -45,16 +44,6 @@ Redis = get_redis_conn()
class RunMapping:
def __init__(self, start_vars, temp_uuid):
-
- # Get hash of inputs (as JSON) for sharing results
- inputs_json = json.dumps(start_vars, sort_keys=True)
- dhash = hashlib.md5()
- dhash.update(inputs_json.encode())
- self.hash_of_inputs = dhash.hexdigest()
-
- # Just store for one hour on initial load; will be stored for longer if user clicks Share
- Redis.set(self.hash_of_inputs, inputs_json, ex=60*60)
-
helper_functions.get_species_dataset_trait(self, start_vars)
# needed to pass temp_uuid to gn1 mapping code (marker_regression_gn1.py)
@@ -65,6 +54,8 @@ class RunMapping:
self.temp_trait = "True"
self.group = self.dataset.group.name
+ self.hash_of_inputs = start_vars['hash_of_inputs']
+
self.json_data = {}
self.json_data['lodnames'] = ['lod.hk']
diff --git a/wqflask/wqflask/views.py b/wqflask/wqflask/views.py
index c48965fc..f2d20eef 100644
--- a/wqflask/wqflask/views.py
+++ b/wqflask/wqflask/views.py
@@ -4,6 +4,7 @@ import base64
import csv
import datetime
import flask
+import hashlib
import io # Todo: Use cStringIO?
import json
@@ -680,10 +681,21 @@ def loading_page():
@app.route("/run_mapping", methods=('POST','GET'))
def mapping_results_page():
if request.method == "GET":
- initial_start_vars = json.loads(Redis.get(request.args.get("hash")))
+ hash_of_inputs = request.args.get("hash")
+ initial_start_vars = json.loads(Redis.get(hash_of_inputs))
+ initial_start_vars['hash_of_inputs'] = hash_of_inputs
else:
initial_start_vars = request.form
+ # Get hash of inputs (as JSON) for sharing results
+ inputs_json = json.dumps(initial_start_vars, sort_keys=True)
+ dhash = hashlib.md5()
+ dhash.update(inputs_json.encode())
+ hash_of_inputs = dhash.hexdigest()
+
+ # Just store for one hour on initial load; will be stored for longer if user clicks Share
+ Redis.set(hash_of_inputs, inputs_json, ex=60*60)
+
temp_uuid = initial_start_vars['temp_uuid']
wanted = (
'trait_id',
@@ -749,6 +761,8 @@ def mapping_results_page():
if key in wanted:
start_vars[key] = value
+ start_vars['hash_of_inputs'] = hash_of_inputs
+
version = "v3"
key = "mapping_results:{}:".format(
version) + json.dumps(start_vars, sort_keys=True)
@@ -783,10 +797,10 @@ def mapping_results_page():
@app.route("/cache_mapping_inputs", methods=('POST',))
def cache_mapping_inputs():
- TWO_MONTHS = 60 * 60 * 24 * 60
+ ONE_MONTH = 60 * 60 * 24 * 30
cache_id = request.form.get("inputs_hash")
inputs_json = Redis.get(cache_id)
- Redis.set(cache_id, inputs_json, ex=TWO_MONTHS)
+ Redis.set(cache_id, inputs_json, ex=ONE_MONTH)
return "Success"
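For reference, the Share flow on the client side amounts to a single POST of the cached hash back to this endpoint, which re-sets the same Redis key with the one-month expiry. A hedged sketch using the requests library, assuming a local GN2 instance at http://localhost:5000 (the Flask default port; the digest shown is only an example value):

import requests

# Hypothetical client call made when the user clicks "Share"; adjust the
# host/port to wherever GN2 is actually served.
resp = requests.post(
    "http://localhost:5000/cache_mapping_inputs",
    data={"inputs_hash": "d41d8cd98f00b204e9800998ecf8427e"},  # example MD5 digest
)
print(resp.text)  # prints "Success" once the key's TTL is extended to one month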