about summary refs log tree commit diff
path: root/wqflask
diff options
context:
space:
mode:
author	Zachary Sloan	2013-04-18 22:36:39 +0000
committer	Zachary Sloan	2013-04-18 22:36:39 +0000
commit5ffd0debd5ab7ee0e98def74374a8e996629f5c9 (patch)
tree74fd7f83e9d94d49f6e77a009f5d39e6081aa019 /wqflask
parentf36de42faa6565a04c344071a3a4befa60879509 (diff)
downloadgenenetwork2-5ffd0debd5ab7ee0e98def74374a8e996629f5c9.tar.gz
The plink_input is split into chunks that are stored in temp_data, but
we might decide to store it differently
Diffstat (limited to 'wqflask')
-rw-r--r--	wqflask/utility/temp_data.py	14
-rw-r--r--	wqflask/wqflask/my_pylmm/pyLMM/lmm.py	8
2 files changed, 13 insertions, 9 deletions
diff --git a/wqflask/utility/temp_data.py b/wqflask/utility/temp_data.py
index ddf2653c..60f01167 100644
--- a/wqflask/utility/temp_data.py
+++ b/wqflask/utility/temp_data.py
@@ -1,14 +1,15 @@
from __future__ import print_function, division, absolute_import
from redis import Redis
+import redis
import simplejson as json
class TempData(object):
-
- def __init__(self, temp_uuid, part=None):
+
+ def __init__(self, temp_uuid, preface="tempdata", part=None):
self.temp_uuid = temp_uuid
self.redis = Redis()
- self.key = "tempdata:{}".format(self.temp_uuid)
+ self.key = "{}:{}".format(preface, self.temp_uuid)
if part:
self.key += ":{}".format(part)
@@ -19,9 +20,12 @@ class TempData(object):
def get_all(self):
return self.redis.hgetall(self.key)
-
if __name__ == "__main__":
redis = Redis()
for key in redis.keys():
+ print("key is:", key)
+ if "plink" not in key:
+ print(" Skipping...\n")
+ continue
for field in redis.hkeys(key):
- print("{}.{}={}".format(key, field, redis.hget(key, field)))
+ print(" {}.{}={}\n".format(key, field, len(redis.hget(key, field))))
diff --git a/wqflask/wqflask/my_pylmm/pyLMM/lmm.py b/wqflask/wqflask/my_pylmm/pyLMM/lmm.py
index 8c0e0282..a6134fdd 100644
--- a/wqflask/wqflask/my_pylmm/pyLMM/lmm.py
+++ b/wqflask/wqflask/my_pylmm/pyLMM/lmm.py
@@ -89,13 +89,13 @@ def run_human(pheno_vector,
with Bench("Divide into chunks"):
results = chunks.divide_into_chunks(inputs, 64)
-
+
result_store = []
identifier = uuid.uuid4()
for part, result in enumerate(results):
- data_store = temp_data.TempData(identifier, part)
+ data_store = temp_data.TempData(identifier, "plink", part)
- data_store.store(data=pickle.dumps(result))
+ data_store.store("data", pickle.dumps(result, pickle.HIGHEST_PROTOCOL))
result_store.append(data_store)
for snp, this_id in plink_input:
@@ -103,7 +103,7 @@ def run_human(pheno_vector,
if count > 2000:
break
count += 1
-
+
percent_complete = (float(count) / total_snps) * 100
#print("percent_complete: ", percent_complete)
loading_progress.store("percent_complete", percent_complete)