author    zsloan  2018-03-29 15:40:56 +0000
committer zsloan  2018-03-29 15:40:56 +0000
commit    6ff7df2360d1a6d0461980c938809165982583e8 (patch)
tree      8e17c685f96e04a32fd3e32f0252477f3c905af7 /wqflask
parent    e67e3a76fca0bad4796853eb58140a412922bc9c (diff)
download  genenetwork2-6ff7df2360d1a6d0461980c938809165982583e8.tar.gz
Fixed box plots and got them to update with changed sample data
Removed the fast correlation option because it doesn't work well with gunicorn.
Changed the logging level from error to debug when logging which page is being visited in views.py.
Diffstat (limited to 'wqflask')
-rw-r--r--  wqflask/wqflask/correlation/show_corr_results.py     |  43
-rw-r--r--  wqflask/wqflask/do_search.py                          |   1
-rw-r--r--  wqflask/wqflask/static/new/javascript/show_trait.js  | 176
-rw-r--r--  wqflask/wqflask/views.py                              |  66
4 files changed, 180 insertions(+), 106 deletions(-)
diff --git a/wqflask/wqflask/correlation/show_corr_results.py b/wqflask/wqflask/correlation/show_corr_results.py
index 9b048346..12f76b7d 100644
--- a/wqflask/wqflask/correlation/show_corr_results.py
+++ b/wqflask/wqflask/correlation/show_corr_results.py
@@ -236,28 +236,29 @@ class CorrelationResults(object):
self.get_sample_r_and_p_values(trait, self.target_dataset.trait_data[trait])
elif self.corr_type == "sample":
- if self.dataset.type == "ProbeSet" and cache_available:
- dataset_file = open(webqtlConfig.GENERATED_TEXT_DIR+db_filename,'r')
-
- #XZ, 01/08/2009: read the first line
- line = dataset_file.readline()
- dataset_strains = webqtlUtil.readLineCSV(line)[1:]
-
- self.this_trait_vals = []
- for item in dataset_strains:
- if item in self.sample_data:
- self.this_trait_vals.append(self.sample_data[item])
- else:
- self.this_trait_vals.append("None")
- num_overlap = len(self.this_trait_vals)
- logger.debug("DOING PARALLEL")
- self.do_parallel_correlation(db_filename, num_overlap)
- else:
- for trait, values in self.target_dataset.trait_data.iteritems():
- self.get_sample_r_and_p_values(trait, values)
+ #ZS: Commented out since parallel correlation has issues with gunicorn
+ # if self.dataset.type == "ProbeSet" and cache_available:
+ # dataset_file = open(webqtlConfig.GENERATED_TEXT_DIR+db_filename,'r')
+
+ ##XZ, 01/08/2009: read the first line
+ # line = dataset_file.readline()
+ # dataset_strains = webqtlUtil.readLineCSV(line)[1:]
+
+ # self.this_trait_vals = []
+ # for item in dataset_strains:
+ # if item in self.sample_data:
+ # self.this_trait_vals.append(self.sample_data[item])
+ # else:
+ # self.this_trait_vals.append("None")
+ # num_overlap = len(self.this_trait_vals)
+ # logger.debug("DOING PARALLEL")
+ # self.do_parallel_correlation(db_filename, num_overlap)
+ # else:
+ for trait, values in self.target_dataset.trait_data.iteritems():
+ self.get_sample_r_and_p_values(trait, values)
- self.correlation_data = collections.OrderedDict(sorted(self.correlation_data.items(),
- key=lambda t: -abs(t[1][0])))
+ self.correlation_data = collections.OrderedDict(sorted(self.correlation_data.items(),
+ key=lambda t: -abs(t[1][0])))
if self.target_dataset.type == "ProbeSet" or self.target_dataset.type == "Geno":
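With the parallel path commented out above, every sample correlation now goes through get_sample_r_and_p_values, and the results are then ordered by the absolute value of the correlation. A minimal sketch of that ordering step, assuming (as the lambda above implies) that each value in correlation_data begins with the sample r; the trait names and numbers here are made up for illustration:

    import collections

    # Hypothetical data standing in for self.correlation_data: trait -> (sample_r, sample_p)
    correlation_data = {
        "Trait_A": (0.12, 0.45),
        "Trait_B": (-0.87, 0.001),
        "Trait_C": (0.55, 0.02),
    }

    # Same ordering as the patch: strongest |r| first, sign ignored, order preserved.
    correlation_data = collections.OrderedDict(
        sorted(correlation_data.items(), key=lambda t: -abs(t[1][0])))

    print(list(correlation_data))  # ['Trait_B', 'Trait_C', 'Trait_A']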
diff --git a/wqflask/wqflask/do_search.py b/wqflask/wqflask/do_search.py
index e1df1e63..17625474 100644
--- a/wqflask/wqflask/do_search.py
+++ b/wqflask/wqflask/do_search.py
@@ -989,3 +989,4 @@ if __name__ == "__main__":
#results = GoSearch("0045202", dataset, cursor, db_conn).run()
logger.debug("results are:", pf(results))
+ db_conn.close()
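The one-line addition above closes the test database connection at the end of do_search.py's __main__ block. As a hedged alternative (not what the patch does), contextlib.closing guarantees the close even if the test query raises; sqlite3 here is only a stand-in for the module's real connection setup:

    import sqlite3
    from contextlib import closing

    with closing(sqlite3.connect(":memory:")) as db_conn:
        cursor = db_conn.cursor()
        cursor.execute("SELECT 1")
        print(cursor.fetchone())
    # db_conn.close() runs automatically when the with-block exits.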
diff --git a/wqflask/wqflask/static/new/javascript/show_trait.js b/wqflask/wqflask/static/new/javascript/show_trait.js
index df10c060..117d8f12 100644
--- a/wqflask/wqflask/static/new/javascript/show_trait.js
+++ b/wqflask/wqflask/static/new/javascript/show_trait.js
@@ -250,15 +250,29 @@
};
redraw_box_plot = function() {
- var x;
- var _i, _len, _ref, data;
- _ref = _.values(root.selected_samples[root.stats_group]);
- trait_vals = [];
- for (_i = 0, _len = _ref.length; _i < _len; _i++) {
- x = _ref[_i];
- trait_vals.push(x.value);
+ var y_value_list = []
+ for (var sample_group in root.selected_samples){
+ var trait_sample_data = _.values(root.selected_samples[sample_group])
+ var trait_vals = [];
+ for (i = 0, len = trait_sample_data.length; i < len; i++) {
+ this_sample_data = trait_sample_data[i];
+ trait_vals.push(this_sample_data.value);
+ }
+ y_value_list.push(trait_vals)
+ }
+
+ if (Object.keys(js_data.sample_group_types).length > 1) {
+ var update = {
+ y: y_value_list
+ }
+ console.log("REDRAW UPDATE:", update)
+ Plotly.restyle('box_plot', update, [0, 1, 2])
+ } else {
+ var update = {
+ y: y_value_list
+ }
+ Plotly.restyle('box_plot', update)
}
- Plotly.restyle('box_plot', 'y', [trait_vals])
}
redraw_prob_plot = function() {
@@ -640,6 +654,108 @@
};
root.stats_group = 'samples_primary';
+
+ if (Object.keys(js_data.sample_group_types).length > 1) {
+ full_sample_lists = [sample_lists[0], sample_lists[1], sample_lists[0].concat(sample_lists[1])]
+ sample_group_list = [js_data.sample_group_types['samples_primary'], js_data.sample_group_types['samples_other'], js_data.sample_group_types['samples_all']]
+ } else {
+ full_sample_lists = [sample_lists[0]]
+ sample_group_list = [js_data.sample_group_types['samples_primary']]
+ }
+
+ if (full_sample_lists.length > 1) {
+ var box_layout = {
+ width: 1200,
+ height: 500,
+ margin: {
+ l: 50,
+ r: 30,
+ t: 30,
+ b: 80
+ }
+ };
+ var trace1 = {
+ y: get_sample_vals(full_sample_lists[0]),
+ type: 'box',
+ name: sample_group_list[0],
+ boxpoints: 'all',
+ jitter: 0.5,
+ whiskerwidth: 0.2,
+ fillcolor: 'cls',
+ marker: {
+ size: 2
+ },
+ line: {
+ width: 1
+ }
+ }
+ var trace2 = {
+ y: get_sample_vals(full_sample_lists[1]),
+ type: 'box',
+ name: sample_group_list[1],
+ boxpoints: 'all',
+ jitter: 0.5,
+ whiskerwidth: 0.2,
+ fillcolor: 'cls',
+ marker: {
+ size: 2
+ },
+ line: {
+ width: 1
+ }
+ }
+ var trace3 = {
+ y: get_sample_vals(full_sample_lists[2]),
+ type: 'box',
+ name: sample_group_list[2],
+ boxpoints: 'all',
+ jitter: 0.5,
+ whiskerwidth: 0.2,
+ fillcolor: 'cls',
+ marker: {
+ size: 2
+ },
+ line: {
+ width: 1
+ }
+ }
+ box_data = [trace1, trace2, trace3]
+ } else {
+ var box_layout = {
+ width: 500,
+ height: 500,
+ margin: {
+ l: 50,
+ r: 30,
+ t: 30,
+ b: 80
+ }
+ };
+ box_data = [
+ {
+ type: 'box',
+ y: get_sample_vals(full_sample_lists[0]),
+ name: sample_group_list[0],
+ boxpoints: 'all',
+ jitter: 0.5,
+ whiskerwidth: 0.2,
+ fillcolor: 'cls',
+ marker: {
+ size: 2
+ },
+ line: {
+ width: 1
+ }
+ }
+ ]
+ }
+
+ obj = {
+ data: box_data,
+ layout: box_layout
+ }
+ Plotly.newPlot('box_plot', obj);
+
// Histogram
var hist_trace = {
x: get_sample_vals(sample_lists[0]),
@@ -686,50 +802,6 @@
root.bar_layout = layout
Plotly.newPlot('bar_chart', root.bar_data, layout)
- if (Object.keys(js_data.sample_group_types).length > 1) {
- full_sample_lists = [sample_lists[0], sample_lists[1], sample_lists[0].concat(sample_lists[1])]
- sample_group_list = [js_data.sample_group_types['samples_primary'], js_data.sample_group_types['samples_other'], js_data.sample_group_types['samples_all']]
- } else {
- full_sample_lists = [sample_lists[0]]
- sample_group_list = [js_data.sample_group_types['samples_primary']]
- }
-
- data = []
- for ( var i = 0; i < full_sample_lists.length; i ++ ) {
- var box_trace = {
- type: 'box',
- y: get_sample_vals(full_sample_lists[i]),
- name: sample_group_list[i],
- boxpoints: 'all',
- jitter: 0.5,
- whiskerwidth: 0.2,
- fillcolor: 'cls',
- marker: {
- size: 2
- },
- line: {
- width: 1
- }
- };
- data.push(box_trace)
- };
- layout = {
- title: 'Box Plot',
- yaxis: {
- autorange: true,
- showgrid: true,
- zeroline: true
- },
- margin: {
- l: 50,
- r: 30,
- t: 80,
- b: 80
- }
- };
-
- Plotly.newPlot('box_plot', data, layout);
-
$('.histogram_samples_group').val(root.stats_group);
$('.histogram_samples_group').change(function() {
root.stats_group = $(this).val();
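The show_trait.js changes above build one box trace per sample group (primary, other, and both combined) when the page loads, and redraw_box_plot later pushes new y values into those existing traces with Plotly.restyle instead of recreating the plot. As a rough server-side analogue of the trace setup, a sketch using plotly's Python API rather than the client-side Plotly.js calls the patch actually uses, with made-up sample values in place of get_sample_vals:

    import plotly.graph_objects as go

    # Hypothetical sample values; the real page pulls these from the trait's sample table.
    primary = [8.1, 9.3, 7.6, 10.2]
    other = [6.9, 8.8, 9.1]
    groups = {"primary": primary, "other": other, "all": primary + other}

    fig = go.Figure(
        data=[go.Box(y=vals, name=name, boxpoints="all", jitter=0.5,
                     whiskerwidth=0.2, marker={"size": 2}, line={"width": 1})
              for name, vals in groups.items()],
        layout={"width": 1200, "height": 500,
                "margin": {"l": 50, "r": 30, "t": 30, "b": 80}},
    )
    fig.show()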
diff --git a/wqflask/wqflask/views.py b/wqflask/wqflask/views.py
index 17a2d762..998e5302 100644
--- a/wqflask/wqflask/views.py
+++ b/wqflask/wqflask/views.py
@@ -123,7 +123,7 @@ def handle_bad_request(e):
@app.route("/")
def index_page():
logger.info("Sending index_page")
- logger.error(request.url)
+ logger.debug(request.url)
params = request.args
if 'import_collections' in params:
import_collections = params['import_collections']
@@ -141,7 +141,7 @@ def index_page():
def tmp_page(img_path):
logger.info("In tmp_page")
logger.info("img_path:", img_path)
- logger.error(request.url)
+ logger.debug(request.url)
initial_start_vars = request.form
logger.info("initial_start_vars:", initial_start_vars)
imgfile = open(GENERATED_IMAGE_DIR + img_path, 'rb')
@@ -174,7 +174,7 @@ def twitter(filename):
@app.route("/search", methods=('GET',))
def search_page():
logger.info("in search_page")
- logger.error(request.url)
+ logger.debug(request.url)
if 'info_database' in request.args:
logger.info("Going to sharing_info_page")
template_vars = sharing_info_page()
@@ -213,7 +213,7 @@ def search_page():
@app.route("/gsearch", methods=('GET',))
def gsearchact():
- logger.error(request.url)
+ logger.debug(request.url)
result = gsearch.GSearch(request.args).__dict__
type = request.args['type']
if type == "gene":
@@ -224,7 +224,7 @@ def gsearchact():
@app.route("/gsearch_updating", methods=('POST',))
def gsearch_updating():
logger.info("REQUEST ARGS:", request.values)
- logger.error(request.url)
+ logger.debug(request.url)
result = update_search_results.GSearch(request.args).__dict__
return result['results']
# type = request.args['type']
@@ -235,31 +235,31 @@ def gsearch_updating():
@app.route("/docedit")
def docedit():
- logger.error(request.url)
+ logger.debug(request.url)
doc = docs.Docs(request.args['entry'])
return render_template("docedit.html", **doc.__dict__)
@app.route('/generated/<filename>')
def generated_file(filename):
- logger.error(request.url)
+ logger.debug(request.url)
return send_from_directory(GENERATED_IMAGE_DIR,filename)
@app.route("/help")
def help():
- logger.error(request.url)
+ logger.debug(request.url)
doc = docs.Docs("help")
return render_template("docs.html", **doc.__dict__)
@app.route("/wgcna_setup", methods=('POST',))
def wcgna_setup():
logger.info("In wgcna, request.form is:", request.form) # We are going to get additional user input for the analysis
- logger.error(request.url)
+ logger.debug(request.url)
return render_template("wgcna_setup.html", **request.form) # Display them using the template
@app.route("/wgcna_results", methods=('POST',))
def wcgna_results():
logger.info("In wgcna, request.form is:", request.form)
- logger.error(request.url)
+ logger.debug(request.url)
wgcna = wgcna_analysis.WGCNA() # Start R, load the package and pointers and create the analysis
wgcnaA = wgcna.run_analysis(request.form) # Start the analysis, a wgcnaA object should be a separate long running thread
result = wgcna.process_results(wgcnaA) # After the analysis is finished store the result
@@ -268,13 +268,13 @@ def wcgna_results():
@app.route("/ctl_setup", methods=('POST',))
def ctl_setup():
logger.info("In ctl, request.form is:", request.form) # We are going to get additional user input for the analysis
- logger.error(request.url)
+ logger.debug(request.url)
return render_template("ctl_setup.html", **request.form) # Display them using the template
@app.route("/ctl_results", methods=('POST',))
def ctl_results():
logger.info("In ctl, request.form is:", request.form)
- logger.error(request.url)
+ logger.debug(request.url)
ctl = ctl_analysis.CTL() # Start R, load the package and pointers and create the analysis
ctlA = ctl.run_analysis(request.form) # Start the analysis, a ctlA object should be a separate long running thread
result = ctl.process_results(ctlA) # After the analysis is finished store the result
@@ -313,13 +313,13 @@ def environments():
@app.route("/submit_trait")
def submit_trait_form():
- logger.error(request.url)
+ logger.debug(request.url)
species_and_groups = get_species_groups()
return render_template("submit_trait.html", **{'species_and_groups' : species_and_groups, 'gn_server_url' : GN_SERVER_URL, 'version' : GN_VERSION})
@app.route("/create_temp_trait", methods=('POST',))
def create_temp_trait():
- logger.error(request.url)
+ logger.debug(request.url)
print("REQUEST.FORM:", request.form)
#template_vars = submit_trait.SubmitTrait(request.form)
@@ -332,7 +332,7 @@ def export_trait_excel():
"""Excel file consisting of the sample data from the trait data and analysis page"""
logger.info("In export_trait_excel")
logger.info("request.form:", request.form)
- logger.error(request.url)
+ logger.debug(request.url)
sample_data = export_trait_data.export_sample_table(request.form)
logger.info("sample_data - type: %s -- size: %s" % (type(sample_data), len(sample_data)))
@@ -358,7 +358,7 @@ def export_trait_csv():
"""CSV file consisting of the sample data from the trait data and analysis page"""
logger.info("In export_trait_csv")
logger.info("request.form:", request.form)
- logger.error(request.url)
+ logger.debug(request.url)
sample_data = export_trait_data.export_sample_table(request.form)
logger.info("sample_data - type: %s -- size: %s" % (type(sample_data), len(sample_data)))
@@ -379,7 +379,7 @@ def export_traits_csv():
"""CSV file consisting of the traits from the search result page"""
logger.info("In export_traits_csv")
logger.info("request.form:", request.form)
- logger.error(request.url)
+ logger.debug(request.url)
csv_data = export_traits.export_search_results_csv(request.form)
return Response(csv_data,
@@ -389,7 +389,7 @@ def export_traits_csv():
@app.route('/export_perm_data', methods=('POST',))
def export_perm_data():
"""CSV file consisting of the permutation data for the mapping results"""
- logger.error(request.url)
+ logger.debug(request.url)
num_perm = float(request.form['num_perm'])
perm_data = json.loads(request.form['perm_results'])
@@ -412,7 +412,7 @@ def export_perm_data():
@app.route("/show_temp_trait", methods=('POST',))
def show_temp_trait_page():
- logger.error(request.url)
+ logger.debug(request.url)
template_vars = show_trait.ShowTrait(request.form)
#logger.info("js_data before dump:", template_vars.js_data)
template_vars.js_data = json.dumps(template_vars.js_data,
@@ -427,7 +427,7 @@ def show_temp_trait_page():
@app.route("/show_trait")
def show_trait_page():
- logger.error(request.url)
+ logger.debug(request.url)
template_vars = show_trait.ShowTrait(request.args)
#logger.info("js_data before dump:", template_vars.js_data)
template_vars.js_data = json.dumps(template_vars.js_data,
@@ -443,7 +443,7 @@ def show_trait_page():
@app.route("/heatmap", methods=('POST',))
def heatmap_page():
logger.info("In heatmap, request.form is:", pf(request.form))
- logger.error(request.url)
+ logger.debug(request.url)
start_vars = request.form
temp_uuid = uuid.uuid4()
@@ -493,7 +493,7 @@ def mapping_results_container_page():
@app.route("/loading", methods=('POST',))
def loading_page():
- logger.error(request.url)
+ logger.debug(request.url)
initial_start_vars = request.form
logger.debug("Marker regression called with initial_start_vars:", initial_start_vars.items())
#temp_uuid = initial_start_vars['temp_uuid']
@@ -552,7 +552,7 @@ def loading_page():
def marker_regression_page():
initial_start_vars = request.form
logger.debug("Marker regression called with initial_start_vars:", initial_start_vars.items())
- logger.error(request.url)
+ logger.debug(request.url)
temp_uuid = initial_start_vars['temp_uuid']
wanted = (
'trait_id',
@@ -678,7 +678,7 @@ def marker_regression_page():
@app.route("/export_mapping_results", methods = ('POST',))
def export_mapping_results():
logger.info("request.form:", request.form)
- logger.error(request.url)
+ logger.debug(request.url)
file_path = request.form.get("results_path")
results_csv = open(file_path, "r").read()
response = Response(results_csv,
@@ -691,7 +691,7 @@ def export_mapping_results():
@app.route("/export", methods = ('POST',))
def export():
logger.info("request.form:", request.form)
- logger.error(request.url)
+ logger.debug(request.url)
svg_xml = request.form.get("data", "Invalid data")
filename = request.form.get("filename", "manhattan_plot_snp")
response = Response(svg_xml, mimetype="image/svg+xml")
@@ -702,7 +702,7 @@ def export():
def export_pdf():
import cairosvg
logger.info("request.form:", request.form)
- logger.error(request.url)
+ logger.debug(request.url)
svg_xml = request.form.get("data", "Invalid data")
logger.info("svg_xml:", svg_xml)
filename = request.form.get("filename", "interval_map_pdf")
@@ -715,7 +715,7 @@ def export_pdf():
@app.route("/network_graph", methods=('POST',))
def network_graph_page():
logger.info("In network_graph, request.form is:", pf(request.form))
- logger.error(request.url)
+ logger.debug(request.url)
start_vars = request.form
traits = [trait.strip() for trait in start_vars['trait_list'].split(',')]
if traits[0] != "":
@@ -731,7 +731,7 @@ def network_graph_page():
@app.route("/corr_compute", methods=('POST',))
def corr_compute_page():
logger.info("In corr_compute, request.form is:", pf(request.form))
- logger.error(request.url)
+ logger.debug(request.url)
#fd = webqtlFormData.webqtlFormData(request.form)
template_vars = show_corr_results.CorrelationResults(request.form)
return render_template("correlation_page.html", **template_vars.__dict__)
@@ -739,7 +739,7 @@ def corr_compute_page():
@app.route("/corr_matrix", methods=('POST',))
def corr_matrix_page():
logger.info("In corr_matrix, request.form is:", pf(request.form))
- logger.error(request.url)
+ logger.debug(request.url)
start_vars = request.form
traits = [trait.strip() for trait in start_vars['trait_list'].split(',')]
@@ -755,7 +755,7 @@ def corr_matrix_page():
@app.route("/corr_scatter_plot")
def corr_scatter_plot_page():
- logger.error(request.url)
+ logger.debug(request.url)
template_vars = corr_scatter_plot.CorrScatterPlot(request.args)
template_vars.js_data = json.dumps(template_vars.js_data,
default=json_default_handler,
@@ -764,7 +764,7 @@ def corr_scatter_plot_page():
@app.route("/submit_bnw", methods=('POST',))
def submit_bnw():
- logger.error(request.url)
+ logger.debug(request.url)
template_vars = get_bnw_input(request.form)
return render_template("empty_collection.html", **{'tool':'Correlation Matrix'})
@@ -772,7 +772,7 @@ def submit_bnw():
def sharing_info_page():
"""Info page displayed when the user clicks the "Info" button next to the dataset selection"""
logger.info("In sharing_info_page")
- logger.error(request.url)
+ logger.debug(request.url)
fd = webqtlFormData.webqtlFormData(request.args)
template_vars = SharingInfoPage.SharingInfoPage(fd)
return template_vars
@@ -780,7 +780,7 @@ def sharing_info_page():
# Take this out or secure it before putting into production
@app.route("/get_temp_data")
def get_temp_data():
- logger.error(request.url)
+ logger.debug(request.url)
temp_uuid = request.args['key']
return flask.jsonify(temp_data.TempData(temp_uuid).get_all())
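Every views.py route touched above follows the same pattern: the request URL is now logged at debug level instead of error, so routine page visits stop showing up as errors while real failures stay visible. A minimal standalone sketch of that pattern, assuming plain Flask and the standard-library logging module rather than GeneNetwork's own logger setup:

    import logging

    from flask import Flask, request

    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger(__name__)

    app = Flask(__name__)

    @app.route("/")
    def index_page():
        logger.info("Sending index_page")
        logger.debug(request.url)  # routine visit: debug, not error
        return "index"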