author | BonfaceKilz | 2020-10-07 15:08:05 +0300
---|---|---
committer | BonfaceKilz | 2020-10-09 00:01:35 +0300
commit | 4aa138360fd41f9d496e00aa744700cd4d8a53a6 (patch)
tree | a4468e558dbcc6f40289edbae92a93dc58c746ef /wqflask
parent | f0dd46e668a55bd024f5dee4e99cf1215a107c26 (diff)
download | genenetwork2-4aa138360fd41f9d496e00aa744700cd4d8a53a6.tar.gz
Apply PEP-8 formatting
Diffstat (limited to 'wqflask')
-rw-r--r-- | wqflask/wqflask/correlation_matrix/show_corr_matrix.py | 29
-rw-r--r-- | wqflask/wqflask/network_graph/network_graph.py | 100
-rw-r--r-- | wqflask/wqflask/wgcna/wgcna_analysis.py | 169
3 files changed, 156 insertions, 142 deletions
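The commit message does not record how the reformatting was produced. As a rough sketch only, a pass like this could be reproduced with autopep8; the tool choice, the `aggressive` setting, and the in-place rewrite below are assumptions for illustration, not something stated in this commit.

```python
# Hypothetical re-run of a PEP-8 pass over the three files touched by this
# commit. autopep8 is an assumed tool choice; the commit does not name one.
import autopep8

PATHS = [
    "wqflask/wqflask/correlation_matrix/show_corr_matrix.py",
    "wqflask/wqflask/network_graph/network_graph.py",
    "wqflask/wqflask/wgcna/wgcna_analysis.py",
]

for path in PATHS:
    with open(path) as handle:
        source = handle.read()
    # fix_code() returns a PEP-8-formatted copy of the source; the assumed
    # 'aggressive' level allows rewrites beyond pure whitespace fixes.
    fixed = autopep8.fix_code(source, options={"aggressive": 1})
    with open(path, "w") as handle:
        handle.write(fixed)
```

Comparing `git diff` after such a pass against this patch would show whether the original change used the same tool and settings.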
diff --git a/wqflask/wqflask/correlation_matrix/show_corr_matrix.py b/wqflask/wqflask/correlation_matrix/show_corr_matrix.py
index 49ba9e5d..c0d84aa2 100644
--- a/wqflask/wqflask/correlation_matrix/show_corr_matrix.py
+++ b/wqflask/wqflask/correlation_matrix/show_corr_matrix.py
@@ -22,33 +22,24 @@
 import datetime
 import math
 import numpy as np
 import scipy
-
-from rpy2.robjects.packages import importr
 import rpy2.robjects as robjects
+import utility.webqtlUtil  # this is for parallel computing only.
+import utility.logger
-from pprint import pformat as pf
+from base import data_set
+from functools import reduce
+from rpy2.robjects.packages import importr
+from utility import webqtlUtil
+from utility import helper_functions
+from utility import corr_result_helpers
 from utility.redis_tools import get_redis_conn
-from functools import reduce
+
+logger = utility.logger.getLogger(__name__)
 Redis = get_redis_conn()
 THIRTY_DAYS = 60 * 60 * 24 * 30
-from utility.THCell import THCell
-from utility.TDCell import TDCell
-from base.trait import GeneralTrait
-from base import data_set
-from utility import webqtlUtil, helper_functions, corr_result_helpers
-
-from db import webqtlDatabaseFunction
-import utility.webqtlUtil #this is for parallel computing only.
-from wqflask.correlation import correlation_functions
-from utility.benchmark import Bench
-
-from flask import Flask, g, url_for
-
-import utility.logger
-logger = utility.logger.getLogger(__name__ )


 class CorrelationMatrix(object):
diff --git a/wqflask/wqflask/network_graph/network_graph.py b/wqflask/wqflask/network_graph/network_graph.py
index cfefe4ec..723a749f 100644
--- a/wqflask/wqflask/network_graph/network_graph.py
+++ b/wqflask/wqflask/network_graph/network_graph.py
@@ -1,4 +1,4 @@
-## Copyright (C) University of Tennessee Health Science Center, Memphis, TN.
+# Copyright (C) University of Tennessee Health Science Center, Memphis, TN.
 #
 # This program is free software: you can redistribute it and/or modify it
 # under the terms of the GNU Affero General Public License
@@ -20,25 +20,19 @@
 import scipy
 import simplejson as json

-from pprint import pformat as pf
-
-from utility.TDCell import TDCell
 from base.trait import create_trait
 from base import data_set
-from utility import webqtlUtil, helper_functions, corr_result_helpers
+from utility import helper_functions
+from utility import corr_result_helpers
 from utility.tools import GN2_BRANCH_URL

-from db import webqtlDatabaseFunction
-import utility.webqtlUtil #this is for parallel computing only.
-from wqflask.correlation import correlation_functions
-
-from flask import Flask, g


 class NetworkGraph(object):

     def __init__(self, start_vars):
-        trait_db_list = [trait.strip() for trait in start_vars['trait_list'].split(',')]
+        trait_db_list = [trait.strip()
+                         for trait in start_vars['trait_list'].split(',')]

         helper_functions.get_trait_db_obs(self, trait_db_list)
@@ -66,7 +60,8 @@ class NetworkGraph(object):
                     this_trait_vals.append('')
             self.sample_data.append(this_trait_vals)

-        self.lowest_overlap = 8 #ZS: Variable set to the lowest overlapping samples in order to notify user, or 8, whichever is lower (since 8 is when we want to display warning)
+        # ZS: Variable set to the lowest overlapping samples in order to notify user, or 8, whichever is lower (since 8 is when we want to display warning)
+        self.lowest_overlap = 8

         self.nodes_list = []
         self.edges_list = []
@@ -78,9 +73,9 @@ class NetworkGraph(object):
             this_sample_data = this_trait.data

             corr_result_row = []
-            is_spearman = False #ZS: To determine if it's above or below the diagonal
+            is_spearman = False  # ZS: To determine if it's above or below the diagonal

-            max_corr = 0 #ZS: Used to determine whether node should be hidden when correlation coefficient slider is used
+            max_corr = 0  # ZS: Used to determine whether node should be hidden when correlation coefficient slider is used

             for target in self.trait_list:
                 target_trait = target[0]
@@ -99,20 +94,23 @@ class NetworkGraph(object):
                         this_trait_vals.append(sample_value)
                         target_vals.append(target_sample_value)

-                this_trait_vals, target_vals, num_overlap = corr_result_helpers.normalize_values(this_trait_vals, target_vals)
+                this_trait_vals, target_vals, num_overlap = corr_result_helpers.normalize_values(
+                    this_trait_vals, target_vals)

                 if num_overlap < self.lowest_overlap:
                     self.lowest_overlap = num_overlap
                 if num_overlap == 0:
                     continue
                 else:
-                    pearson_r, pearson_p = scipy.stats.pearsonr(this_trait_vals, target_vals)
+                    pearson_r, pearson_p = scipy.stats.pearsonr(
+                        this_trait_vals, target_vals)
                     if is_spearman == False:
                         sample_r, sample_p = pearson_r, pearson_p
                         if sample_r == 1:
                             continue
                     else:
-                        sample_r, sample_p = scipy.stats.spearmanr(this_trait_vals, target_vals)
+                        sample_r, sample_p = scipy.stats.spearmanr(
+                            this_trait_vals, target_vals)

                     if -1 <= sample_r < -0.7:
                         color = "#0000ff"
@@ -130,44 +128,44 @@ class NetworkGraph(object):
                         color = "#ffa500"
                         width = 2
                     elif 0.7 <= sample_r <= 1:
-                            color = "#ff0000"
-                            width = 3
+                        color = "#ff0000"
+                        width = 3
                     else:
                         color = "#000000"
-                            width = 0
+                        width = 0

                     if abs(sample_r) > max_corr:
                         max_corr = abs(sample_r)

-                    edge_data = {'id' : str(this_trait.name) + '_to_' + str(target_trait.name),
-                                 'source' : str(this_trait.name) + ":" + str(this_trait.dataset.name),
-                                 'target' : str(target_trait.name) + ":" + str(target_trait.dataset.name),
-                                 'correlation' : round(sample_r, 3),
-                                 'abs_corr' : abs(round(sample_r, 3)),
-                                 'p_value' : round(sample_p, 3),
-                                 'overlap' : num_overlap,
-                                 'color' : color,
-                                 'width' : width }
+                    edge_data = {'id': str(this_trait.name) + '_to_' + str(target_trait.name),
+                                 'source': str(this_trait.name) + ":" + str(this_trait.dataset.name),
+                                 'target': str(target_trait.name) + ":" + str(target_trait.dataset.name),
+                                 'correlation': round(sample_r, 3),
+                                 'abs_corr': abs(round(sample_r, 3)),
+                                 'p_value': round(sample_p, 3),
+                                 'overlap': num_overlap,
+                                 'color': color,
+                                 'width': width}

-                    edge_dict = { 'data' : edge_data }
+                    edge_dict = {'data': edge_data}

                     self.edges_list.append(edge_dict)

             if trait_db[1].type == "ProbeSet":
-                node_dict = { 'data' : {'id' : str(this_trait.name) + ":" + str(this_trait.dataset.name),
-                                        'label' : this_trait.symbol,
-                                        'symbol' : this_trait.symbol,
-                                        'geneid' : this_trait.geneid,
-                                        'omim' : this_trait.omim,
-                                        'max_corr' : max_corr } }
+                node_dict = {'data': {'id': str(this_trait.name) + ":" + str(this_trait.dataset.name),
+                                      'label': this_trait.symbol,
+                                      'symbol': this_trait.symbol,
+                                      'geneid': this_trait.geneid,
+                                      'omim': this_trait.omim,
+                                      'max_corr': max_corr}}
             elif trait_db[1].type == "Publish":
-                node_dict = { 'data' : {'id' : str(this_trait.name) + ":" + str(this_trait.dataset.name),
-                                        'label' : this_trait.name,
-                                        'max_corr' : max_corr } }
+                node_dict = {'data': {'id': str(this_trait.name) + ":" + str(this_trait.dataset.name),
+                                      'label': this_trait.name,
+                                      'max_corr': max_corr}}
             else:
-                node_dict = { 'data' : {'id' : str(this_trait.name) + ":" + str(this_trait.dataset.name),
-                                        'label' : this_trait.name,
-                                        'max_corr' : max_corr } }
+                node_dict = {'data': {'id': str(this_trait.name) + ":" + str(this_trait.dataset.name),
+                                      'label': this_trait.name,
+                                      'max_corr': max_corr}}
             self.nodes_list.append(node_dict)

         self.elements = json.dumps(self.nodes_list + self.edges_list)
@@ -177,13 +175,13 @@ class NetworkGraph(object):
         for sample in self.all_sample_list:
             groups.append(1)

-        self.js_data = dict(traits = [trait.name for trait in self.traits],
-                            groups = groups,
-                            cols = list(range(len(self.traits))),
-                            rows = list(range(len(self.traits))),
-                            samples = self.all_sample_list,
-                            sample_data = self.sample_data,
-                            elements = self.elements,)
+        self.js_data = dict(traits=[trait.name for trait in self.traits],
+                            groups=groups,
+                            cols=list(range(len(self.traits))),
+                            rows=list(range(len(self.traits))),
+                            samples=self.all_sample_list,
+                            sample_data=self.sample_data,
+                            elements=self.elements,)

     def get_trait_db_obs(self, trait_db_list):
         self.trait_list = []
@@ -193,6 +191,6 @@ class NetworkGraph(object):
             trait_name, dataset_name = trait_db.split(":")
             dataset_ob = data_set.create_dataset(dataset_name)
             trait_ob = create_trait(dataset=dataset_ob,
-                       name=trait_name,
-                       cellid=None)
+                                    name=trait_name,
+                                    cellid=None)
             self.trait_list.append((trait_ob, dataset_ob))
diff --git a/wqflask/wqflask/wgcna/wgcna_analysis.py b/wqflask/wqflask/wgcna/wgcna_analysis.py
index d79ad6df..0afe7e82 100644
--- a/wqflask/wqflask/wgcna/wgcna_analysis.py
+++ b/wqflask/wqflask/wgcna/wgcna_analysis.py
@@ -1,122 +1,146 @@
 # WGCNA analysis for GN2
 # Author / Maintainer: Danny Arends <Danny.Arends@gmail.com>
 import sys
-from numpy import *
-import scipy as sp # SciPy
 import rpy2.robjects as ro  # R Objects
 import rpy2.rinterface as ri
+import array
+from numpy import *

 from base.webqtlConfig import GENERATED_IMAGE_DIR
-from utility import webqtlUtil # Random number for the image
-
-import base64
-import array
+from rpy2.robjects.packages import importr
+from utility import webqtlUtil  # Random number for the image
 from utility import helper_functions
-from rpy2.robjects.packages import importr

 utils = importr("utils")

-## Get pointers to some common R functions
-r_library = ro.r["library"] # Map the library function
-r_options = ro.r["options"] # Map the options function
-r_read_csv = ro.r["read.csv"] # Map the read.csv function
-r_dim = ro.r["dim"] # Map the dim function
-r_c = ro.r["c"] # Map the c function
-r_cat = ro.r["cat"] # Map the cat function
-r_paste = ro.r["paste"] # Map the paste function
-r_unlist = ro.r["unlist"] # Map the unlist function
-r_unique = ro.r["unique"] # Map the unique function
-r_length = ro.r["length"] # Map the length function
-r_unlist = ro.r["unlist"] # Map the unlist function
-r_list = ro.r.list # Map the list function
-r_matrix = ro.r.matrix # Map the matrix function
-r_seq = ro.r["seq"] # Map the seq function
-r_table = ro.r["table"] # Map the table function
-r_names = ro.r["names"] # Map the names function
-r_sink = ro.r["sink"] # Map the sink function
-r_is_NA = ro.r["is.na"] # Map the is.na function
-r_file = ro.r["file"] # Map the file function
-r_png = ro.r["png"] # Map the png function for plotting
-r_dev_off = ro.r["dev.off"] # Map the dev.off function
+# Get pointers to some common R functions
+r_library = ro.r["library"]  # Map the library function
+r_options = ro.r["options"]  # Map the options function
+r_read_csv = ro.r["read.csv"]  # Map the read.csv function
+r_dim = ro.r["dim"]  # Map the dim function
+r_c = ro.r["c"]  # Map the c function
+r_cat = ro.r["cat"]  # Map the cat function
+r_paste = ro.r["paste"]  # Map the paste function
+r_unlist = ro.r["unlist"]  # Map the unlist function
+r_unique = ro.r["unique"]  # Map the unique function
+r_length = ro.r["length"]  # Map the length function
+r_unlist = ro.r["unlist"]  # Map the unlist function
+r_list = ro.r.list  # Map the list function
+r_matrix = ro.r.matrix  # Map the matrix function
+r_seq = ro.r["seq"]  # Map the seq function
+r_table = ro.r["table"]  # Map the table function
+r_names = ro.r["names"]  # Map the names function
+r_sink = ro.r["sink"]  # Map the sink function
+r_is_NA = ro.r["is.na"]  # Map the is.na function
+r_file = ro.r["file"]  # Map the file function
+r_png = ro.r["png"]  # Map the png function for plotting
+r_dev_off = ro.r["dev.off"]  # Map the dev.off function
+

 class WGCNA(object):
     def __init__(self):
         print("Initialization of WGCNA")
         #log = r_file("/tmp/genenetwork_wcgna.log", open = "wt")
-        #r_sink(log) # Uncomment the r_sink() commands to log output from stdout/stderr to a file
+        # r_sink(log)  # Uncomment the r_sink() commands to log output from stdout/stderr to a file
         #r_sink(log, type = "message")
-        r_library("WGCNA") # Load WGCNA - Should only be done once, since it is quite expensive
-        r_options(stringsAsFactors = False)
+        # Load WGCNA - Should only be done once, since it is quite expensive
+        r_library("WGCNA")
+        r_options(stringsAsFactors=False)
         print("Initialization of WGCNA done, package loaded in R session")
-        self.r_enableWGCNAThreads = ro.r["enableWGCNAThreads"] # Map the enableWGCNAThreads function
-        self.r_pickSoftThreshold = ro.r["pickSoftThreshold"] # Map the pickSoftThreshold function
-        self.r_blockwiseModules = ro.r["blockwiseModules"] # Map the blockwiseModules function
-        self.r_labels2colors = ro.r["labels2colors"] # Map the labels2colors function
-        self.r_plotDendroAndColors = ro.r["plotDendroAndColors"] # Map the plotDendroAndColors function
+        # Map the enableWGCNAThreads function
+        self.r_enableWGCNAThreads = ro.r["enableWGCNAThreads"]
+        # Map the pickSoftThreshold function
+        self.r_pickSoftThreshold = ro.r["pickSoftThreshold"]
+        # Map the blockwiseModules function
+        self.r_blockwiseModules = ro.r["blockwiseModules"]
+        # Map the labels2colors function
+        self.r_labels2colors = ro.r["labels2colors"]
+        # Map the plotDendroAndColors function
+        self.r_plotDendroAndColors = ro.r["plotDendroAndColors"]
         print("Obtained pointers to WGCNA functions")

     def run_analysis(self, requestform):
         print("Starting WGCNA analysis on dataset")
-        self.r_enableWGCNAThreads() # Enable multi threading
-        self.trait_db_list = [trait.strip() for trait in requestform['trait_list'].split(',')]
-        print(("Retrieved phenotype data from database", requestform['trait_list']))
+        # Enable multi threading
+        self.r_enableWGCNAThreads()
+        self.trait_db_list = [trait.strip()
+                              for trait in requestform['trait_list'].split(',')]
+        print(("Retrieved phenotype data from database",
+               requestform['trait_list']))
         helper_functions.get_trait_db_obs(self, self.trait_db_list)

         self.input = {}  # self.input contains the phenotype values we need to send to R
-        strains = [] # All the strains we have data for (contains duplicates)
-        traits = [] # All the traits we have data for (should not contain duplicates)
+        # All the strains we have data for (contains duplicates)
+        strains = []
+        # All the traits we have data for (should not contain duplicates)
+        traits = []
         for trait in self.trait_list:
             traits.append(trait[0].name)
             self.input[trait[0].name] = {}
             for strain in trait[0].data:
                 strains.append(strain)
-                self.input[trait[0].name][strain] = trait[0].data[strain].value
+                self.input[trait[0].name][strain] = trait[0].data[strain].value

         # Transfer the load data from python to R
-        uStrainsR = r_unique(ro.Vector(strains)) # Unique strains in R vector
+        # Unique strains in R vector
+        uStrainsR = r_unique(ro.Vector(strains))
         uTraitsR = r_unique(ro.Vector(traits))  # Unique traits in R vector

         r_cat("The number of unique strains:", r_length(uStrainsR), "\n")
         r_cat("The number of unique traits:", r_length(uTraitsR), "\n")

         # rM is the datamatrix holding all the data in R /rows = strains columns = traits
-        rM = ro.r.matrix(ri.NA_Real, nrow=r_length(uStrainsR), ncol=r_length(uTraitsR), dimnames = r_list(uStrainsR, uTraitsR))
+        rM = ro.r.matrix(ri.NA_Real, nrow=r_length(uStrainsR), ncol=r_length(
+            uTraitsR), dimnames=r_list(uStrainsR, uTraitsR))

         for t in uTraitsR:
-            trait = t[0] # R uses vectors every single element is a vector
+            # R uses vectors every single element is a vector
+            trait = t[0]
             for s in uStrainsR:
-                strain = s[0] # R uses vectors every single element is a vector
+                # R uses vectors every single element is a vector
+                strain = s[0]
                 #DEBUG: print(trait, strain, " in python: ", self.input[trait].get(strain), "in R:", rM.rx(strain,trait)[0])
-                rM.rx[strain, trait] = self.input[trait].get(strain) # Update the matrix location
+                rM.rx[strain, trait] = self.input[trait].get(
+                    strain)  # Update the matrix location
         sys.stdout.flush()

         self.results = {}
-        self.results['nphe'] = r_length(uTraitsR)[0] # Number of phenotypes/traits
-        self.results['nstr'] = r_length(uStrainsR)[0] # Number of strains
+        # Number of phenotypes/traits
+        self.results['nphe'] = r_length(uTraitsR)[0]
+        self.results['nstr'] = r_length(
+            uStrainsR)[0]  # Number of strains
         self.results['phenotypes'] = uTraitsR  # Traits used
-        self.results['strains'] = uStrainsR # Strains used in the analysis
-        self.results['requestform'] = requestform # Store the user specified parameters for the output page
+        # Strains used in the analysis
+        self.results['strains'] = uStrainsR
+        # Store the user specified parameters for the output page
+        self.results['requestform'] = requestform

         # Calculate soft threshold if the user specified the SoftThreshold variable
         if requestform.get('SoftThresholds') is not None:
-            powers = [int(threshold.strip()) for threshold in requestform['SoftThresholds'].rstrip().split(",")]
-            rpow = r_unlist(r_c(powers))
-            print(("SoftThresholds: {} == {}".format(powers, rpow)))
-            self.sft = self.r_pickSoftThreshold(rM, powerVector = rpow, verbose = 5)
-
-            print(("PowerEstimate: {}".format(self.sft[0])))
-            self.results['PowerEstimate'] = self.sft[0]
-            if self.sft[0][0] is ri.NA_Integer:
-                print("No power is suitable for the analysis, just use 1")
-                self.results['Power'] = 1 # No power could be estimated
-            else:
-                self.results['Power'] = self.sft[0][0] # Use the estimated power
+            powers = [int(threshold.strip())
+                      for threshold in requestform['SoftThresholds'].rstrip().split(",")]
+            rpow = r_unlist(r_c(powers))
+            print(("SoftThresholds: {} == {}".format(powers, rpow)))
+            self.sft = self.r_pickSoftThreshold(
+                rM, powerVector=rpow, verbose=5)
+
+            print(("PowerEstimate: {}".format(self.sft[0])))
+            self.results['PowerEstimate'] = self.sft[0]
+            if self.sft[0][0] is ri.NA_Integer:
+                print("No power is suitable for the analysis, just use 1")
+                # No power could be estimated
+                self.results['Power'] = 1
+            else:
+                # Use the estimated power
+                self.results['Power'] = self.sft[0][0]
         else:
-            # The user clicked a button, so no soft threshold selection
-            self.results['Power'] = requestform.get('Power') # Use the power value the user gives
+            # The user clicked a button, so no soft threshold selection
+            # Use the power value the user gives
+            self.results['Power'] = requestform.get('Power')

         # Create the block wise modules using WGCNA
-        network = self.r_blockwiseModules(rM, power = self.results['Power'], TOMType = requestform['TOMtype'], minModuleSize = requestform['MinModuleSize'], verbose = 3)
+        network = self.r_blockwiseModules(
+            rM, power=self.results['Power'], TOMType=requestform['TOMtype'], minModuleSize=requestform['MinModuleSize'], verbose=3)

         # Save the network for the GUI
         self.results['network'] = network
@@ -130,7 +154,8 @@ class WGCNA(object):
         self.results['imgloc'] = GENERATED_IMAGE_DIR + self.results['imgurl']
         r_png(self.results['imgloc'], width=1000, height=600, type='cairo-png')
         mergedColors = self.r_labels2colors(network[1])
-        self.r_plotDendroAndColors(network[5][0], mergedColors, "Module colors", dendroLabels = False, hang = 0.03, addGuide = True, guideHang = 0.05)
+        self.r_plotDendroAndColors(network[5][0], mergedColors, "Module colors",
+                                   dendroLabels=False, hang=0.03, addGuide=True, guideHang=0.05)
         r_dev_off()
         sys.stdout.flush()
@@ -146,11 +171,11 @@ class WGCNA(object):
         print("Processing WGCNA output")
         template_vars = {}
         template_vars["input"] = self.input
-        template_vars["powers"] = self.sft[1:] # Results from the soft threshold analysis
+        # Results from the soft threshold analysis
+        template_vars["powers"] = self.sft[1:]
         template_vars["results"] = self.results
         self.render_image(results)
         sys.stdout.flush()
-        #r_sink(type = "message") # This restores R output to the stdout/stderr
-        #r_sink() # We should end the Rpy session more or less
+        # r_sink(type = "message")  # This restores R output to the stdout/stderr
+        # r_sink()  # We should end the Rpy session more or less
         return(dict(template_vars))
-