path: root/gn3/api/correlation.py
author     Alexander Kabui                             2021-04-12 09:54:12 +0300
committer  GitHub                                      2021-04-12 09:54:12 +0300
commit     31ac939f58bf7b6d353ced995ca395376203b25f (patch)
tree       41770a0e4ec3441045fca9bc48de794e444b80ba /gn3/api/correlation.py
parent     5151987063eab58b10a2dd8e831ec036df217531 (diff)
download   genenetwork3-31ac939f58bf7b6d353ced995ca395376203b25f.tar.gz
Integrate correlation API
- add new api for gn2-gn3 sample r integration
- delete map for sample list to values
- add db util file
- add python mysql-client dependency
- add db for fetching lit correlation results
- add unittests for db utils
- add tests for db_utils
- modify api for fetching lit correlation results
- refactor Mock Database Connector and unittests
- add sql url parser
- add SQL URI env variable
- refactor code for db utils
- modify return data for lit correlation
- refactor tissue correlation endpoint
- replace db_instance with conn
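For orientation, here is a minimal sketch of how a GeneNetwork2 client might call the new sample integration endpoint introduced in this commit. Only the route ("/sample_x/<corr_method>") and the JSON keys "target_samplelist", "target_dataset" and "trait_data" are taken from the diff below; the host, the "/api/correlation" URL prefix, and the exact shape of the sample values are assumptions.

# Hypothetical GN2-side call; host, port and the /api/correlation prefix are
# assumptions, and the value shapes are illustrative. Only the route and the
# three payload keys come from the compute_sample_integration handler below.
import requests

payload = {
    "target_samplelist": ["BXD1", "BXD2", "BXD5"],
    "target_dataset": {
        # assumed layout: trait name -> values aligned with target_samplelist
        "trait_1": [7.2, 7.9, 6.8],
        "trait_2": [9.1, 8.4, 8.8],
    },
    "trait_data": {"BXD1": 6.5, "BXD2": 7.1, "BXD5": 6.9},  # reference trait, assumed shape
}

response = requests.post(
    "http://localhost:8080/api/correlation/sample_x/pearson",
    json=payload, timeout=30)
print(response.json())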
Diffstat (limited to 'gn3/api/correlation.py')
-rw-r--r--   gn3/api/correlation.py   39
1 file changed, 30 insertions, 9 deletions
diff --git a/gn3/api/correlation.py b/gn3/api/correlation.py
index 53ea6a7..2339088 100644
--- a/gn3/api/correlation.py
+++ b/gn3/api/correlation.py
@@ -1,6 +1,4 @@
"""Endpoints for running correlations"""
-from unittest import mock
-
from flask import jsonify
from flask import Blueprint
from flask import request
@@ -8,11 +6,31 @@ from flask import request
from gn3.computations.correlations import compute_all_sample_correlation
from gn3.computations.correlations import compute_all_lit_correlation
from gn3.computations.correlations import compute_all_tissue_correlation
-
+from gn3.computations.correlations import map_shared_keys_to_values
+from gn3.db_utils import database_connector
correlation = Blueprint("correlation", __name__)
+@correlation.route("/sample_x/<string:corr_method>", methods=["POST"])
+def compute_sample_integration(corr_method="pearson"):
+    """temporary api to help integrate genenetwork2 to genenetwork3 """
+
+    correlation_input = request.get_json()
+
+    target_samplelist = correlation_input.get("target_samplelist")
+    target_data_values = correlation_input.get("target_dataset")
+    this_trait_data = correlation_input.get("trait_data")
+
+    results = map_shared_keys_to_values(target_samplelist, target_data_values)
+
+    correlation_results = compute_all_sample_correlation(corr_method=corr_method,
+                                                          this_trait=this_trait_data,
+                                                          target_dataset=results)
+
+    return jsonify(correlation_results)
+
+
@correlation.route("/sample_r/<string:corr_method>", methods=["POST"])
def compute_sample_r(corr_method="pearson"):
    """correlation endpoint for computing sample r correlations\
@@ -22,11 +40,11 @@ def compute_sample_r(corr_method="pearson"):
    # xtodo move code below to compute_all_sampl correlation
    this_trait_data = correlation_input.get("this_trait")
-    target_datasets = correlation_input.get("target_dataset")
+    target_dataset_data = correlation_input.get("target_dataset")
    correlation_results = compute_all_sample_correlation(corr_method=corr_method,
                                                          this_trait=this_trait_data,
-                                                          target_dataset=target_datasets)
+                                                          target_dataset=target_dataset_data)
    return jsonify({
        "corr_results": correlation_results
@@ -39,13 +57,16 @@ def compute_lit_corr(species=None, gene_id=None):
    are fetched from the database this is the only case where the db\
    might be needed for actual computing of the correlation results"""
-    database_instance = mock.Mock()
+    conn, _cursor_object = database_connector()
    target_traits_gene_ids = request.get_json()
+    target_trait_gene_list = list(target_traits_gene_ids.items())
    lit_corr_results = compute_all_lit_correlation(
-        database_instance=database_instance, trait_lists=target_traits_gene_ids,
+        conn=conn, trait_lists=target_trait_gene_list,
        species=species, gene_id=gene_id)
+    conn.close()
+
    return jsonify(lit_corr_results)
@@ -54,10 +75,10 @@ def compute_tissue_corr(corr_method="pearson"):
"""api endpoint fr doing tissue correlation"""
tissue_input_data = request.get_json()
primary_tissue_dict = tissue_input_data["primary_tissue"]
- target_tissues_dict_list = tissue_input_data["target_tissues"]
+ target_tissues_dict = tissue_input_data["target_tissues_dict"]
results = compute_all_tissue_correlation(primary_tissue_dict=primary_tissue_dict,
- target_tissues_dict_list=target_tissues_dict_list,
+ target_tissues_data=target_tissues_dict,
corr_method=corr_method)
return jsonify(results)
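One detail of the lit correlation change above that is easy to miss: the posted JSON maps trait identifiers to gene ids, and the handler now flattens that mapping into a list of (trait, gene_id) pairs before calling compute_all_lit_correlation with a real database connection. A small illustration of the conversion; the trait identifiers and gene ids below are made up.

# Illustrative only: mirrors the dict -> list-of-pairs step in compute_lit_corr.
# The trait identifiers and gene ids are invented for the example.
target_traits_gene_ids = {"1426678_at": "15368", "1424186_at": "17963"}
target_trait_gene_list = list(target_traits_gene_ids.items())
# -> [("1426678_at", "15368"), ("1424186_at", "17963")]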
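The tissue correlation hunk renames the expected top-level key from "target_tissues" to "target_tissues_dict" and forwards it as target_tissues_data. Below is a hedged example of a request body in the new shape; only the two top-level keys come from the diff, and the inner layout of each tissue entry is an assumption.

# Assumed request body after this change; the inner structure of the tissue
# entries is a guess, only "primary_tissue" and "target_tissues_dict" are
# taken from the handler above.
tissue_input_data = {
    "primary_tissue": {
        "tissue_sample_data": [10.1, 12.3, 9.8, 11.0],
    },
    "target_tissues_dict": {
        "trait_1": [9.7, 12.0, 10.2, 10.9],
        "trait_2": [8.8, 11.5, 9.9, 10.4],
    },
}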