author     Alexander Kabui  2021-04-12 16:53:48 +0300
committer  Alexander Kabui  2021-04-12 16:53:48 +0300
commit     35f5ac0335f44923184ffe0f0a3380a9cf1859ef (patch)
tree       8f1fcb15ace3574eb19bd0eafdc5b5bb0822ed09 /gn3
parent     8ce82f5b6cccc015c38a728864c63c026fe6a3a0 (diff)
parent     31ac939f58bf7b6d353ced995ca395376203b25f (diff)
fix merge conflict
Diffstat (limited to 'gn3')
-rw-r--r--  gn3/api/correlation.py             39
-rw-r--r--  gn3/computations/correlations.py  102
-rw-r--r--  gn3/db_utils.py                    24
-rw-r--r--  gn3/settings.py                     2
4 files changed, 131 insertions, 36 deletions
diff --git a/gn3/api/correlation.py b/gn3/api/correlation.py
index e023cbe..f28e1f5 100644
--- a/gn3/api/correlation.py
+++ b/gn3/api/correlation.py
@@ -1,6 +1,4 @@
 """Endpoints for running correlations"""
-from unittest import mock
-
 from flask import jsonify
 from flask import Blueprint
 from flask import request
@@ -8,11 +6,31 @@ from flask import request
 from gn3.computations.correlations import compute_all_sample_correlation
 from gn3.computations.correlations import compute_all_lit_correlation
 from gn3.computations.correlations import compute_all_tissue_correlation
-
+from gn3.computations.correlations import map_shared_keys_to_values
+from gn3.db_utils import database_connector
 
 correlation = Blueprint("correlation", __name__)
 
 
+@correlation.route("/sample_x/<string:corr_method>", methods=["POST"])
+def compute_sample_integration(corr_method="pearson"):
+    """temporary api to  help integrate genenetwork2  to genenetwork3 """
+
+    correlation_input = request.get_json()
+
+    target_samplelist = correlation_input.get("target_samplelist")
+    target_data_values = correlation_input.get("target_dataset")
+    this_trait_data = correlation_input.get("trait_data")
+
+    results = map_shared_keys_to_values(target_samplelist, target_data_values)
+
+    correlation_results = compute_all_sample_correlation(corr_method=corr_method,
+                                                         this_trait=this_trait_data,
+                                                         target_dataset=results)
+
+    return jsonify(correlation_results)
+
+
 @correlation.route("/sample_r/<string:corr_method>", methods=["POST"])
 def compute_sample_r(corr_method="pearson"):
     """Correlation endpoint for computing sample r correlations\
@@ -23,11 +41,11 @@ def compute_sample_r(corr_method="pearson"):
 
     # xtodo move code below to compute_all_sample correlation
     this_trait_data = correlation_input.get("this_trait")
-    target_datasets = correlation_input.get("target_dataset")
+    target_dataset_data = correlation_input.get("target_dataset")
 
     correlation_results = compute_all_sample_correlation(corr_method=corr_method,
                                                          this_trait=this_trait_data,
-                                                         target_dataset=target_datasets)
+                                                         target_dataset=target_dataset_data)
 
     return jsonify({
         "corr_results": correlation_results
@@ -41,13 +59,16 @@ def compute_lit_corr(species=None, gene_id=None):
     might be needed for actual computing of the correlation results
     """
 
-    database_instance = mock.Mock()
+    conn, _cursor_object = database_connector()
     target_traits_gene_ids = request.get_json()
+    target_trait_gene_list = list(target_traits_gene_ids.items())
 
     lit_corr_results = compute_all_lit_correlation(
-        database_instance=database_instance, trait_lists=target_traits_gene_ids,
+        conn=conn, trait_lists=target_trait_gene_list,
         species=species, gene_id=gene_id)
 
+    conn.close()
+
     return jsonify(lit_corr_results)
 
 
@@ -56,10 +77,10 @@ def compute_tissue_corr(corr_method="pearson"):
     """Api endpoint fr doing tissue correlation"""
     tissue_input_data = request.get_json()
     primary_tissue_dict = tissue_input_data["primary_tissue"]
-    target_tissues_dict_list = tissue_input_data["target_tissues"]
+    target_tissues_dict = tissue_input_data["target_tissues_dict"]
 
     results = compute_all_tissue_correlation(primary_tissue_dict=primary_tissue_dict,
-                                             target_tissues_dict_list=target_tissues_dict_list,
+                                             target_tissues_data=target_tissues_dict,
                                              corr_method=corr_method)
 
     return jsonify(results)
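
The new /sample_x endpoint above takes a JSON body with target_samplelist, target_dataset and trait_data. A minimal sketch of calling it, assuming the blueprint is registered under an /api/correlation prefix and the server runs locally on port 8080 (the URL, port and the trait_data keys shown are assumptions, not part of this commit):

import requests  # assumed available to the caller

payload = {
    "target_samplelist": ["BXD1", "BXD2", "BXD5"],
    "target_dataset": {"HCMA:_AT": [4.1, 5.6, 3.2]},
    # Hypothetical shape; trait_data must match what compute_all_sample_correlation expects.
    "trait_data": {"trait_id": "1425_at",
                   "trait_sample_data": {"BXD1": 4.1, "BXD2": 5.6, "BXD5": 3.2}},
}
response = requests.post(
    "http://localhost:8080/api/correlation/sample_x/pearson", json=payload)
print(response.json())
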
diff --git a/gn3/computations/correlations.py b/gn3/computations/correlations.py
index 7a6ff11..7fb67be 100644
--- a/gn3/computations/correlations.py
+++ b/gn3/computations/correlations.py
@@ -12,10 +12,30 @@ def compute_sum(rhs: int, lhs: int) -> int:
     return rhs + lhs
 
 
+def map_shared_keys_to_values(target_sample_keys: List, target_sample_vals: dict) -> List:
+    """Construct target dataset data items given the shared sample keys and\
+    the trait samplelist values. For example, given keys\
+    ["BXD1", "BXD2", "BXD5", "BXD6", "BXD8", "BXD9"] and a value object\
+    {"HCMA:_AT": [4.1, 5.6, 3.2, 1.1, 4.4, 2.2], "TXD_AT": [6.2, 5.7, 3.6, 1.5, 4.2, 2.3]},\
+    the result should be a list of dicts mapping the shared keys to the trait values."""
+    target_dataset_data = []
+
+    for trait_id, sample_values in target_sample_vals.items():
+        target_trait_dict = dict(zip(target_sample_keys, sample_values))
+
+        target_trait = {
+            "trait_id": trait_id,
+            "trait_sample_data": target_trait_dict
+        }
+
+        target_dataset_data.append(target_trait)
+
+    return target_dataset_data
+
+
 def normalize_values(a_values: List,
                      b_values: List) -> Tuple[List[float], List[float], int]:
     """Trim two lists of values to contain only the values they both share
-
     Given two lists of sample values, trim each list so that it contains only
     the samples that contain a value in both lists. Also returns the number of
     such samples.
@@ -175,7 +195,7 @@ def tissue_correlation_for_trait_list(
 
     """
 
-    # ax :todo assertion that lenggth one one target tissue ==primary_tissue
+    # ax :todo assertion that the length of one target tissue == primary_tissue
 
     (tissue_corr_coeffient,
      p_value) = compute_corr_p_value(primary_values=primary_tissue_vals,
@@ -192,11 +212,11 @@ def tissue_correlation_for_trait_list(
 
 
 def fetch_lit_correlation_data(
-        database,
+        conn,
         input_mouse_gene_id: Optional[str],
         gene_id: str,
         mouse_gene_id: Optional[str] = None) -> Tuple[str, float]:
-    """given input trait mouse gene id and mouse gene id fetch the lit\
+    """Given input trait mouse gene id and mouse gene id fetch the lit\
     corr_data"""
     if mouse_gene_id is not None and ";" not in mouse_gene_id:
         query = """
@@ -208,15 +228,19 @@ def fetch_lit_correlation_data(
 
         query_values = (str(mouse_gene_id), str(input_mouse_gene_id))
 
-        results = database.execute(query_formatter(query,
-                                                   *query_values)).fetchone()
+        cursor = conn.cursor()
+
+        cursor.execute(query_formatter(query,
+                                       *query_values))
+        results = cursor.fetchone()
         lit_corr_results = None
         if results is not None:
             lit_corr_results = results
         else:
-            lit_corr_results = database.execute(
-                query_formatter(query,
-                                *tuple(reversed(query_values)))).fetchone()
+            cursor = conn.cursor()
+            cursor.execute(query_formatter(query,
+                                           *tuple(reversed(query_values))))
+            lit_corr_results = cursor.fetchone()
         lit_results = (gene_id, lit_corr_results.val)\
             if lit_corr_results else (gene_id, 0)
         return lit_results
@@ -225,7 +249,7 @@ def fetch_lit_correlation_data(
 
 
 def lit_correlation_for_trait_list(
-        database,
+        conn,
         target_trait_lists: List,
         species: Optional[str] = None,
         trait_gene_id: Optional[str] = None) -> List:
@@ -233,41 +257,43 @@ def lit_correlation_for_trait_list(
     output is float for lit corr results """
     fetched_lit_corr_results = []
 
-    this_trait_mouse_gene_id = map_to_mouse_gene_id(database=database,
+    this_trait_mouse_gene_id = map_to_mouse_gene_id(conn=conn,
                                                     species=species,
                                                     gene_id=trait_gene_id)
 
-    for trait in target_trait_lists:
-        target_trait_gene_id = trait.get("gene_id")
+    for (trait_name, target_trait_gene_id) in target_trait_lists:
+        corr_results = {}
         if target_trait_gene_id:
             target_mouse_gene_id = map_to_mouse_gene_id(
-                database=database,
+                conn=conn,
                 species=species,
                 gene_id=target_trait_gene_id)
 
             fetched_corr_data = fetch_lit_correlation_data(
-                database=database,
+                conn=conn,
                 input_mouse_gene_id=this_trait_mouse_gene_id,
                 gene_id=target_trait_gene_id,
                 mouse_gene_id=target_mouse_gene_id)
 
             dict_results = dict(zip(("gene_id", "lit_corr"),
                                     fetched_corr_data))
-            fetched_lit_corr_results.append(dict_results)
+            corr_results[trait_name] = dict_results
+            fetched_lit_corr_results.append(corr_results)
 
     return fetched_lit_corr_results
 
 
 def query_formatter(query_string: str, *query_values):
-    """formatter query string given the unformatted query string\
+    """Formatter query string given the unformatted query string\
     and the respectibe values.Assumes number of placeholders is
     equal to the number of query values """
+    # xtodo escape sql queries
     results = query_string % (query_values)
 
     return results
 
 
-def map_to_mouse_gene_id(database, species: Optional[str],
+def map_to_mouse_gene_id(conn, species: Optional[str],
                          gene_id: Optional[str]) -> Optional[str]:
     """Given a species which is not mouse map the gene_id\
     to respective mouse gene id"""
@@ -278,27 +304,28 @@ def map_to_mouse_gene_id(database, species: Optional[str],
     if species == "mouse":
         return gene_id
 
+    cursor = conn.cursor()
     query = """SELECT mouse
                 FROM GeneIDXRef
                 WHERE '%s' = '%s'"""
 
     query_values = (species, gene_id)
-
-    results = database.execute(query_formatter(query,
-                                               *query_values)).fetchone()
+    cursor.execute(query_formatter(query,
+                                   *query_values))
+    results = cursor.fetchone()
 
     mouse_gene_id = results.mouse if results is not None else None
 
     return mouse_gene_id
 
 
-def compute_all_lit_correlation(database_instance, trait_lists: List,
+def compute_all_lit_correlation(conn, trait_lists: List,
                                 species: str, gene_id):
     """Function that acts as an abstraction for
     lit_correlation_for_trait_list"""
 
     lit_results = lit_correlation_for_trait_list(
-        database=database_instance,
+        conn=conn,
         target_trait_lists=trait_lists,
         species=species,
         trait_gene_id=gene_id)
@@ -307,18 +334,22 @@ def compute_all_lit_correlation(database_instance, trait_lists: List,
 
 
 def compute_all_tissue_correlation(primary_tissue_dict: dict,
-                                   target_tissues_dict_list: List,
+                                   target_tissues_data: dict,
                                    corr_method: str):
     """Function acts as an abstraction for tissue_correlation_for_trait_list\
-    required input are target tissue object and primary tissue trait
+    required inputs are the target tissue object and the primary tissue trait;\
+    target tissues data contains the trait_symbol_dict and symbol_tissue_vals_dict
 
     """
 
     tissues_results = {}
 
     primary_tissue_vals = primary_tissue_dict["tissue_values"]
+    traits_symbol_dict = target_tissues_data["trait_symbol_dict"]
+    symbol_tissue_vals_dict = target_tissues_data["symbol_tissue_vals_dict"]
 
-    target_tissues_list = target_tissues_dict_list
+    target_tissues_list = process_trait_symbol_dict(
+        traits_symbol_dict, symbol_tissue_vals_dict)
 
     for target_tissue_obj in target_tissues_list:
         trait_id = target_tissue_obj.get("trait_id")
@@ -333,3 +364,22 @@ def compute_all_tissue_correlation(primary_tissue_dict: dict,
         tissues_results[trait_id] = tissue_result
 
     return tissues_results
+
+
+def process_trait_symbol_dict(trait_symbol_dict, symbol_tissue_vals_dict) -> List:
+    """Method for processing trait symbol\
+    dict given the symbol tissue values """
+    traits_tissue_vals = []
+
+    for (trait, symbol) in trait_symbol_dict.items():
+        if symbol is not None:
+            target_symbol = symbol.lower()
+            if target_symbol in symbol_tissue_vals_dict:
+                trait_tissue_val = symbol_tissue_vals_dict[target_symbol]
+                target_tissue_dict = {"trait_id": trait,
+                                      "symbol": target_symbol,
+                                      "tissue_values": trait_tissue_val}
+
+                traits_tissue_vals.append(target_tissue_dict)
+
+    return traits_tissue_vals
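
For reference, a small sketch of how the new map_shared_keys_to_values helper reshapes its inputs, reusing the values from its docstring example:

from gn3.computations.correlations import map_shared_keys_to_values

keys = ["BXD1", "BXD2", "BXD5"]
vals = {"HCMA:_AT": [4.1, 5.6, 3.2]}
results = map_shared_keys_to_values(keys, vals)
# results == [{"trait_id": "HCMA:_AT",
#              "trait_sample_data": {"BXD1": 4.1, "BXD2": 5.6, "BXD5": 3.2}}]
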
diff --git a/gn3/db_utils.py b/gn3/db_utils.py
new file mode 100644
index 0000000..34c5bf0
--- /dev/null
+++ b/gn3/db_utils.py
@@ -0,0 +1,24 @@
+"""module contains all db related stuff"""
+from typing import Tuple
+from urllib.parse import urlparse
+import MySQLdb as mdb   # type: ignore
+from gn3.settings import SQL_URI
+
+
+def parse_db_url() -> Tuple:
+    """function to parse SQL_URI env variable note:there\
+    is a default value for SQL_URI so a tuple result is\
+    always expected"""
+    parsed_db = urlparse(SQL_URI)
+    return (parsed_db.hostname, parsed_db.username,
+            parsed_db.password, parsed_db.path[1:])
+
+
+def database_connector()->Tuple:
+    """function to create db connector"""
+    host, user, passwd, db_name = parse_db_url()
+    conn = mdb.connect(host, user, passwd, db_name)
+    cursor = conn.cursor()
+
+    return (conn, cursor)
+    
\ No newline at end of file
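
A minimal sketch of using the new database_connector helper from calling code; the query shown is illustrative only and not part of this commit:

from gn3.db_utils import database_connector

conn, cursor = database_connector()
try:
    cursor.execute("SELECT 1")  # illustrative query only
    print(cursor.fetchone())
finally:
    conn.close()
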
diff --git a/gn3/settings.py b/gn3/settings.py
index d9d4f90..478a041 100644
--- a/gn3/settings.py
+++ b/gn3/settings.py
@@ -12,7 +12,7 @@ REDIS_JOB_QUEUE = "GN3::job-queue"
 TMPDIR = os.environ.get("TMPDIR", tempfile.gettempdir())
 
 # SQL confs
-SQLALCHEMY_DATABASE_URI = "mysql://kabui:1234@localhost/test"
+SQL_URI = os.environ.get("SQL_URI", "mysql://kabui:1234@localhost/db_webqtl")
 SECRET_KEY = "password"
 SQLALCHEMY_TRACK_MODIFICATIONS = False
 # gn2 results only used in fetching dataset info
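
Since SQL_URI is now read from the environment with a fallback default, deployments can point GN3 at a different database without editing settings.py. A minimal sketch, assuming the variable is set before gn3.settings is first imported (the credentials and host shown are placeholders):

import os

# Placeholder credentials; in practice set SQL_URI in the deployment environment.
os.environ["SQL_URI"] = "mysql://user:password@dbhost/db_webqtl"

from gn3.settings import SQL_URI
print(SQL_URI)  # reflects the environment value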