author     Frederick Muriuki Muriithi  2022-05-06 16:43:26 +0300
committer  Frederick Muriuki Muriithi  2022-05-06 16:43:26 +0300
commit     4a9fd93d01b6d4bd9d9880dbf2274b3c7f2de37b (patch)
tree       088287b7e1e65399529a405377968905c5017c01
parent     d04241d0e49a50d9aa775042fffe5a7d8cfaf551 (diff)
download   genenetwork3-4a9fd93d01b6d4bd9d9880dbf2274b3c7f2de37b.tar.gz
Fix linting and typing errors
-rw-r--r--  gn3/api/correlation.py                     35
-rw-r--r--  gn3/computations/partial_correlations.py    8
-rw-r--r--  gn3/csvcmp.py                               4
-rw-r--r--  gn3/db/partial_correlations.py              4
4 files changed, 24 insertions, 27 deletions
diff --git a/gn3/api/correlation.py b/gn3/api/correlation.py
index aeb7f8c..44aaf56 100644
--- a/gn3/api/correlation.py
+++ b/gn3/api/correlation.py
@@ -114,13 +114,10 @@ def partial_correlation():
     args = request.get_json()
     with_target_db = args.get("with_target_db", True)

-    request_errors = None
-    if with_target_db:
-        request_errors = __errors__(
-            args, ("primary_trait", "control_traits", "target_db", "method"))
-    else:
-        request_errors = __errors__(
-            args, ("primary_trait", "control_traits", "target_traits", "method"))
+    request_errors = __errors__(
+        args, ("primary_trait", "control_traits",
+               ("target_db" if with_target_db else "target_traits"),
+               "method"))
     if request_errors:
         return build_response({
             "status": "error",
@@ -140,15 +137,15 @@ def partial_correlation():
                     int(args.get("criteria", 500))),
                 job_queue=current_app.config.get("REDIS_JOB_QUEUE"),
                 env = {"PYTHONPATH": ":".join(sys.path), "SQL_URI": SQL_URI})})
-    else:
-        with database_connector() as conn:
-            results = partial_correlations_with_target_traits(
-                conn,
-                trait_fullname(args["primary_trait"]),
-                tuple(
-                    trait_fullname(trait) for trait in args["control_traits"]),
-                tuple(
-                    trait_fullname(trait) for trait in args["target_traits"]),
-                args["method"])
-
-        return build_response({"status": "success", "results": results})
+
+    with database_connector() as conn:
+        results = partial_correlations_with_target_traits(
+            conn,
+            trait_fullname(args["primary_trait"]),
+            tuple(
+                trait_fullname(trait) for trait in args["control_traits"]),
+            tuple(
+                trait_fullname(trait) for trait in args["target_traits"]),
+            args["method"])
+
+    return build_response({"status": "success", "results": results})
diff --git a/gn3/computations/partial_correlations.py b/gn3/computations/partial_correlations.py
index 07c73db..9b15bcb 100644
--- a/gn3/computations/partial_correlations.py
+++ b/gn3/computations/partial_correlations.py
@@ -555,7 +555,7 @@ def trait_for_output(trait):
     }
     return {key: val for key, val in trait.items() if val is not None}

-def check_for_common_errors(
+def check_for_common_errors(# pylint: disable=[R0914]
         conn, primary_trait_name, control_trait_names, threshold):
     """Check for common errors"""
     corr_min_informative = 4
@@ -676,7 +676,7 @@ def partial_correlations_with_target_db(# pylint: disable=[R0913, R0914, R0911]
     check_res = check_for_common_errors(
         conn, primary_trait_name, control_trait_names, threshold)
     if check_res.get("status") == "error":
-        return error_check_results
+        return check_res

     primary_trait = check_res["primary_trait"]
     input_trait_geneid = primary_trait.get("geneid", 0)
@@ -822,12 +822,12 @@ def partial_correlations_with_target_traits(
     check_res = check_for_common_errors(
         conn, primary_trait_name, control_trait_names, threshold)
     if check_res.get("status") == "error":
-        return error_check_results
+        return check_res

     target_traits = {
         trait["name"]: trait
         for trait in traits_info(conn, threshold, target_trait_names)}
-    target_traits_data = traits_data(conn, target_traits.values())
+    target_traits_data = traits_data(conn, tuple(target_traits.values()))

     def __merge(trait, pcorrs):
         return {
diff --git a/gn3/csvcmp.py b/gn3/csvcmp.py
index 82d491e..3aba184 100644
--- a/gn3/csvcmp.py
+++ b/gn3/csvcmp.py
@@ -1,6 +1,6 @@
 """This module contains functions for manipulating and working with csv
 texts"""
-from typing import Any, List
+from typing import List
 import re
 import json
@@ -70,7 +70,7 @@ def csv_diff(base_csv, delta_csv, tmp_dir="/tmp") -> dict:
             base_csv_header, delta_csv_header = line, delta_csv_list[i]
             break

-    longest_header = max(base_csv_header, delta_csv_header, key=lambda x: len(x))
+    longest_header = max(base_csv_header, delta_csv_header, key=len)
     if base_csv_header != delta_csv_header:
         if longest_header != base_csv_header:
             base_csv = "\n".join([longest_header] + base_csv_list[1:])
diff --git a/gn3/db/partial_correlations.py b/gn3/db/partial_correlations.py
index 96f953d..e8466f6 100644
--- a/gn3/db/partial_correlations.py
+++ b/gn3/db/partial_correlations.py
@@ -9,7 +9,7 @@ This module is part of the optimisation effort for the partial correlations.
 """
 from functools import reduce, partial
-from typing import Any, Dict, Tuple, Union, Sequence
+from typing import Any, Dict, Tuple, Union, Sequence, Generator

 from MySQLdb.cursors import DictCursor
@@ -750,7 +750,7 @@ def traits_datasets(conn, threshold, traits):

 def traits_info(
         conn: Any, threshold: int, traits_fullnames: Tuple[str, ...],
-        qtl=None) -> Tuple[Dict[str, Any], ...]:
+        qtl=None) -> Generator:
     """
     Retrieve basic trait information for multiple `traits`.