Diffstat (limited to 'gn3')
-rw-r--r--  gn3/api/metadata.py                       |  2
-rw-r--r--  gn3/authentication.py                     |  3
-rw-r--r--  gn3/case_attributes.py                    |  6
-rw-r--r--  gn3/commands.py                           |  4
-rw-r--r--  gn3/computations/correlations.py          |  2
-rw-r--r--  gn3/computations/gemma.py                 | 13
-rw-r--r--  gn3/computations/partial_correlations.py  | 10
-rw-r--r--  gn3/computations/pca.py                   |  2
-rw-r--r--  gn3/computations/qtlreaper.py             |  2
-rw-r--r--  gn3/computations/rqtl2.py                 |  5
-rw-r--r--  gn3/computations/rust_correlation.py      |  4
-rw-r--r--  gn3/db/correlations.py                    |  4
-rw-r--r--  gn3/db/menu.py                            | 10
-rw-r--r--  gn3/db/sample_data.py                     | 24
-rw-r--r--  gn3/heatmaps.py                           |  4
-rw-r--r--  gn3/oauth2/jwks.py                        |  2
16 files changed, 51 insertions(+), 46 deletions(-)
diff --git a/gn3/api/metadata.py b/gn3/api/metadata.py
index 59c25d3..e272c0d 100644
--- a/gn3/api/metadata.py
+++ b/gn3/api/metadata.py
@@ -172,7 +172,7 @@ def view_history(id_):
             "history": history,
         })
     if history.get("error"):
-        raise Exception(history.get("error_description"))
+        raise Exception(history.get("error_description")) # pylint: disable=[broad-exception-raised]
     return history
 
 
diff --git a/gn3/authentication.py b/gn3/authentication.py
index bb717dd..e7e4543 100644
--- a/gn3/authentication.py
+++ b/gn3/authentication.py
@@ -94,7 +94,8 @@ def get_highest_user_access_role(
     access_role = {}
     response = requests.get(urljoin(gn_proxy_url,
                                     ("available?resource="
-                                     f"{resource_id}&user={user_id}")))
+                                     f"{resource_id}&user={user_id}")),
+                            timeout=500)
     for key, value in json.loads(response.content).items():
         access_role[key] = max(map(lambda role: role_mapping[role], value))
     return access_role
diff --git a/gn3/case_attributes.py b/gn3/case_attributes.py
index 2c878d2..2a456bd 100644
--- a/gn3/case_attributes.py
+++ b/gn3/case_attributes.py
@@ -78,7 +78,8 @@ def required_access(
                 # this section fetches the resource ID from the auth server
                 urljoin(current_app.config["AUTH_SERVER_URL"],
                         "auth/resource/populations/resource-id"
-                        f"/{__species_id__(conn)}/{inbredset_id}"))
+                        f"/{__species_id__(conn)}/{inbredset_id}"),
+                timeout=300)
             if result.status_code == 200:
                 resource_id = result.json()["resource-id"]
                 auth = requests.post(
@@ -87,7 +88,8 @@ def required_access(
                     urljoin(current_app.config["AUTH_SERVER_URL"],
                             "auth/resource/authorisation"),
                     json={"resource-ids": [resource_id]},
-                    headers={"Authorization": f"Bearer {token['access_token']}"})
+                    headers={"Authorization": f"Bearer {token['access_token']}"},
+                    timeout=300)
                 if auth.status_code == 200:
                     privs = tuple(priv["privilege_id"]
                                   for role in auth.json()[resource_id]["roles"]
diff --git a/gn3/commands.py b/gn3/commands.py
index 74c634c..38153bf 100644
--- a/gn3/commands.py
+++ b/gn3/commands.py
@@ -84,7 +84,7 @@ def compose_pcorrs_command(
             return "pearsons"
         if "spearmans" in mthd:
             return "spearmans"
-        raise Exception(f"Invalid method '{method}'")
+        raise Exception(f"Invalid method '{method}'")# pylint: disable=[broad-exception-raised]
 
     prefix_cmd = (
         f"{sys.executable}", "-m", "scripts.partial_correlations",
@@ -98,7 +98,7 @@ def compose_pcorrs_command(
             kwargs.get("target_database") is None
             and kwargs.get("target_traits") is not None):
         return compose_pcorrs_command_for_selected_traits(prefix_cmd, **kwargs)
-    raise Exception("Invalid state: I don't know what command to generate!")
+    raise Exception("Invalid state: I don't know what command to generate!")# pylint: disable=[broad-exception-raised]
 
 def queue_cmd(conn: Redis,
               job_queue: str,
diff --git a/gn3/computations/correlations.py b/gn3/computations/correlations.py
index d805af7..e5934b6 100644
--- a/gn3/computations/correlations.py
+++ b/gn3/computations/correlations.py
@@ -196,7 +196,7 @@ def compute_all_sample_correlation(this_trait,
 
     """
     this_trait_samples = this_trait["trait_sample_data"]
-    with Pool(processes=(cpu_count() - 1)) as pool:
+    with Pool(processes=cpu_count() - 1) as pool:
         return sorted(
             (
                 corr for corr in
diff --git a/gn3/computations/gemma.py b/gn3/computations/gemma.py
index 6c53ecc..f07628f 100644
--- a/gn3/computations/gemma.py
+++ b/gn3/computations/gemma.py
@@ -41,12 +41,13 @@ def generate_pheno_txt_file(trait_filename: str,
 
 
 # pylint: disable=R0913
-def generate_gemma_cmd(gemma_cmd: str,
-                       output_dir: str,
-                       token: str,
-                       gemma_kwargs: Dict,
-                       gemma_wrapper_kwargs: Optional[Dict] = None,
-                       chromosomes: Optional[str] = None) -> Dict:
+def generate_gemma_cmd(# pylint: disable=[too-many-positional-arguments]
+        gemma_cmd: str,
+        output_dir: str,
+        token: str,
+        gemma_kwargs: Dict,
+        gemma_wrapper_kwargs: Optional[Dict] = None,
+        chromosomes: Optional[str] = None) -> Dict:
     """Compute k values"""
     _hash = get_hash_of_files(
         [v for k, v in gemma_kwargs.items() if k in ["g", "p", "a", "c"]])
diff --git a/gn3/computations/partial_correlations.py b/gn3/computations/partial_correlations.py
index 6eee299..88c6f3b 100644
--- a/gn3/computations/partial_correlations.py
+++ b/gn3/computations/partial_correlations.py
@@ -209,7 +209,7 @@ def good_dataset_samples_indexes(
         samples_from_file.index(good) for good in
         set(samples).intersection(set(samples_from_file))))
 
-def partial_correlations_fast(# pylint: disable=[R0913, R0914]
+def partial_correlations_fast(# pylint: disable=[R0913, R0914, too-many-positional-arguments]
         samples, primary_vals, control_vals, database_filename,
         fetched_correlations, method: str, correlation_type: str) -> Generator:
     """
@@ -334,7 +334,7 @@ def compute_partial(
     This implementation reworks the child function `compute_partial` which will
     then be used in the place of `determinPartialsByR`.
     """
-    with Pool(processes=(cpu_count() - 1)) as pool:
+    with Pool(processes=cpu_count() - 1) as pool:
         return (
             result for result in (
                 pool.starmap(
@@ -345,7 +345,7 @@ def compute_partial(
                      for target in targets)))
         if result is not None)
 
-def partial_correlations_normal(# pylint: disable=R0913
+def partial_correlations_normal(# pylint: disable=[R0913, too-many-positional-arguments]
         primary_vals, control_vals, input_trait_gene_id, trait_database,
         data_start_pos: int, db_type: str, method: str) -> Generator:
     """
@@ -381,7 +381,7 @@ def partial_correlations_normal(# pylint: disable=R0913
 
     return all_correlations
 
-def partial_corrs(# pylint: disable=[R0913]
+def partial_corrs(# pylint: disable=[R0913, too-many-positional-arguments]
         conn, samples, primary_vals, control_vals, return_number, species,
         input_trait_geneid, input_trait_symbol, tissue_probeset_freeze_id,
         method, dataset, database_filename):
@@ -667,7 +667,7 @@ def check_for_common_errors(# pylint: disable=[R0914]
 
     return non_error_result
 
-def partial_correlations_with_target_db(# pylint: disable=[R0913, R0914, R0911]
+def partial_correlations_with_target_db(# pylint: disable=[R0913, R0914, R0911, too-many-positional-arguments]
         conn: Any, primary_trait_name: str,
         control_trait_names: Tuple[str, ...], method: str,
         criteria: int, target_db_name: str) -> dict:
diff --git a/gn3/computations/pca.py b/gn3/computations/pca.py
index 35c9f03..4cc04a3 100644
--- a/gn3/computations/pca.py
+++ b/gn3/computations/pca.py
@@ -133,7 +133,7 @@ def generate_pca_temp_traits(
 
     """
 
-    # pylint: disable=too-many-arguments
+    # pylint: disable=[too-many-arguments, too-many-positional-arguments]
 
     pca_trait_dict = {}
 
diff --git a/gn3/computations/qtlreaper.py b/gn3/computations/qtlreaper.py
index 08c387f..e0d9d0b 100644
--- a/gn3/computations/qtlreaper.py
+++ b/gn3/computations/qtlreaper.py
@@ -38,7 +38,7 @@ def create_output_directory(path: str):
         # If the directory already exists, do nothing.
         pass
 
-# pylint: disable=too-many-arguments
+# pylint: disable=[too-many-arguments, too-many-positional-arguments]
 def run_reaper(
         reaper_cmd: str,
         genotype_filename: str, traits_filename: str,
diff --git a/gn3/computations/rqtl2.py b/gn3/computations/rqtl2.py
index b482c47..e3a6446 100644
--- a/gn3/computations/rqtl2.py
+++ b/gn3/computations/rqtl2.py
@@ -57,9 +57,8 @@ def validate_required_keys(required_keys: list, data: dict) -> tuple[bool, str]:
     return True, ""
 
 
-def compose_rqtl2_cmd(rqtl_path, input_file,
-                      output_file, workspace_dir,
-                      data, config):
+def compose_rqtl2_cmd(# pylint: disable=[too-many-positional-arguments]
+        rqtl_path, input_file, output_file, workspace_dir, data, config):
     """Compose the command for running the R/QTL2 analysis."""
     # pylint: disable=R0913
     params = {
diff --git a/gn3/computations/rust_correlation.py b/gn3/computations/rust_correlation.py
index 5ce097d..69817d1 100644
--- a/gn3/computations/rust_correlation.py
+++ b/gn3/computations/rust_correlation.py
@@ -53,7 +53,7 @@ def run_correlation(
         corr_type: str = "sample", top_n: int = 500):
     """entry function to call rust correlation"""
 
-    # pylint: disable=too-many-arguments
+    # pylint: disable=[too-many-arguments, too-many-positional-arguments]
     correlation_command = current_app.config["CORRELATION_COMMAND"] # make arg?
     (tmp_dir, tmp_file) = generate_input_files(dataset)
     (output_file, json_file) = generate_json_file(
@@ -67,7 +67,7 @@ def run_correlation(
             os.readlink(correlation_command)
             if os.path.islink(correlation_command)
             else correlation_command)
-        raise Exception(command_list, actual_command, cpe.stdout) from cpe
+        raise Exception(command_list, actual_command, cpe.stdout) from cpe# pylint: disable=[broad-exception-raised]
 
     return parse_correlation_output(output_file, corr_type, top_n)
 
diff --git a/gn3/db/correlations.py b/gn3/db/correlations.py
index aec8eac..5d6cfb3 100644
--- a/gn3/db/correlations.py
+++ b/gn3/db/correlations.py
@@ -328,7 +328,7 @@ def build_temporary_tissue_correlations_table(
 
     return temp_table_name
 
-def fetch_tissue_correlations(# pylint: disable=R0913
+def fetch_tissue_correlations(# pylint: disable=[R0913, too-many-arguments, too-many-positional-arguments]
         dataset: dict, trait_symbol: str, probeset_freeze_id: int, method: str,
         return_number: int, conn: Any) -> dict:
     """
@@ -529,7 +529,7 @@ def __build_query__(
             f"ORDER BY {db_type}.Id"),
         1)
 
-# pylint: disable=too-many-arguments
+# pylint: disable=[too-many-arguments, too-many-positional-arguments]
 def __fetch_data__(
         conn, sample_ids: tuple, db_name: str, db_type: str, method: str,
         temp_table: Optional[str]) -> Tuple[Tuple[Any], int]:
diff --git a/gn3/db/menu.py b/gn3/db/menu.py
index 8dccabf..3fe9062 100644
--- a/gn3/db/menu.py
+++ b/gn3/db/menu.py
@@ -14,10 +14,12 @@ def gen_dropdown_json(conn):
     groups = get_groups(conn, tuple(row[0] for row in species))
     types = get_types(conn, groups)
     datasets = get_datasets(conn, types)
-    return dict(species=species,
-                groups=groups,
-                types=types,
-                datasets=datasets)
+    return {
+        "species": species,
+        "groups": groups,
+        "types": types,
+        "datasets": datasets
+    }
 
 def get_groups(conn, species_names: Tuple[str, ...]):
     """Build groups list"""
diff --git a/gn3/db/sample_data.py b/gn3/db/sample_data.py
index 8db40e3..415b0b0 100644
--- a/gn3/db/sample_data.py
+++ b/gn3/db/sample_data.py
@@ -302,8 +302,8 @@ def update_sample_data(
     if data_type == "mrna":
         strain_id, data_id, inbredset_id = get_mrna_sample_data_ids(
             conn=conn,
-            probeset_id=int(probeset_id),
-            dataset_name=dataset_name,
+            probeset_id=int(probeset_id),# pylint: disable=[possibly-used-before-assignment]
+            dataset_name=dataset_name,# pylint: disable=[possibly-used-before-assignment]
             strain_name=extract_strain_name(csv_header, original_data),
         )
         none_case_attrs = {
@@ -315,8 +315,8 @@ def update_sample_data(
     else:
         strain_id, data_id, inbredset_id = get_pheno_sample_data_ids(
             conn=conn,
-            publishxref_id=int(trait_name),
-            phenotype_id=phenotype_id,
+            publishxref_id=int(trait_name),# pylint: disable=[possibly-used-before-assignment]
+            phenotype_id=phenotype_id,# pylint: disable=[possibly-used-before-assignment]
             strain_name=extract_strain_name(csv_header, original_data),
         )
         none_case_attrs = {
@@ -422,8 +422,8 @@ def delete_sample_data(
     if data_type == "mrna":
         strain_id, data_id, inbredset_id = get_mrna_sample_data_ids(
             conn=conn,
-            probeset_id=int(probeset_id),
-            dataset_name=dataset_name,
+            probeset_id=int(probeset_id),# pylint: disable=[possibly-used-before-assignment]
+            dataset_name=dataset_name,# pylint: disable=[possibly-used-before-assignment]
             strain_name=extract_strain_name(csv_header, data),
         )
         none_case_attrs: Dict[str, Any] = {
@@ -435,8 +435,8 @@ def delete_sample_data(
     else:
         strain_id, data_id, inbredset_id = get_pheno_sample_data_ids(
             conn=conn,
-            publishxref_id=int(trait_name),
-            phenotype_id=phenotype_id,
+            publishxref_id=int(trait_name),# pylint: disable=[possibly-used-before-assignment]
+            phenotype_id=phenotype_id,# pylint: disable=[possibly-used-before-assignment]
             strain_name=extract_strain_name(csv_header, data),
         )
         none_case_attrs = {
@@ -528,8 +528,8 @@ def insert_sample_data(
     if data_type == "mrna":
         strain_id, data_id, inbredset_id = get_mrna_sample_data_ids(
             conn=conn,
-            probeset_id=int(probeset_id),
-            dataset_name=dataset_name,
+            probeset_id=int(probeset_id),# pylint: disable=[possibly-used-before-assignment]
+            dataset_name=dataset_name,# pylint: disable=[possibly-used-before-assignment]
             strain_name=extract_strain_name(csv_header, data),
         )
         none_case_attrs = {
@@ -541,8 +541,8 @@ def insert_sample_data(
     else:
         strain_id, data_id, inbredset_id = get_pheno_sample_data_ids(
             conn=conn,
-            publishxref_id=int(trait_name),
-            phenotype_id=phenotype_id,
+            publishxref_id=int(trait_name),# pylint: disable=[possibly-used-before-assignment]
+            phenotype_id=phenotype_id,# pylint: disable=[possibly-used-before-assignment]
             strain_name=extract_strain_name(csv_header, data),
         )
         none_case_attrs = {
diff --git a/gn3/heatmaps.py b/gn3/heatmaps.py
index b6822d4..511996a 100644
--- a/gn3/heatmaps.py
+++ b/gn3/heatmaps.py
@@ -292,7 +292,7 @@ def process_traits_data_for_heatmap(data, trait_names, chromosome_names):
         for chr_name in chromosome_names]
     return hdata
 
-def clustered_heatmap(
+def clustered_heatmap(# pylint: disable=[too-many-positional-arguments]
         data: Sequence[Sequence[float]], clustering_data: Sequence[float],
         x_axis,#: Dict[Union[str, int], Union[str, Sequence[str]]],
         y_axis: Dict[str, Union[str, Sequence[str]]],
@@ -335,7 +335,7 @@ def clustered_heatmap(
         fig.add_trace(
             heatmap,
             row=((i + 2) if vertical else 1),
-            col=(1 if vertical else (i + 2)))
+            col=(1 if vertical else i + 2))
 
     axes_layouts = {
         "{axis}axis{count}".format( # pylint: disable=[C0209]
diff --git a/gn3/oauth2/jwks.py b/gn3/oauth2/jwks.py
index 8798a3f..c670bf7 100644
--- a/gn3/oauth2/jwks.py
+++ b/gn3/oauth2/jwks.py
@@ -12,7 +12,7 @@ from gn3.oauth2.errors import TokenValidationError
 def fetch_jwks(authserveruri: str, path: str = "auth/public-jwks") -> KeySet:
     """Fetch the JWKs from a particular URI"""
     try:
-        response = requests.get(urljoin(authserveruri, path))
+        response = requests.get(urljoin(authserveruri, path), timeout=300)
         if response.status_code == 200:
             return KeySet([
                 JsonWebKey.import_key(key) for key in response.json()["jwks"]])