diff options
author | Alexander_Kabui | 2024-01-02 13:21:07 +0300 |
---|---|---|
committer | Alexander_Kabui | 2024-01-02 13:21:07 +0300 |
commit | 70c4201b332e0e2c0d958428086512f291469b87 (patch) | |
tree | aea4fac8782c110fc233c589c3f0f7bd34bada6c /wqflask/base/data_set/utils.py | |
parent | 5092eb42f062b1695c4e39619f0bd74a876cfac2 (diff) | |
parent | 965ce5114d585624d5edb082c710b83d83a3be40 (diff) | |
download | genenetwork2-70c4201b332e0e2c0d958428086512f291469b87.tar.gz |
merge changes
Diffstat (limited to 'wqflask/base/data_set/utils.py')
-rw-r--r-- | wqflask/base/data_set/utils.py | 80 |
1 file changed, 0 insertions, 80 deletions
"data_set package utilities"

import datetime
import os
import json
import hashlib
from typing import List


from utility.tools import get_setting, SQL_URI
from base.webqtlConfig import TMPDIR
from wqflask.database import parse_db_url, database_connection


def geno_mrna_confidentiality(ob):
    """Check whether dataset `ob` has a row in its `{type}Freeze` table.

    Queries `{ob.type}Freeze` (e.g. "GenoFreeze") for the dataset's name
    and returns True when a matching row exists, False otherwise.

    NOTE(review): the original truthiness test was on the whole result
    row (always truthy when present), not on the `confidentiality`
    column itself — behavior preserved here, but confirm the intent.
    """
    with database_connection(get_setting("SQL_URI")) as conn, conn.cursor() as cursor:
        # The table name is derived from ob.type; the dataset name is
        # passed as a bound parameter, never interpolated.
        cursor.execute(
            "SELECT confidentiality, "
            f"AuthorisedUsers FROM {ob.type}Freeze WHERE Name = %s",
            (ob.name,)
        )
        result = cursor.fetchall()
        # BUGFIX: the original implicitly returned None when no row
        # matched; return an explicit bool instead.
        return bool(result and result[0])


def query_table_timestamp(dataset_type: str) -> str:
    """Return the last-update timestamp of the `{dataset_type}Data` table.

    Reads `information_schema.tables.UPDATE_TIME` for the current
    database; falls back to the current time when no UPDATE_TIME is
    reported (e.g. InnoDB tables where the value is NULL).
    Result is formatted as "YYYY-MM-DD HH:MM:SS".
    """
    with database_connection(get_setting("SQL_URI")) as conn, conn.cursor() as cursor:
        # parse_db_url returns connection parts; index 3 is the db name.
        db_name = parse_db_url(SQL_URI)[3]
        # BUGFIX: use bound parameters instead of f-string interpolation
        # so the schema/table names cannot break or inject into the query.
        cursor.execute(
            "SELECT UPDATE_TIME FROM "
            "information_schema.tables "
            "WHERE TABLE_SCHEMA = %s "
            "AND TABLE_NAME = %s",
            (db_name, f"{dataset_type}Data"))
        date_time_obj = cursor.fetchone()[0]
        if not date_time_obj:
            date_time_obj = datetime.datetime.now()
        return date_time_obj.strftime("%Y-%m-%d %H:%M:%S")


def generate_hash_file(dataset_name: str, dataset_type: str, dataset_timestamp: str, samplelist: str) -> str:
    """Return a deterministic cache-file name for a dataset query.

    The name is the MD5 hex digest of dataset name + table timestamp +
    samplelist (MD5 is used as a fingerprint here, not for security).
    `dataset_type` is unused but kept for interface compatibility with
    existing callers.
    """
    string_unicode = f"{dataset_name}{dataset_timestamp}{samplelist}".encode()
    md5hash = hashlib.md5(string_unicode)
    return md5hash.hexdigest()


def cache_dataset_results(dataset_name: str, dataset_type: str, samplelist: List, query_results: List):
    """Cache dataset query results to a JSON file under TMPDIR.

    `query_results` must already be JSON-serializable (processed into
    default-dict format by the caller). The file name is keyed on the
    dataset name, the backing table's update timestamp, and the sample
    list, so a table update naturally invalidates the old cache entry.
    """
    table_timestamp = query_table_timestamp(dataset_type)
    samplelist_as_str = ",".join(samplelist)

    file_name = generate_hash_file(dataset_name, dataset_type, table_timestamp, samplelist_as_str)
    file_path = os.path.join(TMPDIR, f"{file_name}.json")

    with open(file_path, "w") as file_handler:
        json.dump(query_results, file_handler)


def fetch_cached_results(dataset_name: str, dataset_type: str, samplelist: List):
    """Fetch previously cached results written by `cache_dataset_results`.

    Returns the deserialized JSON payload, or None on a cache miss or
    an unreadable/corrupt cache file.
    """
    table_timestamp = query_table_timestamp(dataset_type)
    samplelist_as_str = ",".join(samplelist)

    file_name = generate_hash_file(dataset_name, dataset_type, table_timestamp, samplelist_as_str)
    file_path = os.path.join(TMPDIR, f"{file_name}.json")
    try:
        with open(file_path, "r") as file_handler:
            return json.load(file_handler)
    except (FileNotFoundError, json.JSONDecodeError):
        # BUGFIX: the original `except Exception: pass` silently hid all
        # errors; treat only a missing or corrupt cache file as a miss.
        return None