about summary refs log tree commit diff
path: root/wqflask/utility
diff options
context:
space:
mode:
author    zsloan  2022-01-14 18:22:32 +0000
committer zsloan  2022-01-14 18:22:32 +0000
commit    68ac19153b128f60b660e11365e5fd4304c95300 (patch)
tree      198e03522af43a2d41f3c02cf3785bcfd4635fc4 /wqflask/utility
parent    f588ad96ae5045499860fa6e2740e101ad4410d7 (diff)
parent    9ab0c3b6cc146e1711f1478242d4198eed720e4c (diff)
download  genenetwork2-68ac19153b128f60b660e11365e5fd4304c95300.tar.gz
Merge branch 'testing' of github.com:genenetwork/genenetwork2 into feature/add_rqtl_pairscan
Diffstat (limited to 'wqflask/utility')
-rw-r--r--  wqflask/utility/authentication_tools.py |  11
-rw-r--r--  wqflask/utility/chunks.py               |   1
-rw-r--r--  wqflask/utility/elasticsearch_tools.py  | 121
-rw-r--r--  wqflask/utility/redis_tools.py          |  78
-rw-r--r--  wqflask/utility/tools.py                |  10
5 files changed, 9 insertions(+), 212 deletions(-)
diff --git a/wqflask/utility/authentication_tools.py b/wqflask/utility/authentication_tools.py
index 6802d689..afea69e1 100644
--- a/wqflask/utility/authentication_tools.py
+++ b/wqflask/utility/authentication_tools.py
@@ -4,11 +4,12 @@ import requests
 from flask import g
 from base import webqtlConfig
 
-
 from utility.redis_tools import (get_redis_conn,
                                  get_resource_info,
                                  get_resource_id,
                                  add_resource)
+from utility.tools import GN_PROXY_URL
+
 Redis = get_redis_conn()
 
 def check_resource_availability(dataset, trait_id=None):
@@ -24,19 +25,19 @@ def check_resource_availability(dataset, trait_id=None):
     if resource_id:
         resource_info = get_resource_info(resource_id)
 
-        # ZS: If resource isn't already in redis, add it with default
+        # If resource isn't already in redis, add it with default
         # privileges
         if not resource_info:
             resource_info = add_new_resource(dataset, trait_id)
 
-    # ZS: Check if super-user - we should probably come up with some
+    # Check if super-user - we should probably come up with some
     # way to integrate this into the proxy
     if g.user_session.user_id in Redis.smembers("super_users"):
         return webqtlConfig.SUPER_PRIVILEGES
 
     response = None
 
-    the_url = "http://localhost:8080/available?resource={}&user={}".format(
+    the_url = GN_PROXY_URL + "available?resource={}&user={}".format(
         resource_id, g.user_session.user_id)
 
     try:
@@ -93,7 +94,7 @@ def get_group_code(dataset):
 
 
 def check_admin(resource_id=None):
-    the_url = "http://localhost:8080/available?resource={}&user={}".format(
+    the_url = GN_PROXY_URL + "available?resource={}&user={}".format(
         resource_id, g.user_session.user_id)
     try:
         response = json.loads(requests.get(the_url).content)['admin']
diff --git a/wqflask/utility/chunks.py b/wqflask/utility/chunks.py
index 484b5de6..f6e88cbe 100644
--- a/wqflask/utility/chunks.py
+++ b/wqflask/utility/chunks.py
@@ -1,5 +1,4 @@
 import math
-import time
 
 
 def divide_into_chunks(the_list, number_chunks):
diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py
deleted file mode 100644
index eae3ba03..00000000
--- a/wqflask/utility/elasticsearch_tools.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# Elasticsearch support
-#
-# Some helpful commands to view the database:
-#
-# You can test the server being up with
-#
-#   curl -H 'Content-Type: application/json' http://localhost:9200
-#
-# List all indices
-#
-#   curl -H 'Content-Type: application/json' 'localhost:9200/_cat/indices?v'
-#
-# To see the users index 'table'
-#
-#   curl http://localhost:9200/users
-#
-# To list all user ids
-#
-# curl -H 'Content-Type: application/json' http://localhost:9200/users/local/_search?pretty=true -d '
-# {
-#     "query" : {
-#         "match_all" : {}
-#     },
-#     "stored_fields": []
-# }'
-#
-# To view a record
-#
-#   curl -H 'Content-Type: application/json' http://localhost:9200/users/local/_search?pretty=true -d '
-#   {
-#     "query" : {
-#       "match" : { "email_address": "pjotr2017@thebird.nl"}
-#     }
-#   }'
-#
-#
-# To delete the users index and data (dangerous!)
-#
-#   curl -XDELETE -H 'Content-Type: application/json' 'localhost:9200/users'
-
-
-from elasticsearch import Elasticsearch, TransportError
-import logging
-
-from utility.logger import getLogger
-logger = getLogger(__name__)
-
-from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
-
-
-def test_elasticsearch_connection():
-    es = Elasticsearch(['http://' + ELASTICSEARCH_HOST + \
-                        ":" + str(ELASTICSEARCH_PORT) + '/'], verify_certs=True)
-    if not es.ping():
-        logger.warning("Elasticsearch is DOWN")
-
-
-def get_elasticsearch_connection(for_user=True):
-    """Return a connection to ES. Returns None on failure"""
-    logger.info("get_elasticsearch_connection")
-    es = None
-    try:
-        assert(ELASTICSEARCH_HOST)
-        assert(ELASTICSEARCH_PORT)
-        logger.info("ES HOST", ELASTICSEARCH_HOST)
-
-        es = Elasticsearch([{
-            "host": ELASTICSEARCH_HOST, "port": ELASTICSEARCH_PORT
-        }], timeout=30, retry_on_timeout=True) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
-
-        if for_user:
-            setup_users_index(es)
-
-        es_logger = logging.getLogger("elasticsearch")
-        es_logger.setLevel(logging.INFO)
-        es_logger.addHandler(logging.NullHandler())
-    except Exception as e:
-        logger.error("Failed to get elasticsearch connection", e)
-        es = None
-
-    return es
-
-
-def setup_users_index(es_connection):
-    if es_connection:
-        index_settings = {
-            "properties": {
-                "email_address": {
-                    "type": "keyword"}}}
-
-        es_connection.indices.create(index='users', ignore=400)
-        es_connection.indices.put_mapping(
-            body=index_settings, index="users", doc_type="local")
-
-
-def get_user_by_unique_column(es, column_name, column_value, index="users", doc_type="local"):
-    return get_item_by_unique_column(es, column_name, column_value, index=index, doc_type=doc_type)
-
-
-def save_user(es, user, user_id):
-    es_save_data(es, "users", "local", user, user_id)
-
-
-def get_item_by_unique_column(es, column_name, column_value, index, doc_type):
-    item_details = None
-    try:
-        response = es.search(
-            index=index, doc_type=doc_type, body={
-                "query": {"match": {column_name: column_value}}
-            })
-        if len(response["hits"]["hits"]) > 0:
-            item_details = response["hits"]["hits"][0]["_source"]
-    except TransportError as te:
-        pass
-    return item_details
-
-
-def es_save_data(es, index, doc_type, data_item, data_id,):
-    from time import sleep
-    es.create(index, doc_type, body=data_item, id=data_id)
-    sleep(1)  # Delay 1 second to allow indexing
diff --git a/wqflask/utility/redis_tools.py b/wqflask/utility/redis_tools.py
index de9dde46..a6c5875f 100644
--- a/wqflask/utility/redis_tools.py
+++ b/wqflask/utility/redis_tools.py
@@ -57,30 +57,6 @@ def get_user_by_unique_column(column_name, column_value):
     return item_details
 
 
-def get_users_like_unique_column(column_name, column_value):
-    """Like previous function, but this only checks if the input is a
-    subset of a field and can return multiple results
-
-    """
-    matched_users = []
-
-    if column_value != "":
-        user_list = Redis.hgetall("users")
-        if column_name != "user_id":
-            for key in user_list:
-                user_ob = json.loads(user_list[key])
-                if "user_id" not in user_ob:
-                    set_user_attribute(key, "user_id", key)
-                    user_ob["user_id"] = key
-                if column_name in user_ob:
-                    if column_value in user_ob[column_name]:
-                        matched_users.append(user_ob)
-        else:
-            matched_users.append(load_json_from_redis(user_list, column_value))
-
-    return matched_users
-
-
 def set_user_attribute(user_id, column_name, column_value):
     user_info = json.loads(Redis.hget("users", user_id))
     user_info[column_name] = column_value
@@ -165,52 +141,6 @@ def get_group_info(group_id):
     return group_info
 
 
-def get_group_by_unique_column(column_name, column_value):
-    """ Get group by column; not sure if there's a faster way to do this """
-
-    matched_groups = []
-
-    all_group_list = Redis.hgetall("groups")
-    for key in all_group_list:
-        group_info = json.loads(all_group_list[key])
-        # ZS: Since these fields are lists, search in the list
-        if column_name == "admins" or column_name == "members":
-            if column_value in group_info[column_name]:
-                matched_groups.append(group_info)
-        else:
-            if group_info[column_name] == column_value:
-                matched_groups.append(group_info)
-
-    return matched_groups
-
-
-def get_groups_like_unique_column(column_name, column_value):
-    """Like previous function, but this only checks if the input is a
-    subset of a field and can return multiple results
-
-    """
-    matched_groups = []
-
-    if column_value != "":
-        group_list = Redis.hgetall("groups")
-        if column_name != "group_id":
-            for key in group_list:
-                group_info = json.loads(group_list[key])
-                # ZS: Since these fields are lists, search in the list
-                if column_name == "admins" or column_name == "members":
-                    if column_value in group_info[column_name]:
-                        matched_groups.append(group_info)
-                else:
-                    if column_name in group_info:
-                        if column_value in group_info[column_name]:
-                            matched_groups.append(group_info)
-        else:
-            matched_groups.append(
-                load_json_from_redis(group_list, column_value))
-
-    return matched_groups
-
-
 def create_group(admin_user_ids, member_user_ids=[],
                  group_name="Default Group Name"):
     group_id = str(uuid.uuid4())
@@ -352,11 +282,3 @@ def add_access_mask(resource_id, group_id, access_mask):
     Redis.hset("resources", resource_id, json.dumps(the_resource))
 
     return the_resource
-
-
-def change_resource_owner(resource_id, new_owner_id):
-    the_resource = get_resource_info(resource_id)
-    the_resource['owner_id'] = new_owner_id
-
-    Redis.delete("resource")
-    Redis.hset("resources", resource_id, json.dumps(the_resource))
diff --git a/wqflask/utility/tools.py b/wqflask/utility/tools.py
index e28abb48..db0b4320 100644
--- a/wqflask/utility/tools.py
+++ b/wqflask/utility/tools.py
@@ -194,7 +194,6 @@ def locate(name, subdir=None):
     if valid_path(base):
         lookfor = base + "/" + name
         if valid_file(lookfor):
-            logger.info("Found: file " + lookfor + "\n")
             return lookfor
         else:
             raise Exception("Can not locate " + lookfor)
@@ -220,9 +219,7 @@ def locate_ignore_error(name, subdir=None):
     if valid_path(base):
         lookfor = base + "/" + name
         if valid_file(lookfor):
-            logger.debug("Found: file " + name + "\n")
             return lookfor
-    logger.info("WARNING: file " + name + " not found\n")
     return None
 
 
@@ -266,6 +263,8 @@ WEBSERVER_MODE = get_setting('WEBSERVER_MODE')
 GN2_BASE_URL = get_setting('GN2_BASE_URL')
 GN2_BRANCH_URL = get_setting('GN2_BRANCH_URL')
 GN_SERVER_URL = get_setting('GN_SERVER_URL')
+GN_PROXY_URL = get_setting('GN_PROXY_URL')
+GN3_LOCAL_URL = get_setting('GN3_LOCAL_URL')
 SERVER_PORT = get_setting_int('SERVER_PORT')
 SQL_URI = get_setting('SQL_URI')
 LOG_LEVEL = get_setting('LOG_LEVEL')
@@ -285,6 +284,7 @@ JS_GN_PATH = get_setting('JS_GN_PATH')
 
 GITHUB_CLIENT_ID = get_setting('GITHUB_CLIENT_ID')
 GITHUB_CLIENT_SECRET = get_setting('GITHUB_CLIENT_SECRET')
+GITHUB_AUTH_URL = ""
 if GITHUB_CLIENT_ID != 'UNKNOWN' and GITHUB_CLIENT_SECRET:
     GITHUB_AUTH_URL = "https://github.com/login/oauth/authorize?client_id=" + \
                       GITHUB_CLIENT_ID + "&client_secret=" + GITHUB_CLIENT_SECRET
@@ -299,10 +299,6 @@ if ORCID_CLIENT_ID != 'UNKNOWN' and ORCID_CLIENT_SECRET:
         "&redirect_uri=" + GN2_BRANCH_URL + "n/login/orcid_oauth2"
     ORCID_TOKEN_URL = get_setting('ORCID_TOKEN_URL')
 
-ELASTICSEARCH_HOST = get_setting('ELASTICSEARCH_HOST')
-ELASTICSEARCH_PORT = get_setting('ELASTICSEARCH_PORT')
-# import utility.elasticsearch_tools as es
-# es.test_elasticsearch_connection()
 
 SMTP_CONNECT = get_setting('SMTP_CONNECT')
 SMTP_USERNAME = get_setting('SMTP_USERNAME')