From 3e70cab812e29e504f714c782c71bc4b79793686 Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Fri, 12 Jan 2018 18:05:45 +0300 Subject: Add elasticsearch_tools module * Collect variables and functions for using the elasticsearch system in a separate module. --- wqflask/utility/elasticsearch_tools.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 wqflask/utility/elasticsearch_tools.py (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py new file mode 100644 index 00000000..bc7bb240 --- /dev/null +++ b/wqflask/utility/elasticsearch_tools.py @@ -0,0 +1,22 @@ +from elasticsearch import Elasticsearch, TransportError +from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT + +es = Elasticsearch([{ + "host": ELASTICSEARCH_HOST + , "port": ELASTICSEARCH_PORT +}]) + +def get_user_by_unique_column(column_name, column_value): + user_details = None + try: + response = es.search( + index = "users" + , doc_type = "local" + , body = { + "query": { "match": { column_name: column_value } } + }) + if len(response["hits"]["hits"]) > 0: + user_details = response["hits"]["hits"][0]["_source"] + except TransportError as te: + pass + return user_details -- cgit v1.2.3 From 7282e0b47c1c6d12fc4c06a080af07c8e67ca75c Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Fri, 19 Jan 2018 10:38:04 +0300 Subject: Add save_user() function * On successful login via OAuth2, save the details of the user in elasticsearch store, to avoid hitting the external provider for the basic details. --- wqflask/utility/elasticsearch_tools.py | 7 +++++++ 1 file changed, 7 insertions(+) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index bc7bb240..74db489b 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -20,3 +20,10 @@ def get_user_by_unique_column(column_name, column_value): except TransportError as te: pass return user_details + +def save_user(user, user_id, index="users", doc_type="local"): + es = Elasticsearch([{ + "host": ELASTICSEARCH_HOST + , "port": ELASTICSEARCH_PORT + }]) + es.create(index, doc_type, body=user, id=user_id) -- cgit v1.2.3 From e185fd3895473e86f2c9fdf174a36b1d325a8c36 Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Fri, 19 Jan 2018 12:00:04 +0300 Subject: Delay after save for indexing * Elasticsearch need a short delay after adding document for it to index the document for subsequent access. 
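For context on the sleep: Elasticsearch is a near-real-time store, so a document written with create() normally only becomes visible to search() after the next index refresh (roughly one second by default). A minimal sketch of the alternative, assuming an elasticsearch-py client and cluster (5.x or later) that honour the refresh parameter; "es", "user" and "user_id" stand for the connection object and user document used in the patch:

    # Ask Elasticsearch to make the document searchable before returning,
    # instead of sleeping for a fixed second after es.create().
    es.create(index="users", doc_type="local", id=user_id, body=user,
              refresh="wait_for")
    res = es.search(index="users", doc_type="local",
                    body={"query": {"match": {"user_id": user_id}}})
    assert len(res["hits"]["hits"]) > 0  # the new document is already visible

On older clients, refresh=True forces an immediate index refresh and has the same effect at a somewhat higher cluster cost.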
--- wqflask/utility/elasticsearch_tools.py | 2 ++ 1 file changed, 2 insertions(+) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index 74db489b..c2c999ea 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -22,8 +22,10 @@ def get_user_by_unique_column(column_name, column_value): return user_details def save_user(user, user_id, index="users", doc_type="local"): + from time import sleep es = Elasticsearch([{ "host": ELASTICSEARCH_HOST , "port": ELASTICSEARCH_PORT }]) es.create(index, doc_type, body=user, id=user_id) + sleep(1) # Delay 1 second to allow indexing -- cgit v1.2.3 From afaea1b1297d0cf08565746799d2900a6981823a Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Tue, 30 Jan 2018 13:07:58 +0300 Subject: Fail safely if elasticsearch is down or unconfigured * If elasticsearch server is down, or the configuration variables are not provided at startup or in a configuration file, then do not allow the system to simply crash, but instead, inform the user that they cannot use the services that depend on elasticsearch to be running. --- wqflask/utility/elasticsearch_tools.py | 16 ++++++++------ .../wqflask/templates/new_security/login_user.html | 25 ++++++++++++++++++---- wqflask/wqflask/user_manager.py | 6 +++++- 3 files changed, 36 insertions(+), 11 deletions(-) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index c2c999ea..8b8ad9cc 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -1,10 +1,14 @@ -from elasticsearch import Elasticsearch, TransportError -from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT +es = None +try: + from elasticsearch import Elasticsearch, TransportError + from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT -es = Elasticsearch([{ - "host": ELASTICSEARCH_HOST - , "port": ELASTICSEARCH_PORT -}]) + es = Elasticsearch([{ + "host": ELASTICSEARCH_HOST + , "port": ELASTICSEARCH_PORT + }]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None +except: + es = None def get_user_by_unique_column(column_name, column_value): user_details = None diff --git a/wqflask/wqflask/templates/new_security/login_user.html b/wqflask/wqflask/templates/new_security/login_user.html index 15f0a27e..0dae3503 100644 --- a/wqflask/wqflask/templates/new_security/login_user.html +++ b/wqflask/wqflask/templates/new_security/login_user.html @@ -16,7 +16,14 @@

[login_user.html diff: the HTML markup was stripped during extraction. The recoverable changes are: the "Create a new account" link under "Don't have an account?" is wrapped in a new "{% if es_server: %} ... {% else: %} ... {% endif %}" block whose else branch shows "You cannot create an account at this moment. Please try again later."; in hunk @@ -31,13 +38,17 @@ the "Login with external services" section (GitHub / "Login with ORCID") gains an else branch showing "You cannot login with external services at this time. Please try again later."; and in hunk @@ -75,6 +86,12 @@ the local sign-in form ("Already have an account? Sign in here.") is wrapped in the same kind of conditional, with the else branch showing "You cannot login at this moment using your GeneNetwork account. Please try again later."]
+ {% endif %} diff --git a/wqflask/wqflask/user_manager.py b/wqflask/wqflask/user_manager.py index 4322945b..772d6c83 100644 --- a/wqflask/wqflask/user_manager.py +++ b/wqflask/wqflask/user_manager.py @@ -585,13 +585,17 @@ class LoginUser(object): logger.debug("in login params are:", params) if not params: from utility.tools import GITHUB_AUTH_URL, ORCID_AUTH_URL + from utility.elasticsearch_tools import es external_login = None if GITHUB_AUTH_URL or ORCID_AUTH_URL: external_login={ "github": GITHUB_AUTH_URL, "orcid": ORCID_AUTH_URL } - return render_template("new_security/login_user.html", external_login=external_login) + return render_template( + "new_security/login_user.html" + , external_login=external_login + , es_server=es) else: user_details = get_user_by_unique_column("email_address", params["email_address"]) user = None -- cgit v1.2.3 From 1c9540879d8761d9252c3fb3f749ae0b6d5be2b9 Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Mon, 5 Feb 2018 20:01:45 +0300 Subject: Refactor common items to more generic methods. * Refactor code that can be used in more than one place to a more generic method/function that's called by other methods --- wqflask/utility/elasticsearch_tools.py | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index 8b8ad9cc..ea636b2e 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -11,25 +11,27 @@ except: es = None def get_user_by_unique_column(column_name, column_value): - user_details = None + return get_item_by_unique_column(column_name, column_value, index="users", doc_type="local") + +def save_user(user, user_id): + es_save_data("users", "local", user, user_id) + +def get_item_by_unique_column(column_name, column_value, index, doc_type): + item_details = None try: response = es.search( - index = "users" - , doc_type = "local" + index = index + , doc_type = doc_type , body = { "query": { "match": { column_name: column_value } } }) if len(response["hits"]["hits"]) > 0: - user_details = response["hits"]["hits"][0]["_source"] + item_details = response["hits"]["hits"][0]["_source"] except TransportError as te: pass - return user_details + return item_details -def save_user(user, user_id, index="users", doc_type="local"): +def es_save_data(index, doc_type, data_item, data_id,): from time import sleep - es = Elasticsearch([{ - "host": ELASTICSEARCH_HOST - , "port": ELASTICSEARCH_PORT - }]) - es.create(index, doc_type, body=user, id=user_id) + es.create(index, doc_type, body=data_item, id=data_id) sleep(1) # Delay 1 second to allow indexing -- cgit v1.2.3 From 25ff5e9e47c568547a855e42a4fd43477db2a61e Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Fri, 9 Feb 2018 10:11:21 +0300 Subject: Add check for elasticsearch * Add some extra checks to ensure that elasticsearch is running before presenting the UI to the user. 
--- wqflask/utility/elasticsearch_tools.py | 4 ++++ 1 file changed, 4 insertions(+) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index ea636b2e..a0383033 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -7,6 +7,10 @@ try: "host": ELASTICSEARCH_HOST , "port": ELASTICSEARCH_PORT }]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None + + # Check if elasticsearch is running + if not es.ping(): + es = None except: es = None -- cgit v1.2.3 From 4fe60697964d81ce1425000e764fd0a27d73eb29 Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Fri, 9 Feb 2018 11:02:07 +0300 Subject: Check elasticsearch at point of use * Instead of checking for the state of elasticsearch at startup, check the state at the moment the user requests a feature that depends on elasticsearch. This reduces the chances that the user is dropped onto an exception page when elasticsearch server goes down. --- wqflask/utility/elasticsearch_tools.py | 3 --- wqflask/wqflask/user_manager.py | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index a0383033..4fc0035c 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -8,9 +8,6 @@ try: , "port": ELASTICSEARCH_PORT }]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None - # Check if elasticsearch is running - if not es.ping(): - es = None except: es = None diff --git a/wqflask/wqflask/user_manager.py b/wqflask/wqflask/user_manager.py index 8f09c206..630be9aa 100644 --- a/wqflask/wqflask/user_manager.py +++ b/wqflask/wqflask/user_manager.py @@ -626,7 +626,7 @@ class LoginUser(object): return render_template( "new_security/login_user.html" , external_login=external_login - , es_server=es) + , es_server=es.ping()) else: user_details = get_user_by_unique_column("email_address", params["email_address"]) user = None -- cgit v1.2.3 From 1d45fac66b19340ed9378fcc08a928555ad95f74 Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Tue, 13 Feb 2018 13:48:28 +0300 Subject: Update module to make it more testable * Update functions to make them more testable. * Update code using updated functions. 
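Passing the connection in as the first argument is what makes these functions testable: a unit test can hand in a stand-in object instead of a live server. A minimal sketch of such a test, with illustrative names that are not part of the patch:

    from utility.elasticsearch_tools import get_user_by_unique_column

    class FakeES(object):
        """Stand-in for elasticsearch.Elasticsearch, used only in tests."""
        def search(self, index, doc_type, body):
            # Return one canned hit no matter what was queried.
            return {"hits": {"hits": [
                {"_source": {"email_address": "user@example.org"}}]}}

    def test_get_user_by_unique_column():
        details = get_user_by_unique_column(FakeES(), "email_address",
                                            "user@example.org")
        assert details["email_address"] == "user@example.org"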
--- wqflask/utility/elasticsearch_tools.py | 40 +++++++++++++--------- wqflask/wqflask/user_manager.py | 61 +++++++++++++++++++--------------- 2 files changed, 58 insertions(+), 43 deletions(-) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index 4fc0035c..a964b025 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -1,23 +1,31 @@ -es = None -try: - from elasticsearch import Elasticsearch, TransportError - from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT +from elasticsearch import Elasticsearch, TransportError +import logging - es = Elasticsearch([{ - "host": ELASTICSEARCH_HOST - , "port": ELASTICSEARCH_PORT - }]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None - -except: +def get_elasticsearch_connection(): es = None + try: + from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT + + es = Elasticsearch([{ + "host": ELASTICSEARCH_HOST + , "port": ELASTICSEARCH_PORT + }]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None + + es_logger = logging.getLogger("elasticsearch") + es_logger.setLevel(logging.INFO) + es_logger.addHandler(logging.NullHandler()) + except: + es = None + + return es -def get_user_by_unique_column(column_name, column_value): - return get_item_by_unique_column(column_name, column_value, index="users", doc_type="local") +def get_user_by_unique_column(es, column_name, column_value, index="users", doc_type="local"): + return get_item_by_unique_column(es, column_name, column_value, index=index, doc_type=doc_type) -def save_user(user, user_id): - es_save_data("users", "local", user, user_id) +def save_user(es, user, user_id): + es_save_data(es, "users", "local", user, user_id) -def get_item_by_unique_column(column_name, column_value, index, doc_type): +def get_item_by_unique_column(es, column_name, column_value, index, doc_type): item_details = None try: response = es.search( @@ -32,7 +40,7 @@ def get_item_by_unique_column(column_name, column_value, index, doc_type): pass return item_details -def es_save_data(index, doc_type, data_item, data_id,): +def es_save_data(es, index, doc_type, data_item, data_id,): from time import sleep es.create(index, doc_type, body=data_item, id=data_id) sleep(1) # Delay 1 second to allow indexing diff --git a/wqflask/wqflask/user_manager.py b/wqflask/wqflask/user_manager.py index 630be9aa..6b667615 100644 --- a/wqflask/wqflask/user_manager.py +++ b/wqflask/wqflask/user_manager.py @@ -55,8 +55,9 @@ logger = getLogger(__name__) from base.data_set import create_datasets_list import requests -from utility.elasticsearch_tools import get_user_by_unique_column, save_user, es_save_data +from utility.elasticsearch_tools import * +es = get_elasticsearch_connection() THREE_DAYS = 60 * 60 * 24 * 3 #THREE_DAYS = 45 @@ -271,14 +272,18 @@ class RegisterUser(object): self.thank_you_mode = False self.errors = [] self.user = Bunch() + es = kw.get('es_connection', None) + + if not es: + self.errors.append("Missing connection object") self.user.email_address = kw.get('email_address', '').encode("utf-8").strip() if not (5 <= len(self.user.email_address) <= 50): self.errors.append('Email Address needs to be between 5 and 50 characters.') - - email_exists = get_user_by_unique_column("email_address", self.user.email_address) - if email_exists: - self.errors.append('User already exists with that email') + else: + email_exists = get_user_by_unique_column(es, "email_address", 
self.user.email_address) + if email_exists: + self.errors.append('User already exists with that email') self.user.full_name = kw.get('full_name', '').encode("utf-8").strip() if not (5 <= len(self.user.full_name) <= 50): @@ -305,7 +310,7 @@ class RegisterUser(object): self.user.confirmed = 1 self.user.registration_info = json.dumps(basic_info(), sort_keys=True) - save_user(self.user.__dict__, self.user.user_id) + save_user(es, self.user.__dict__, self.user.user_id) def set_password(password, user): pwfields = Bunch() @@ -381,7 +386,7 @@ class ForgotPasswordEmail(VerificationEmail): "email_address": toaddr, "timestamp": timestamp() } - es_save_data(self.key_prefix, "local", data, verification_code) + es_save_data(es, self.key_prefix, "local", data, verification_code) subject = self.subject body = render_template( @@ -431,7 +436,6 @@ def verify_email(): @app.route("/n/password_reset", methods=['GET']) def password_reset(): - from utility.elasticsearch_tools import get_item_by_unique_column logger.debug("in password_reset request.url is:", request.url) # We do this mainly just to assert that it's in proper form for displaying next page @@ -441,14 +445,16 @@ def password_reset(): hmac = request.args.get('hm') if verification_code: code_details = get_item_by_unique_column( - "verification_code", - verification_code, - ForgotPasswordEmail.key_prefix, - "local") + es + , "verification_code" + , verification_code + , ForgotPasswordEmail.key_prefix + , "local") if code_details: user_details = get_user_by_unique_column( - "email_address", - code_details["email_address"]) + es + , "email_address" + , code_details["email_address"]) if user_details: return render_template( "new_security/password_reset.html", user_encode=user_details["user_id"]) @@ -533,7 +539,7 @@ def github_oauth2(): result_dict = {arr[0]:arr[1] for arr in [tok.split("=") for tok in [token.encode("utf-8") for token in result.text.split("&")]]} github_user = get_github_user_details(result_dict["access_token"]) - user_details = get_user_by_unique_column("github_id", github_user["id"]) + user_details = get_user_by_unique_column(es, "github_id", github_user["id"]) if user_details == None: user_details = { "user_id": str(uuid.uuid4()) @@ -545,7 +551,7 @@ def github_oauth2(): , "active": 1 , "confirmed": 1 } - save_user(user_details, user_details["user_id"]) + save_user(es, user_details, user_details["user_id"]) url = "/n/login?type=github&uid="+user_details["user_id"] return redirect(url) @@ -566,7 +572,7 @@ def orcid_oauth2(): result = requests.post(ORCID_TOKEN_URL, data=data) result_dict = json.loads(result.text.encode("utf-8")) - user_details = get_user_by_unique_column("orcid", result_dict["orcid"]) + user_details = get_user_by_unique_column(es, "orcid", result_dict["orcid"]) if user_details == None: user_details = { "user_id": str(uuid4()) @@ -580,7 +586,7 @@ def orcid_oauth2(): , "active": 1 , "confirmed": 1 } - save_user(user_details, user_details["user_id"]) + save_user(es, user_details, user_details["user_id"]) url = "/n/login?type=orcid&uid="+user_details["user_id"] else: flash("There was an error getting code from ORCID") @@ -599,7 +605,7 @@ class LoginUser(object): def oauth2_login(self, login_type, user_id): """Login via an OAuth2 provider""" - user_details = get_user_by_unique_column("user_id", user_id) + user_details = get_user_by_unique_column(es, "user_id", user_id) if user_details: user = model.User() user.id = user_details["user_id"] @@ -616,7 +622,6 @@ class LoginUser(object): logger.debug("in login params are:", params) 
if not params: from utility.tools import GITHUB_AUTH_URL, ORCID_AUTH_URL - from utility.elasticsearch_tools import es external_login = None if GITHUB_AUTH_URL or ORCID_AUTH_URL: external_login={ @@ -628,8 +633,9 @@ class LoginUser(object): , external_login=external_login , es_server=es.ping()) else: - user_details = get_user_by_unique_column("email_address", params["email_address"]) + user_details = get_user_by_unique_column(es, "email_address", params["email_address"]) user = None + valid = None if user_details: user = model.User(); for key in user_details: @@ -672,7 +678,7 @@ class LoginUser(object): else: if user: self.unsuccessful_login(user) - flash("Invalid email-address or password. Please try again.", "alert-error") + flash("Invalid email-address or password. Please try again.", "alert-danger") response = make_response(redirect(url_for('login'))) return response @@ -739,7 +745,7 @@ def forgot_password(): def forgot_password_submit(): params = request.form email_address = params['email_address'] - user_details = get_user_by_unique_column("email_address", email_address) + user_details = get_user_by_unique_column(es, "email_address", email_address) if user_details: ForgotPasswordEmail(user_details["email_address"]) # try: @@ -815,16 +821,17 @@ def register(): params = request.form if request.form else request.args + params = params.to_dict(flat=True) + params["es_connection"] = es if params: logger.debug("Attempting to register the user...") result = RegisterUser(params) errors = result.errors - if result.thank_you_mode: - assert not errors, "Errors while in thank you mode? That seems wrong..." - return render_template("new_security/registered.html", - subject=VerificationEmail.subject) + if len(errors) == 0: + flash("Registration successful. You may login with your new account", "alert-info") + return redirect(url_for("login")) return render_template("new_security/register_user.html", values=params, errors=errors) -- cgit v1.2.3 From 9d47ad572f46397bdd778d8f6e1c37d40e12aa2a Mon Sep 17 00:00:00 2001 From: Pjotr Prins Date: Thu, 15 Feb 2018 10:30:52 +0000 Subject: Fixing authentication stuff so it uses parameters properly. Also no PYTHONPATH needed as it is now in the Guix build. 
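With every setting now given a default in etc/default_settings.py, a deployment only overrides the values it actually uses via the settings file named by $WQFLASK_SETTINGS (plus optional JSON in $WQFLASK_OVERRIDES). A hypothetical override module, with placeholder values only:

    # Local settings module referenced by $WQFLASK_SETTINGS.
    # Names mirror etc/default_settings.py; every value here is a placeholder.
    ELASTICSEARCH_HOST = "127.0.0.1"
    ELASTICSEARCH_PORT = "9200"

    GITHUB_CLIENT_ID = "my-github-oauth-client-id"
    GITHUB_CLIENT_SECRET = "my-github-oauth-client-secret"

    SMTP_CONNECT = "smtp.example.org:587"
    SMTP_USERNAME = "noreply@example.org"
    SMTP_PASSWORD = "change-me"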
--- bin/genenetwork2 | 12 +++++++----- etc/default_settings.py | 18 ++++++++++++++++++ wqflask/run_gunicorn.py | 3 +++ wqflask/utility/elasticsearch_tools.py | 16 ++++++++++++---- wqflask/utility/tools.py | 29 +++++++++++------------------ wqflask/wqflask/user_manager.py | 7 +++++-- 6 files changed, 56 insertions(+), 29 deletions(-) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/bin/genenetwork2 b/bin/genenetwork2 index 5438c1c0..42f79650 100755 --- a/bin/genenetwork2 +++ b/bin/genenetwork2 @@ -95,11 +95,13 @@ export WQFLASK_OVERRIDES=$overrides # JSON echo WQFLASK_SETTINGS=$settings echo WQFLASK_OVERRIDES=$overrides -if [ -z $ELASTICSEARCH_PROFILE ]; then - echo -e "\033[1;33mWARNING: Elastic Search profile has not been set - use ELASTICSEARCH_PROFILE\033[0m"; -else - PYTHONPATH="$PYTHONPATH${PYTHONPATH:+:}$ELASTICSEARCH_PROFILE/lib/python2.7/site-packages" -fi +# This is a temporary hack to inject ES - should have added python2-elasticsearch package to guix instead +# if [ -z $ELASTICSEARCH_PROFILE ]; then +# echo -e "WARNING: Elastic Search profile has not been set - use ELASTICSEARCH_PROFILE"; +# else +# PYTHONPATH="$PYTHONPATH${PYTHONPATH:+:}$ELASTICSEARCH_PROFILE/lib/python2.7/site-packages" +# fi + if [ -z $GN2_PROFILE ] ; then echo "WARNING: GN2_PROFILE has not been set - you need the environment, so I hope you know what you are doing!" export GN2_PROFILE=$(dirname $(dirname $(which genenetwork2))) diff --git a/etc/default_settings.py b/etc/default_settings.py index 699d21f1..a70d8aec 100644 --- a/etc/default_settings.py +++ b/etc/default_settings.py @@ -41,6 +41,24 @@ SECURITY_POST_LOGIN_VIEW = "/thank_you" SERVER_PORT = 5003 # running on localhost SECRET_HMAC_CODE = '\x08\xdf\xfa\x93N\x80\xd9\\H@\\\x9f`\x98d^\xb4a;\xc6OM\x946a\xbc\xfc\x80:*\xebc' +GITHUB_CLIENT_ID = "UNKNOWN" +GITHUB_CLIENT_SECRET = "UNKNOWN" +GITHUB_AUTH_URL = "UNKNOWN" +GITHUB_API_URL = "UNKNOWN" + +ORCID_CLIENT_ID = "UNKNOWN" +ORCID_CLIENT_SECRET = "UNKNOWN" +ORCID_AUTH_URL = "UNKNOWN" +ORCID_TOKEN_URL = "UNKNOWN" + +ELASTICSEARCH_HOST = "localhost" +ELASTICSEARCH_PORT = '9200' + +SMTP_CONNECT = "UNKNOWN" +SMTP_USERNAME = "UNKNOWN" +SMTP_PASSWORD = "UNKNOWN" + + # ---- Behavioural settings (defaults) note that logger and log levels can # be overridden at the module level and with enviroment settings WEBSERVER_MODE = 'DEV' # Python webserver mode (DEBUG|DEV|PROD) diff --git a/wqflask/run_gunicorn.py b/wqflask/run_gunicorn.py index 14a2d689..ebe3add5 100644 --- a/wqflask/run_gunicorn.py +++ b/wqflask/run_gunicorn.py @@ -11,6 +11,9 @@ print "Starting up Gunicorn process" from wqflask import app +app.config['SESSION_TYPE'] = 'filesystem' +app.config['SECRET_KEY'] = 'super secret key' + @app.route("/gunicorn") def hello(): return "

<h1>Hello There!</h1>

" diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index a964b025..2d3d5add 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -1,10 +1,18 @@ from elasticsearch import Elasticsearch, TransportError import logging +from utility.logger import getLogger +logger = getLogger(__name__) + +from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT + def get_elasticsearch_connection(): + logger.info("get_elasticsearch_connection") es = None try: - from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT + assert(ELASTICSEARCH_HOST) + assert(ELASTICSEARCH_PORT) + logger.info("ES HOST",ELASTICSEARCH_HOST) es = Elasticsearch([{ "host": ELASTICSEARCH_HOST @@ -31,12 +39,12 @@ def get_item_by_unique_column(es, column_name, column_value, index, doc_type): response = es.search( index = index , doc_type = doc_type - , body = { - "query": { "match": { column_name: column_value } } + , body = { + "query": { "match": { column_name: column_value } } }) if len(response["hits"]["hits"]) > 0: item_details = response["hits"]["hits"][0]["_source"] - except TransportError as te: + except TransportError as te: pass return item_details diff --git a/wqflask/utility/tools.py b/wqflask/utility/tools.py index 005f9b0f..8c9fed96 100644 --- a/wqflask/utility/tools.py +++ b/wqflask/utility/tools.py @@ -251,32 +251,25 @@ assert_dir(JS_GUIX_PATH) JS_GN_PATH = get_setting('JS_GN_PATH') # assert_dir(JS_GN_PATH) -def get_setting_safe(setting): - try: - return get_setting(setting) - except: - print("Could not find the setting '", setting, "'. Continuing with value unset") - return None - -GITHUB_CLIENT_ID = get_setting_safe('GITHUB_CLIENT_ID') -GITHUB_CLIENT_SECRET = get_setting_safe('GITHUB_CLIENT_SECRET') +GITHUB_CLIENT_ID = get_setting('GITHUB_CLIENT_ID') +GITHUB_CLIENT_SECRET = get_setting('GITHUB_CLIENT_SECRET') GITHUB_AUTH_URL = None if GITHUB_CLIENT_ID and GITHUB_CLIENT_SECRET: GITHUB_AUTH_URL = "https://github.com/login/oauth/authorize?client_id="+GITHUB_CLIENT_ID+"&client_secret="+GITHUB_CLIENT_SECRET -GITHUB_API_URL = get_setting_safe('GITHUB_API_URL') -ORCID_CLIENT_ID = get_setting_safe('ORCID_CLIENT_ID') -ORCID_CLIENT_SECRET = get_setting_safe('ORCID_CLIENT_SECRET') +GITHUB_API_URL = get_setting('GITHUB_API_URL') +ORCID_CLIENT_ID = get_setting('ORCID_CLIENT_ID') +ORCID_CLIENT_SECRET = get_setting('ORCID_CLIENT_SECRET') ORCID_AUTH_URL = None if ORCID_CLIENT_ID and ORCID_CLIENT_SECRET: ORCID_AUTH_URL = "https://sandbox.orcid.org/oauth/authorize?response_type=code&scope=/authenticate&show_login=true&client_id="+ORCID_CLIENT_ID+"&client_secret="+ORCID_CLIENT_SECRET -ORCID_TOKEN_URL = get_setting_safe('ORCID_TOKEN_URL') +ORCID_TOKEN_URL = get_setting('ORCID_TOKEN_URL') -ELASTICSEARCH_HOST = get_setting_safe('ELASTICSEARCH_HOST') -ELASTICSEARCH_PORT = get_setting_safe('ELASTICSEARCH_PORT') +ELASTICSEARCH_HOST = get_setting('ELASTICSEARCH_HOST') +ELASTICSEARCH_PORT = get_setting('ELASTICSEARCH_PORT') -SMTP_CONNECT = get_setting_safe('SMTP_CONNECT') -SMTP_USERNAME = get_setting_safe('SMTP_USERNAME') -SMTP_PASSWORD = get_setting_safe('SMTP_PASSWORD') +SMTP_CONNECT = get_setting('SMTP_CONNECT') +SMTP_USERNAME = get_setting('SMTP_USERNAME') +SMTP_PASSWORD = get_setting('SMTP_PASSWORD') PYLMM_COMMAND = app_set("PYLMM_COMMAND",pylmm_command()) GEMMA_COMMAND = app_set("GEMMA_COMMAND",gemma_command()) diff --git a/wqflask/wqflask/user_manager.py b/wqflask/wqflask/user_manager.py index 6b667615..c8471cb1 100644 --- 
a/wqflask/wqflask/user_manager.py +++ b/wqflask/wqflask/user_manager.py @@ -55,9 +55,8 @@ logger = getLogger(__name__) from base.data_set import create_datasets_list import requests -from utility.elasticsearch_tools import * +from utility.elasticsearch_tools import get_elasticsearch_connection, get_user_by_unique_column, save_user -es = get_elasticsearch_connection() THREE_DAYS = 60 * 60 * 24 * 3 #THREE_DAYS = 45 @@ -479,6 +478,7 @@ def password_reset_step2(): password = request.form['password'] set_password(password, user) + es = get_elasticsearch_connection() es.update( index = "users" , doc_type = "local" @@ -620,6 +620,7 @@ class LoginUser(object): """Login through the normal form""" params = request.form if request.form else request.args logger.debug("in login params are:", params) + es = get_elasticsearch_connection() if not params: from utility.tools import GITHUB_AUTH_URL, ORCID_AUTH_URL external_login = None @@ -628,6 +629,7 @@ class LoginUser(object): "github": GITHUB_AUTH_URL, "orcid": ORCID_AUTH_URL } + assert(es is not None) return render_template( "new_security/login_user.html" , external_login=external_login @@ -822,6 +824,7 @@ def register(): params = request.form if request.form else request.args params = params.to_dict(flat=True) + es = get_elasticsearch_connection() params["es_connection"] = es if params: -- cgit v1.2.3 From d0f071a3871a2bcbb2c5170996a4afb145c21f9c Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Fri, 12 Jan 2018 18:05:45 +0300 Subject: Add elasticsearch_tools module * Collect variables and functions for using the elasticsearch system in a separate module. --- wqflask/utility/elasticsearch_tools.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 wqflask/utility/elasticsearch_tools.py (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py new file mode 100644 index 00000000..bc7bb240 --- /dev/null +++ b/wqflask/utility/elasticsearch_tools.py @@ -0,0 +1,22 @@ +from elasticsearch import Elasticsearch, TransportError +from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT + +es = Elasticsearch([{ + "host": ELASTICSEARCH_HOST + , "port": ELASTICSEARCH_PORT +}]) + +def get_user_by_unique_column(column_name, column_value): + user_details = None + try: + response = es.search( + index = "users" + , doc_type = "local" + , body = { + "query": { "match": { column_name: column_value } } + }) + if len(response["hits"]["hits"]) > 0: + user_details = response["hits"]["hits"][0]["_source"] + except TransportError as te: + pass + return user_details -- cgit v1.2.3 From 98de7bf9649115b67a13018454a2c3766be1fc12 Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Fri, 19 Jan 2018 10:38:04 +0300 Subject: Add save_user() function * On successful login via OAuth2, save the details of the user in elasticsearch store, to avoid hitting the external provider for the basic details. 
--- wqflask/utility/elasticsearch_tools.py | 7 +++++++ 1 file changed, 7 insertions(+) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index bc7bb240..74db489b 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -20,3 +20,10 @@ def get_user_by_unique_column(column_name, column_value): except TransportError as te: pass return user_details + +def save_user(user, user_id, index="users", doc_type="local"): + es = Elasticsearch([{ + "host": ELASTICSEARCH_HOST + , "port": ELASTICSEARCH_PORT + }]) + es.create(index, doc_type, body=user, id=user_id) -- cgit v1.2.3 From 7959930d3276b5317d933a428a3c2f9ea8f7ddf4 Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Fri, 19 Jan 2018 12:00:04 +0300 Subject: Delay after save for indexing * Elasticsearch need a short delay after adding document for it to index the document for subsequent access. --- wqflask/utility/elasticsearch_tools.py | 2 ++ 1 file changed, 2 insertions(+) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index 74db489b..c2c999ea 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -22,8 +22,10 @@ def get_user_by_unique_column(column_name, column_value): return user_details def save_user(user, user_id, index="users", doc_type="local"): + from time import sleep es = Elasticsearch([{ "host": ELASTICSEARCH_HOST , "port": ELASTICSEARCH_PORT }]) es.create(index, doc_type, body=user, id=user_id) + sleep(1) # Delay 1 second to allow indexing -- cgit v1.2.3 From e0295504fb0097db394e99568339e24a71406123 Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Tue, 30 Jan 2018 13:07:58 +0300 Subject: Fail safely if elasticsearch is down or unconfigured * If elasticsearch server is down, or the configuration variables are not provided at startup or in a configuration file, then do not allow the system to simply crash, but instead, inform the user that they cannot use the services that depend on elasticsearch to be running. 
--- wqflask/utility/elasticsearch_tools.py | 16 ++++++++------ .../wqflask/templates/new_security/login_user.html | 25 ++++++++++++++++++---- wqflask/wqflask/user_manager.py | 6 +++++- 3 files changed, 36 insertions(+), 11 deletions(-) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index c2c999ea..8b8ad9cc 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -1,10 +1,14 @@ -from elasticsearch import Elasticsearch, TransportError -from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT +es = None +try: + from elasticsearch import Elasticsearch, TransportError + from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT -es = Elasticsearch([{ - "host": ELASTICSEARCH_HOST - , "port": ELASTICSEARCH_PORT -}]) + es = Elasticsearch([{ + "host": ELASTICSEARCH_HOST + , "port": ELASTICSEARCH_PORT + }]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None +except: + es = None def get_user_by_unique_column(column_name, column_value): user_details = None diff --git a/wqflask/wqflask/templates/new_security/login_user.html b/wqflask/wqflask/templates/new_security/login_user.html index 15f0a27e..0dae3503 100644 --- a/wqflask/wqflask/templates/new_security/login_user.html +++ b/wqflask/wqflask/templates/new_security/login_user.html @@ -16,7 +16,14 @@

[login_user.html diff: HTML markup stripped during extraction; the hunks are identical to the login_user.html changes shown earlier in this log, wrapping the "Create a new account" link, the external-services login section and the local sign-in form in "{% if es_server: %} ... {% else: %} ... {% endif %}" blocks with "Please try again later." alerts.]
+ {% endif %} diff --git a/wqflask/wqflask/user_manager.py b/wqflask/wqflask/user_manager.py index 4322945b..772d6c83 100644 --- a/wqflask/wqflask/user_manager.py +++ b/wqflask/wqflask/user_manager.py @@ -585,13 +585,17 @@ class LoginUser(object): logger.debug("in login params are:", params) if not params: from utility.tools import GITHUB_AUTH_URL, ORCID_AUTH_URL + from utility.elasticsearch_tools import es external_login = None if GITHUB_AUTH_URL or ORCID_AUTH_URL: external_login={ "github": GITHUB_AUTH_URL, "orcid": ORCID_AUTH_URL } - return render_template("new_security/login_user.html", external_login=external_login) + return render_template( + "new_security/login_user.html" + , external_login=external_login + , es_server=es) else: user_details = get_user_by_unique_column("email_address", params["email_address"]) user = None -- cgit v1.2.3 From a494260a8f9cf0e3ecf0c428bb70d4066623f1dd Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Mon, 5 Feb 2018 20:01:45 +0300 Subject: Refactor common items to more generic methods. * Refactor code that can be used in more than one place to a more generic method/function that's called by other methods --- wqflask/utility/elasticsearch_tools.py | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index 8b8ad9cc..ea636b2e 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -11,25 +11,27 @@ except: es = None def get_user_by_unique_column(column_name, column_value): - user_details = None + return get_item_by_unique_column(column_name, column_value, index="users", doc_type="local") + +def save_user(user, user_id): + es_save_data("users", "local", user, user_id) + +def get_item_by_unique_column(column_name, column_value, index, doc_type): + item_details = None try: response = es.search( - index = "users" - , doc_type = "local" + index = index + , doc_type = doc_type , body = { "query": { "match": { column_name: column_value } } }) if len(response["hits"]["hits"]) > 0: - user_details = response["hits"]["hits"][0]["_source"] + item_details = response["hits"]["hits"][0]["_source"] except TransportError as te: pass - return user_details + return item_details -def save_user(user, user_id, index="users", doc_type="local"): +def es_save_data(index, doc_type, data_item, data_id,): from time import sleep - es = Elasticsearch([{ - "host": ELASTICSEARCH_HOST - , "port": ELASTICSEARCH_PORT - }]) - es.create(index, doc_type, body=user, id=user_id) + es.create(index, doc_type, body=data_item, id=data_id) sleep(1) # Delay 1 second to allow indexing -- cgit v1.2.3 From 49f3111bcac8d69dea52ff75bdd39e01e99c2f02 Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Fri, 9 Feb 2018 10:11:21 +0300 Subject: Add check for elasticsearch * Add some extra checks to ensure that elasticsearch is running before presenting the UI to the user. 
--- wqflask/utility/elasticsearch_tools.py | 4 ++++ 1 file changed, 4 insertions(+) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index ea636b2e..a0383033 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -7,6 +7,10 @@ try: "host": ELASTICSEARCH_HOST , "port": ELASTICSEARCH_PORT }]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None + + # Check if elasticsearch is running + if not es.ping(): + es = None except: es = None -- cgit v1.2.3 From 690e525a8a063d1be107c15521474052bd6ae2b4 Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Fri, 9 Feb 2018 11:02:07 +0300 Subject: Check elasticsearch at point of use * Instead of checking for the state of elasticsearch at startup, check the state at the moment the user requests a feature that depends on elasticsearch. This reduces the chances that the user is dropped onto an exception page when elasticsearch server goes down. --- wqflask/utility/elasticsearch_tools.py | 3 --- wqflask/wqflask/user_manager.py | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index a0383033..4fc0035c 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -8,9 +8,6 @@ try: , "port": ELASTICSEARCH_PORT }]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None - # Check if elasticsearch is running - if not es.ping(): - es = None except: es = None diff --git a/wqflask/wqflask/user_manager.py b/wqflask/wqflask/user_manager.py index 8f09c206..630be9aa 100644 --- a/wqflask/wqflask/user_manager.py +++ b/wqflask/wqflask/user_manager.py @@ -626,7 +626,7 @@ class LoginUser(object): return render_template( "new_security/login_user.html" , external_login=external_login - , es_server=es) + , es_server=es.ping()) else: user_details = get_user_by_unique_column("email_address", params["email_address"]) user = None -- cgit v1.2.3 From c68cffc414ac6d7536db36e79914f7d57af741c6 Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Tue, 13 Feb 2018 13:48:28 +0300 Subject: Update module to make it more testable * Update functions to make them more testable. * Update code using updated functions. 
--- wqflask/utility/elasticsearch_tools.py | 40 +++++++++++++--------- wqflask/wqflask/user_manager.py | 61 +++++++++++++++++++--------------- 2 files changed, 58 insertions(+), 43 deletions(-) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index 4fc0035c..a964b025 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -1,23 +1,31 @@ -es = None -try: - from elasticsearch import Elasticsearch, TransportError - from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT +from elasticsearch import Elasticsearch, TransportError +import logging - es = Elasticsearch([{ - "host": ELASTICSEARCH_HOST - , "port": ELASTICSEARCH_PORT - }]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None - -except: +def get_elasticsearch_connection(): es = None + try: + from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT + + es = Elasticsearch([{ + "host": ELASTICSEARCH_HOST + , "port": ELASTICSEARCH_PORT + }]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None + + es_logger = logging.getLogger("elasticsearch") + es_logger.setLevel(logging.INFO) + es_logger.addHandler(logging.NullHandler()) + except: + es = None + + return es -def get_user_by_unique_column(column_name, column_value): - return get_item_by_unique_column(column_name, column_value, index="users", doc_type="local") +def get_user_by_unique_column(es, column_name, column_value, index="users", doc_type="local"): + return get_item_by_unique_column(es, column_name, column_value, index=index, doc_type=doc_type) -def save_user(user, user_id): - es_save_data("users", "local", user, user_id) +def save_user(es, user, user_id): + es_save_data(es, "users", "local", user, user_id) -def get_item_by_unique_column(column_name, column_value, index, doc_type): +def get_item_by_unique_column(es, column_name, column_value, index, doc_type): item_details = None try: response = es.search( @@ -32,7 +40,7 @@ def get_item_by_unique_column(column_name, column_value, index, doc_type): pass return item_details -def es_save_data(index, doc_type, data_item, data_id,): +def es_save_data(es, index, doc_type, data_item, data_id,): from time import sleep es.create(index, doc_type, body=data_item, id=data_id) sleep(1) # Delay 1 second to allow indexing diff --git a/wqflask/wqflask/user_manager.py b/wqflask/wqflask/user_manager.py index 630be9aa..6b667615 100644 --- a/wqflask/wqflask/user_manager.py +++ b/wqflask/wqflask/user_manager.py @@ -55,8 +55,9 @@ logger = getLogger(__name__) from base.data_set import create_datasets_list import requests -from utility.elasticsearch_tools import get_user_by_unique_column, save_user, es_save_data +from utility.elasticsearch_tools import * +es = get_elasticsearch_connection() THREE_DAYS = 60 * 60 * 24 * 3 #THREE_DAYS = 45 @@ -271,14 +272,18 @@ class RegisterUser(object): self.thank_you_mode = False self.errors = [] self.user = Bunch() + es = kw.get('es_connection', None) + + if not es: + self.errors.append("Missing connection object") self.user.email_address = kw.get('email_address', '').encode("utf-8").strip() if not (5 <= len(self.user.email_address) <= 50): self.errors.append('Email Address needs to be between 5 and 50 characters.') - - email_exists = get_user_by_unique_column("email_address", self.user.email_address) - if email_exists: - self.errors.append('User already exists with that email') + else: + email_exists = get_user_by_unique_column(es, "email_address", 
self.user.email_address) + if email_exists: + self.errors.append('User already exists with that email') self.user.full_name = kw.get('full_name', '').encode("utf-8").strip() if not (5 <= len(self.user.full_name) <= 50): @@ -305,7 +310,7 @@ class RegisterUser(object): self.user.confirmed = 1 self.user.registration_info = json.dumps(basic_info(), sort_keys=True) - save_user(self.user.__dict__, self.user.user_id) + save_user(es, self.user.__dict__, self.user.user_id) def set_password(password, user): pwfields = Bunch() @@ -381,7 +386,7 @@ class ForgotPasswordEmail(VerificationEmail): "email_address": toaddr, "timestamp": timestamp() } - es_save_data(self.key_prefix, "local", data, verification_code) + es_save_data(es, self.key_prefix, "local", data, verification_code) subject = self.subject body = render_template( @@ -431,7 +436,6 @@ def verify_email(): @app.route("/n/password_reset", methods=['GET']) def password_reset(): - from utility.elasticsearch_tools import get_item_by_unique_column logger.debug("in password_reset request.url is:", request.url) # We do this mainly just to assert that it's in proper form for displaying next page @@ -441,14 +445,16 @@ def password_reset(): hmac = request.args.get('hm') if verification_code: code_details = get_item_by_unique_column( - "verification_code", - verification_code, - ForgotPasswordEmail.key_prefix, - "local") + es + , "verification_code" + , verification_code + , ForgotPasswordEmail.key_prefix + , "local") if code_details: user_details = get_user_by_unique_column( - "email_address", - code_details["email_address"]) + es + , "email_address" + , code_details["email_address"]) if user_details: return render_template( "new_security/password_reset.html", user_encode=user_details["user_id"]) @@ -533,7 +539,7 @@ def github_oauth2(): result_dict = {arr[0]:arr[1] for arr in [tok.split("=") for tok in [token.encode("utf-8") for token in result.text.split("&")]]} github_user = get_github_user_details(result_dict["access_token"]) - user_details = get_user_by_unique_column("github_id", github_user["id"]) + user_details = get_user_by_unique_column(es, "github_id", github_user["id"]) if user_details == None: user_details = { "user_id": str(uuid.uuid4()) @@ -545,7 +551,7 @@ def github_oauth2(): , "active": 1 , "confirmed": 1 } - save_user(user_details, user_details["user_id"]) + save_user(es, user_details, user_details["user_id"]) url = "/n/login?type=github&uid="+user_details["user_id"] return redirect(url) @@ -566,7 +572,7 @@ def orcid_oauth2(): result = requests.post(ORCID_TOKEN_URL, data=data) result_dict = json.loads(result.text.encode("utf-8")) - user_details = get_user_by_unique_column("orcid", result_dict["orcid"]) + user_details = get_user_by_unique_column(es, "orcid", result_dict["orcid"]) if user_details == None: user_details = { "user_id": str(uuid4()) @@ -580,7 +586,7 @@ def orcid_oauth2(): , "active": 1 , "confirmed": 1 } - save_user(user_details, user_details["user_id"]) + save_user(es, user_details, user_details["user_id"]) url = "/n/login?type=orcid&uid="+user_details["user_id"] else: flash("There was an error getting code from ORCID") @@ -599,7 +605,7 @@ class LoginUser(object): def oauth2_login(self, login_type, user_id): """Login via an OAuth2 provider""" - user_details = get_user_by_unique_column("user_id", user_id) + user_details = get_user_by_unique_column(es, "user_id", user_id) if user_details: user = model.User() user.id = user_details["user_id"] @@ -616,7 +622,6 @@ class LoginUser(object): logger.debug("in login params are:", params) 
if not params: from utility.tools import GITHUB_AUTH_URL, ORCID_AUTH_URL - from utility.elasticsearch_tools import es external_login = None if GITHUB_AUTH_URL or ORCID_AUTH_URL: external_login={ @@ -628,8 +633,9 @@ class LoginUser(object): , external_login=external_login , es_server=es.ping()) else: - user_details = get_user_by_unique_column("email_address", params["email_address"]) + user_details = get_user_by_unique_column(es, "email_address", params["email_address"]) user = None + valid = None if user_details: user = model.User(); for key in user_details: @@ -672,7 +678,7 @@ class LoginUser(object): else: if user: self.unsuccessful_login(user) - flash("Invalid email-address or password. Please try again.", "alert-error") + flash("Invalid email-address or password. Please try again.", "alert-danger") response = make_response(redirect(url_for('login'))) return response @@ -739,7 +745,7 @@ def forgot_password(): def forgot_password_submit(): params = request.form email_address = params['email_address'] - user_details = get_user_by_unique_column("email_address", email_address) + user_details = get_user_by_unique_column(es, "email_address", email_address) if user_details: ForgotPasswordEmail(user_details["email_address"]) # try: @@ -815,16 +821,17 @@ def register(): params = request.form if request.form else request.args + params = params.to_dict(flat=True) + params["es_connection"] = es if params: logger.debug("Attempting to register the user...") result = RegisterUser(params) errors = result.errors - if result.thank_you_mode: - assert not errors, "Errors while in thank you mode? That seems wrong..." - return render_template("new_security/registered.html", - subject=VerificationEmail.subject) + if len(errors) == 0: + flash("Registration successful. You may login with your new account", "alert-info") + return redirect(url_for("login")) return render_template("new_security/register_user.html", values=params, errors=errors) -- cgit v1.2.3 From 3de1ecfa37b73b4cb011b634c8b4afc2362f858c Mon Sep 17 00:00:00 2001 From: Pjotr Prins Date: Thu, 15 Feb 2018 10:30:52 +0000 Subject: Fixing authentication stuff so it uses parameters properly. Also no PYTHONPATH needed as it is now in the Guix build. 
--- bin/genenetwork2 | 12 +++++++----- etc/default_settings.py | 18 ++++++++++++++++++ wqflask/run_gunicorn.py | 3 +++ wqflask/utility/elasticsearch_tools.py | 16 ++++++++++++---- wqflask/utility/tools.py | 29 +++++++++++------------------ wqflask/wqflask/user_manager.py | 7 +++++-- 6 files changed, 56 insertions(+), 29 deletions(-) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/bin/genenetwork2 b/bin/genenetwork2 index 74ed7f9b..31fefbd3 100755 --- a/bin/genenetwork2 +++ b/bin/genenetwork2 @@ -95,11 +95,13 @@ export WQFLASK_OVERRIDES=$overrides # JSON echo WQFLASK_SETTINGS=$settings echo WQFLASK_OVERRIDES=$overrides -if [ -z $ELASTICSEARCH_PROFILE ]; then - echo -e "\033[1;33mWARNING: Elastic Search profile has not been set - use ELASTICSEARCH_PROFILE\033[0m"; -else - PYTHONPATH="$PYTHONPATH${PYTHONPATH:+:}$ELASTICSEARCH_PROFILE/lib/python2.7/site-packages" -fi +# This is a temporary hack to inject ES - should have added python2-elasticsearch package to guix instead +# if [ -z $ELASTICSEARCH_PROFILE ]; then +# echo -e "WARNING: Elastic Search profile has not been set - use ELASTICSEARCH_PROFILE"; +# else +# PYTHONPATH="$PYTHONPATH${PYTHONPATH:+:}$ELASTICSEARCH_PROFILE/lib/python2.7/site-packages" +# fi + if [ -z $GN2_PROFILE ] ; then echo "WARNING: GN2_PROFILE has not been set - you need the environment, so I hope you know what you are doing!" export GN2_PROFILE=$(dirname $(dirname $(which genenetwork2))) diff --git a/etc/default_settings.py b/etc/default_settings.py index 699d21f1..a70d8aec 100644 --- a/etc/default_settings.py +++ b/etc/default_settings.py @@ -41,6 +41,24 @@ SECURITY_POST_LOGIN_VIEW = "/thank_you" SERVER_PORT = 5003 # running on localhost SECRET_HMAC_CODE = '\x08\xdf\xfa\x93N\x80\xd9\\H@\\\x9f`\x98d^\xb4a;\xc6OM\x946a\xbc\xfc\x80:*\xebc' +GITHUB_CLIENT_ID = "UNKNOWN" +GITHUB_CLIENT_SECRET = "UNKNOWN" +GITHUB_AUTH_URL = "UNKNOWN" +GITHUB_API_URL = "UNKNOWN" + +ORCID_CLIENT_ID = "UNKNOWN" +ORCID_CLIENT_SECRET = "UNKNOWN" +ORCID_AUTH_URL = "UNKNOWN" +ORCID_TOKEN_URL = "UNKNOWN" + +ELASTICSEARCH_HOST = "localhost" +ELASTICSEARCH_PORT = '9200' + +SMTP_CONNECT = "UNKNOWN" +SMTP_USERNAME = "UNKNOWN" +SMTP_PASSWORD = "UNKNOWN" + + # ---- Behavioural settings (defaults) note that logger and log levels can # be overridden at the module level and with enviroment settings WEBSERVER_MODE = 'DEV' # Python webserver mode (DEBUG|DEV|PROD) diff --git a/wqflask/run_gunicorn.py b/wqflask/run_gunicorn.py index 14a2d689..ebe3add5 100644 --- a/wqflask/run_gunicorn.py +++ b/wqflask/run_gunicorn.py @@ -11,6 +11,9 @@ print "Starting up Gunicorn process" from wqflask import app +app.config['SESSION_TYPE'] = 'filesystem' +app.config['SECRET_KEY'] = 'super secret key' + @app.route("/gunicorn") def hello(): return "

<h1>Hello There!</h1>

" diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index a964b025..2d3d5add 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -1,10 +1,18 @@ from elasticsearch import Elasticsearch, TransportError import logging +from utility.logger import getLogger +logger = getLogger(__name__) + +from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT + def get_elasticsearch_connection(): + logger.info("get_elasticsearch_connection") es = None try: - from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT + assert(ELASTICSEARCH_HOST) + assert(ELASTICSEARCH_PORT) + logger.info("ES HOST",ELASTICSEARCH_HOST) es = Elasticsearch([{ "host": ELASTICSEARCH_HOST @@ -31,12 +39,12 @@ def get_item_by_unique_column(es, column_name, column_value, index, doc_type): response = es.search( index = index , doc_type = doc_type - , body = { - "query": { "match": { column_name: column_value } } + , body = { + "query": { "match": { column_name: column_value } } }) if len(response["hits"]["hits"]) > 0: item_details = response["hits"]["hits"][0]["_source"] - except TransportError as te: + except TransportError as te: pass return item_details diff --git a/wqflask/utility/tools.py b/wqflask/utility/tools.py index 005f9b0f..8c9fed96 100644 --- a/wqflask/utility/tools.py +++ b/wqflask/utility/tools.py @@ -251,32 +251,25 @@ assert_dir(JS_GUIX_PATH) JS_GN_PATH = get_setting('JS_GN_PATH') # assert_dir(JS_GN_PATH) -def get_setting_safe(setting): - try: - return get_setting(setting) - except: - print("Could not find the setting '", setting, "'. Continuing with value unset") - return None - -GITHUB_CLIENT_ID = get_setting_safe('GITHUB_CLIENT_ID') -GITHUB_CLIENT_SECRET = get_setting_safe('GITHUB_CLIENT_SECRET') +GITHUB_CLIENT_ID = get_setting('GITHUB_CLIENT_ID') +GITHUB_CLIENT_SECRET = get_setting('GITHUB_CLIENT_SECRET') GITHUB_AUTH_URL = None if GITHUB_CLIENT_ID and GITHUB_CLIENT_SECRET: GITHUB_AUTH_URL = "https://github.com/login/oauth/authorize?client_id="+GITHUB_CLIENT_ID+"&client_secret="+GITHUB_CLIENT_SECRET -GITHUB_API_URL = get_setting_safe('GITHUB_API_URL') -ORCID_CLIENT_ID = get_setting_safe('ORCID_CLIENT_ID') -ORCID_CLIENT_SECRET = get_setting_safe('ORCID_CLIENT_SECRET') +GITHUB_API_URL = get_setting('GITHUB_API_URL') +ORCID_CLIENT_ID = get_setting('ORCID_CLIENT_ID') +ORCID_CLIENT_SECRET = get_setting('ORCID_CLIENT_SECRET') ORCID_AUTH_URL = None if ORCID_CLIENT_ID and ORCID_CLIENT_SECRET: ORCID_AUTH_URL = "https://sandbox.orcid.org/oauth/authorize?response_type=code&scope=/authenticate&show_login=true&client_id="+ORCID_CLIENT_ID+"&client_secret="+ORCID_CLIENT_SECRET -ORCID_TOKEN_URL = get_setting_safe('ORCID_TOKEN_URL') +ORCID_TOKEN_URL = get_setting('ORCID_TOKEN_URL') -ELASTICSEARCH_HOST = get_setting_safe('ELASTICSEARCH_HOST') -ELASTICSEARCH_PORT = get_setting_safe('ELASTICSEARCH_PORT') +ELASTICSEARCH_HOST = get_setting('ELASTICSEARCH_HOST') +ELASTICSEARCH_PORT = get_setting('ELASTICSEARCH_PORT') -SMTP_CONNECT = get_setting_safe('SMTP_CONNECT') -SMTP_USERNAME = get_setting_safe('SMTP_USERNAME') -SMTP_PASSWORD = get_setting_safe('SMTP_PASSWORD') +SMTP_CONNECT = get_setting('SMTP_CONNECT') +SMTP_USERNAME = get_setting('SMTP_USERNAME') +SMTP_PASSWORD = get_setting('SMTP_PASSWORD') PYLMM_COMMAND = app_set("PYLMM_COMMAND",pylmm_command()) GEMMA_COMMAND = app_set("GEMMA_COMMAND",gemma_command()) diff --git a/wqflask/wqflask/user_manager.py b/wqflask/wqflask/user_manager.py index 6b667615..c8471cb1 100644 --- 
a/wqflask/wqflask/user_manager.py +++ b/wqflask/wqflask/user_manager.py @@ -55,9 +55,8 @@ logger = getLogger(__name__) from base.data_set import create_datasets_list import requests -from utility.elasticsearch_tools import * +from utility.elasticsearch_tools import get_elasticsearch_connection, get_user_by_unique_column, save_user -es = get_elasticsearch_connection() THREE_DAYS = 60 * 60 * 24 * 3 #THREE_DAYS = 45 @@ -479,6 +478,7 @@ def password_reset_step2(): password = request.form['password'] set_password(password, user) + es = get_elasticsearch_connection() es.update( index = "users" , doc_type = "local" @@ -620,6 +620,7 @@ class LoginUser(object): """Login through the normal form""" params = request.form if request.form else request.args logger.debug("in login params are:", params) + es = get_elasticsearch_connection() if not params: from utility.tools import GITHUB_AUTH_URL, ORCID_AUTH_URL external_login = None @@ -628,6 +629,7 @@ class LoginUser(object): "github": GITHUB_AUTH_URL, "orcid": ORCID_AUTH_URL } + assert(es is not None) return render_template( "new_security/login_user.html" , external_login=external_login @@ -822,6 +824,7 @@ def register(): params = request.form if request.form else request.args params = params.to_dict(flat=True) + es = get_elasticsearch_connection() params["es_connection"] = es if params: -- cgit v1.2.3 From 7aae77762e47c9269591e138ab25320d3ed3a85f Mon Sep 17 00:00:00 2001 From: Pjotr Prins Date: Tue, 20 Mar 2018 11:02:25 +0000 Subject: Refactor startup config for gunicorn and werkzeug --- wqflask/run_gunicorn.py | 6 ++-- wqflask/runserver.py | 17 ++-------- wqflask/utility/elasticsearch_tools.py | 5 +++ wqflask/utility/startup_config.py | 39 ++++++++++++++++++++++ wqflask/utility/tools.py | 4 ++- .../wqflask/templates/new_security/login_user.html | 8 ++++- 6 files changed, 59 insertions(+), 20 deletions(-) create mode 100644 wqflask/utility/startup_config.py (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/run_gunicorn.py b/wqflask/run_gunicorn.py index ebe3add5..adffdca3 100644 --- a/wqflask/run_gunicorn.py +++ b/wqflask/run_gunicorn.py @@ -7,12 +7,12 @@ # from flask import Flask # application = Flask(__name__) -print "Starting up Gunicorn process" +print "===> Starting up Gunicorn process" from wqflask import app +from utility.startup_config import app_config -app.config['SESSION_TYPE'] = 'filesystem' -app.config['SECRET_KEY'] = 'super secret key' +app_config() @app.route("/gunicorn") def hello(): diff --git a/wqflask/runserver.py b/wqflask/runserver.py index a0c76e51..5f41d04d 100644 --- a/wqflask/runserver.py +++ b/wqflask/runserver.py @@ -21,22 +21,9 @@ GREEN = '\033[92m' BOLD = '\033[1m' ENDC = '\033[0m' -import os -app.config['SECRET_KEY'] = os.urandom(24) +from utility.startup_config import app_config -from utility.tools import WEBSERVER_MODE,get_setting_int,get_setting,get_setting_bool - -port = get_setting_int("SERVER_PORT") - -print("GN2 API server URL is ["+BLUE+get_setting("GN_SERVER_URL")+ENDC+"]") - -if get_setting_bool("USE_GN_SERVER"): - import requests - page = requests.get(get_setting("GN_SERVER_URL")) - if page.status_code != 200: - raise Exception("API server not found!") - -print("GN2 is running. 
Visit %s[http://localhost:%s/%s](%s)" % (BLUE,str(port),ENDC,get_setting("WEBSERVER_URL"))) +app_config() werkzeug_logger = logging.getLogger('werkzeug') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index 2d3d5add..734379f7 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -6,6 +6,11 @@ logger = getLogger(__name__) from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT +def test_elasticsearch_connection(): + es = Elasticsearch(['http://'+ELASTICSEARCH_HOST+":"+ELASTICSEARCH_PORT+'/'], verify_certs=True) + if not es.ping(): + logger.warning("Elasticsearch is DOWN") + def get_elasticsearch_connection(): logger.info("get_elasticsearch_connection") es = None diff --git a/wqflask/utility/startup_config.py b/wqflask/utility/startup_config.py new file mode 100644 index 00000000..02e8e56c --- /dev/null +++ b/wqflask/utility/startup_config.py @@ -0,0 +1,39 @@ + +from wqflask import app +from utility.tools import WEBSERVER_MODE, show_settings, get_setting_int, get_setting, get_setting_bool + +import utility.logger +logger = utility.logger.getLogger(__name__ ) + +BLUE = '\033[94m' +GREEN = '\033[92m' +BOLD = '\033[1m' +ENDC = '\033[0m' + +def app_config(): + app.config['SESSION_TYPE'] = 'filesystem' + if not app.config.get('SECRET_KEY'): + import os + app.config['SECRET_KEY'] = str(os.urandom(24)) + + mode = WEBSERVER_MODE + if mode == "DEV" or mode == "DEBUG": + app.config['TEMPLATES_AUTO_RELOAD'] = True + if mode == "DEBUG": + app.config['EXPLAIN_TEMPLATE_LOADING'] = True + print("==========================================") + show_settings() + + port = get_setting_int("SERVER_PORT") + + if get_setting_bool("USE_GN_SERVER"): + print("GN2 API server URL is ["+BLUE+get_setting("GN_SERVER_URL")+ENDC+"]") + import requests + page = requests.get(get_setting("GN_SERVER_URL")) + if page.status_code != 200: + raise Exception("API server not found!") + + import utility.elasticsearch_tools as es + es.test_elasticsearch_connection() + + print("GN2 is running. Visit %s[http://localhost:%s/%s](%s)" % (BLUE,str(port),ENDC,get_setting("WEBSERVER_URL"))) diff --git a/wqflask/utility/tools.py b/wqflask/utility/tools.py index 4b4cd633..59bb49d8 100644 --- a/wqflask/utility/tools.py +++ b/wqflask/utility/tools.py @@ -220,7 +220,7 @@ def show_settings(): logger.info(OVERRIDES) logger.info(BLUE+"Mr. Mojo Risin 2"+ENDC) - print "runserver.py: ****** Webserver configuration ******" + print "runserver.py: ****** Webserver configuration - k,v pairs from app.config ******" keylist = app.config.keys() keylist.sort() for k in keylist: @@ -269,6 +269,8 @@ if ORCID_CLIENT_ID != 'UNKNOWN' and ORCID_CLIENT_SECRET: ELASTICSEARCH_HOST = get_setting('ELASTICSEARCH_HOST') ELASTICSEARCH_PORT = get_setting('ELASTICSEARCH_PORT') +import utility.elasticsearch_tools as es +es.test_elasticsearch_connection() SMTP_CONNECT = get_setting('SMTP_CONNECT') SMTP_USERNAME = get_setting('SMTP_USERNAME') diff --git a/wqflask/wqflask/templates/new_security/login_user.html b/wqflask/wqflask/templates/new_security/login_user.html index 6d597f6b..949760b6 100644 --- a/wqflask/wqflask/templates/new_security/login_user.html +++ b/wqflask/wqflask/templates/new_security/login_user.html @@ -15,7 +15,6 @@

Don't have an account?

- {% if es_server: %} Create a new account {% else: %} @@ -92,6 +91,13 @@ Please try again later.

{% endif %} + {% if not es_server and not external_login: %} +
+
+ Note: it is safe to use GeneNetwork without a login. Login is only required for keeping track of + collections and getting access to some types of restricted data. +
+ {% endif %} -- cgit v1.2.3 From 8145507d6d617554cf996e6cebf286d30ae64df0 Mon Sep 17 00:00:00 2001 From: Pjotr Prins Date: Wed, 4 Apr 2018 16:58:21 +0000 Subject: ES: doc --- bin/genenetwork2 | 18 ++++++++++----- doc/elasticsearch.org | 41 ++++++++++++++++++++++++++++++++++ wqflask/utility/elasticsearch_tools.py | 9 ++++---- wqflask/wqflask/user_manager.py | 23 +++++-------------- 4 files changed, 62 insertions(+), 29 deletions(-) create mode 100644 doc/elasticsearch.org (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/bin/genenetwork2 b/bin/genenetwork2 index 3f06e7f9..18e02388 100755 --- a/bin/genenetwork2 +++ b/bin/genenetwork2 @@ -21,10 +21,18 @@ # # env GN2_PROFILE=~/opt/gn-latest-guix ./bin/genenetwork2 ~/my_settings.py # -# To run a maintenance script with settings (instead of the webserver) add that with -# a -c switch, e.g. +# To run a maintenance python script with settings (instead of the +# webserver) add that with a -c switch, e.g. # -# env GN2_PROFILE=~/opt/gn-latest-guix ./bin/genenetwork2 ~/my_overrides.json -c ./wqflask/maintenance/gen_select_dataset.py +# env GN2_PROFILE=~/opt/gn-latest-guix ./bin/genenetwork2 -c ./wqflask/maintenance/gen_select_dataset.py +# +# To run any script in the environment +# +# env GN2_PROFILE=~/opt/gn-latest-guix ./bin/genenetwork2 ./etc/default_settings.py -cli echo "HELLO WORLD" +# +# To get a python REPL(!) +# +# env GN2_PROFILE=~/opt/gn-latest-guix ./bin/genenetwork2 ./etc/default_settings.py -cli python # # For development you may want to run # @@ -114,7 +122,6 @@ else export PATH=$GN2_PROFILE/bin:$PATH export PYTHONPATH="$GN2_PROFILE/lib/python2.7/site-packages" # never inject another PYTHONPATH!! export R_LIBS_SITE=$GN2_PROFILE/site-library - export GEM_PATH=$GN2_PROFILE/lib/ruby/gems/2.4.0 export JS_GUIX_PATH=$GN2_PROFILE/share/genenetwork2/javascript export GUIX_GTK3_PATH="$GN2_PROFILE/lib/gtk-3.0" export GI_TYPELIB_PATH="$GN2_PROFILE/lib/girepository-1.0" @@ -134,7 +141,6 @@ else done done <<< "$PYTHONPATH" if [ ! -d $R_LIBS_SITE ] ; then echo "R_LIBS_SITE not valid "$R_LIBS_SITE ; exit 1 ; fi - if [ ! -d $GEM_PATH ] ; then echo "GEM_PATH not valid "$GEM_PATH ; exit 1 ; fi fi if [ -z $PYTHONPATH ] ; then echo "ERROR PYTHONPATH has not been set - use GN2_PROFILE!" @@ -170,9 +176,9 @@ if [ "$1" = '-c' ] ; then python $cmd $* exit $? fi + # Now handle command parameter -cli which runs in bash if [ "$1" = "-cli" ] ; then - echo "HERE" cd $GN2_BASE_DIR/wqflask cmd=$2 echo PYTHONPATH=$PYTHONPATH diff --git a/doc/elasticsearch.org b/doc/elasticsearch.org new file mode 100644 index 00000000..18adfc8b --- /dev/null +++ b/doc/elasticsearch.org @@ -0,0 +1,41 @@ +* Elasticsearch + +To get the right environment, first you can get a python REPL with something like + +: env GN2_PROFILE=~/opt/gn-latest ./bin/genenetwork2 ../etc/default_settings.py -cli python + +(make sure to use the correct GN2_PROFILE!) 
+ +Next try + +#+BEGIN_SRC python + +from elasticsearch import Elasticsearch, TransportError + +es = Elasticsearch([{ "host": 'localhost', "port": '9200' }]) + +# Dump all data + +es.search("*") + +# To fetch an E-mail record from the users index + +record = es.search( + index = 'users', doc_type = 'local', body = { + "query": { "match": { "email_address": "myname@email.com" } } + }) + +# It is also possible to do wild card matching + +q = { "query": { "wildcard" : { "full_name" : "pjot*" } }} +es.search(index = 'users', doc_type = 'local', body = q) + +# To get elements from that record: + +record['hits']['hits'][0][u'_source']['full_name'] +u'Pjotr' + +record['hits']['hits'][0][u'_source']['email_address'] +u"myname@email.com" + +#+END_SRC diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index 734379f7..1dba357d 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -12,6 +12,7 @@ def test_elasticsearch_connection(): logger.warning("Elasticsearch is DOWN") def get_elasticsearch_connection(): + """Return a connection to ES. Returns None on failure""" logger.info("get_elasticsearch_connection") es = None try: @@ -20,14 +21,14 @@ def get_elasticsearch_connection(): logger.info("ES HOST",ELASTICSEARCH_HOST) es = Elasticsearch([{ - "host": ELASTICSEARCH_HOST - , "port": ELASTICSEARCH_PORT + "host": ELASTICSEARCH_HOST, "port": ELASTICSEARCH_PORT }]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None es_logger = logging.getLogger("elasticsearch") es_logger.setLevel(logging.INFO) es_logger.addHandler(logging.NullHandler()) except: + logger.error("Failed to get elasticsearch connection") es = None return es @@ -42,9 +43,7 @@ def get_item_by_unique_column(es, column_name, column_value, index, doc_type): item_details = None try: response = es.search( - index = index - , doc_type = doc_type - , body = { + index = index, doc_type = doc_type, body = { "query": { "match": { column_name: column_value } } }) if len(response["hits"]["hits"]) > 0: diff --git a/wqflask/wqflask/user_manager.py b/wqflask/wqflask/user_manager.py index ac3824a7..ead919fc 100644 --- a/wqflask/wqflask/user_manager.py +++ b/wqflask/wqflask/user_manager.py @@ -1,45 +1,30 @@ from __future__ import print_function, division, absolute_import -"""Used to Access things in template like this: -(BUT NOW OUT OF DATE) - - x: {{ g.identity.name }} - security: {{ security.__dict__ }} - -""" - import os import hashlib import datetime import time import logging - import uuid import hashlib import hmac import base64 - import urlparse import simplejson as json #from redis import StrictRedis -import redis +import redis # used for collections Redis = redis.StrictRedis() - from flask import (Flask, g, render_template, url_for, request, make_response, redirect, flash, abort) from wqflask import app - - from pprint import pformat as pf -from wqflask import pbkdf2 - +from wqflask import pbkdf2 # password hashing from wqflask.database import db_session - from wqflask import model from utility import Bunch, Struct, after @@ -62,8 +47,8 @@ THREE_DAYS = 60 * 60 * 24 * 3 def timestamp(): return datetime.datetime.utcnow().isoformat() - class AnonUser(object): + """Anonymous user handling""" cookie_name = 'anon_user_v8' def __init__(self): @@ -169,6 +154,8 @@ def create_signed_cookie(): return the_uuid, uuid_signed class UserSession(object): + """Logged in user handling""" + cookie_name = 'session_id_v2' def __init__(self): -- cgit v1.2.3 From 
fdd28defcaf3326f3c6b6507124708d83a1da119 Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Sun, 15 Apr 2018 11:57:09 +0300 Subject: Deactivate analysis of email_address field * Prevent elasticsearch from analysing and tokenising the email_address field so as to avoid issue with getting back all email addresses with the same domain as the one being searched for. --- wqflask/utility/elasticsearch_tools.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index d35cb5ee..7d2ee8c9 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -24,6 +24,8 @@ def get_elasticsearch_connection(): "host": ELASTICSEARCH_HOST, "port": ELASTICSEARCH_PORT }]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None + setup_users_index(es) + es_logger = logging.getLogger("elasticsearch") es_logger.setLevel(logging.INFO) es_logger.addHandler(logging.NullHandler()) @@ -33,6 +35,17 @@ def get_elasticsearch_connection(): return es +def setup_users_index(es_connection): + if es_connection: + index_settings = { + "properties": { + "email_address": { + "type": "string" + , "index": "not_analyzed"}}} + + es_connection.indices.create(index='users', ignore=400) + es_connection.indices.put_mapping(body=index_settings, index="users", doc_type="local") + def get_user_by_unique_column(es, column_name, column_value, index="users", doc_type="local"): return get_item_by_unique_column(es, column_name, column_value, index=index, doc_type=doc_type) -- cgit v1.2.3 From dda4697505aea2cd950533dfb3a0dfb0e66ec018 Mon Sep 17 00:00:00 2001 From: Pjotr Prins Date: Mon, 16 Apr 2018 09:00:52 +0000 Subject: Docs on elasticsearch use --- README.md | 24 ++++++++++++++------ bin/test-website | 2 +- wqflask/utility/elasticsearch_tools.py | 41 ++++++++++++++++++++++++++++++++++ 3 files changed, 59 insertions(+), 8 deletions(-) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/README.md b/README.md index 3e7e64d0..59645994 100644 --- a/README.md +++ b/README.md @@ -17,25 +17,35 @@ deploy GN2 and dependencies as a self contained unit on any machine. The database can be run separately as well as the source tree (for developers). See the [installation docs](doc/README.org). -## Test +## Run Once installed GN2 can be run online through a browser interface ```sh -./bin/genenetwork2 +genenetwork2 ``` -(default is http://localhost:5003/). For more examples, including running scripts and a Python REPL -see the startup script [./bin/genenetwork2](https://github.com/genenetwork/genenetwork2/blob/testing/bin/genenetwork2). +(default is http://localhost:5003/). For full examples (you'll need to +set a number of environment variables), including running scripts and +a Python REPL, see the startup script +[./bin/genenetwork2](https://github.com/genenetwork/genenetwork2/blob/testing/bin/genenetwork2). 
+## Testing -We are building up automated -testing using [mechanize](https://github.com/genenetwork/genenetwork2/tree/master/test/lib) which can be run with +We are building 'Mechanical Rob' automated testing using Python +[requests](https://github.com/genenetwork/genenetwork2/tree/master/test/lib) +which can be run with something like ```sh -./bin/test-website +env GN2_PROFILE=~/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py -a http://localhost:5003 ``` +The GN2_PROFILE is the Guix profile that contains all +dependencies. The ./bin/genenetwork2 script sets up the environment +and executes test-website.py in a Python interpreter. The -a switch +says to run all tests and the URL points to the running GN2 http +server. + ## Documentation User documentation can be found diff --git a/bin/test-website b/bin/test-website index 5935f016..7fbcfd2f 100755 --- a/bin/test-website +++ b/bin/test-website @@ -2,6 +2,6 @@ if [ -z $GN2_PROFILE ]; then echo "Run request tests with something like" - echo env GN2_PROFILE=/home/wrk/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py http://localhost:5003 + echo env GN2_PROFILE=/home/wrk/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py -a http://localhost:5003 exit 1 fi diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index 7d2ee8c9..4d4a9844 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -1,3 +1,44 @@ +# Elasticsearch support +# +# Some helpful commands to view the database: +# +# You can test the server being up with +# +# curl -H 'Content-Type: application/json' http://localhost:9200 +# +# List all indices +# +# curl -H 'Content-Type: application/json' 'localhost:9200/_cat/indices?v' +# +# To see the users index 'table' +# +# curl http://localhost:9200/users +# +# To list all user ids +# +# curl -H 'Content-Type: application/json' http://localhost:9200/users/local/_search?pretty=true -d ' +# { +# "query" : { +# "match_all" : {} +# }, +# "stored_fields": [] +# }' +# +# To view a record +# +# curl -H 'Content-Type: application/json' http://localhost:9200/users/local/_search?pretty=true -d ' +# { +# "query" : { +# "match" : { "email_address": "pjotr2017@thebird.nl"} +# } +# }' +# +# +# To delete the users index and data (dangerous!) +# +# curl -XDELETE -H 'Content-Type: application/json' 'localhost:9200/users' + + from elasticsearch import Elasticsearch, TransportError import logging -- cgit v1.2.3 From bc1672f8617c56684ae3aeda7018362e818c46d6 Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Mon, 16 Apr 2018 17:25:14 +0300 Subject: Update mappings for Elasticsearch 6.2. Update logger * Update the indexes mappings to be compatible with the newer Elasticsearch 6.2.* series. Close the index before updating it, and reopen it after to help with the re-indexing of the data. * Update the error logger to include the exception that was thrown. 
--- wqflask/utility/elasticsearch_tools.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index 7d2ee8c9..0dc59d43 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -29,8 +29,8 @@ def get_elasticsearch_connection(): es_logger = logging.getLogger("elasticsearch") es_logger.setLevel(logging.INFO) es_logger.addHandler(logging.NullHandler()) - except: - logger.error("Failed to get elasticsearch connection") + except Exception as e: + logger.error("Failed to get elasticsearch connection", e) es = None return es @@ -40,11 +40,12 @@ def setup_users_index(es_connection): index_settings = { "properties": { "email_address": { - "type": "string" - , "index": "not_analyzed"}}} + "type": "keyword"}}} es_connection.indices.create(index='users', ignore=400) + es_connection.indices.close(index="users") es_connection.indices.put_mapping(body=index_settings, index="users", doc_type="local") + es_connection.indices.open(index="users") def get_user_by_unique_column(es, column_name, column_value, index="users", doc_type="local"): return get_item_by_unique_column(es, column_name, column_value, index=index, doc_type=doc_type) -- cgit v1.2.3 From fcc43dd4008692b27935d90fcfd134d6c5d9495e Mon Sep 17 00:00:00 2001 From: Muriithi Frederick Muriuki Date: Mon, 16 Apr 2018 18:46:29 +0300 Subject: Remove statements that might be causing issues * I can't swear on this, but it seems the presence of these statements was causing elasticsearch to act weird. --- wqflask/utility/elasticsearch_tools.py | 2 -- 1 file changed, 2 deletions(-) (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index 76dcaebf..cce210c3 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -84,9 +84,7 @@ def setup_users_index(es_connection): "type": "keyword"}}} es_connection.indices.create(index='users', ignore=400) - es_connection.indices.close(index="users") es_connection.indices.put_mapping(body=index_settings, index="users", doc_type="local") - es_connection.indices.open(index="users") def get_user_by_unique_column(es, column_name, column_value, index="users", doc_type="local"): return get_item_by_unique_column(es, column_name, column_value, index=index, doc_type=doc_type) -- cgit v1.2.3 From 67e8f12e103f48329d8b3e38125c0e84b9dc089d Mon Sep 17 00:00:00 2001 From: zsloan Date: Thu, 17 May 2018 16:32:44 +0000 Subject: Added script to quantile normalize a data set and enter its normalized sample data into ElasticSearch Added option to replace trait page sample/strain values with normalized ones Began editing Lei's scatterplot code Changed elasticsearch_tools' get_elasticsearch_connection so that it can also be used for purposes other than user authentication (by adding a "for_user" parameter) --- wqflask/base/anon_collection.py | 22 - wqflask/base/trait_collection.py | 53 --- wqflask/maintenance/quantile_normalize.py | 129 ++++++ wqflask/utility/elasticsearch_tools.py | 5 +- wqflask/wqflask/correlation/corr_scatter_plot.py | 53 +-- wqflask/wqflask/show_trait/SampleList.py | 77 ++-- wqflask/wqflask/show_trait/show_trait.py | 4 - .../wqflask/static/new/css/corr_scatter_plot.css | 40 +- .../static/new/javascript/draw_corr_scatterplot.js | 71 ++- .../wqflask/static/new/javascript/show_trait.js | 13 + 
.../new/javascript/show_trait_mapping_tools.js | 2 +- wqflask/wqflask/templates/corr_scatterplot.html | 476 +++++++++++---------- .../wqflask/templates/show_trait_edit_data.html | 6 +- 13 files changed, 567 insertions(+), 384 deletions(-) delete mode 100644 wqflask/base/anon_collection.py delete mode 100644 wqflask/base/trait_collection.py create mode 100644 wqflask/maintenance/quantile_normalize.py (limited to 'wqflask/utility/elasticsearch_tools.py') diff --git a/wqflask/base/anon_collection.py b/wqflask/base/anon_collection.py deleted file mode 100644 index dd1aa27f..00000000 --- a/wqflask/base/anon_collection.py +++ /dev/null @@ -1,22 +0,0 @@ -class AnonCollection(TraitCollection): - - def __init__(self, anon_id): - self.anon_id = anon_id - self.collection_members = Redis.smembers(self.anon_id) - print("self.collection_members is:", self.collection_members) - self.num_members = len(self.collection_members) - - - @app.route("/collections/remove", methods=('POST',)) - def remove_traits(traits_to_remove): - print("traits_to_remove:", traits_to_remove) - for trait in traits_to_remove: - Redis.srem(self.anon_id, trait) - - members_now = self.collection_members - traits_to_remove - print("members_now:", members_now) - print("Went from {} to {} members in set.".format(len(self.collection_members), len(members_now))) - - # We need to return something so we'll return this...maybe in the future - # we can use it to check the results - return str(len(members_now)) diff --git a/wqflask/base/trait_collection.py b/wqflask/base/trait_collection.py deleted file mode 100644 index d388a3af..00000000 --- a/wqflask/base/trait_collection.py +++ /dev/null @@ -1,53 +0,0 @@ -class TraitCollection(object): - - def __init__(self, is_anon=False): - self.is_anon = is_anon - - - @app.route("/collections/remove", methods=('POST',)) - def remove_traits(): - if is_anon: - AnonCollection.remove_traits() - else: - UserCollection.remove_traits() - - params = request.form - print("params are:", params) - uc_id = params['uc_id'] - uc = model.UserCollection.query.get(uc_id) - traits_to_remove = params.getlist('traits[]') - print("traits_to_remove are:", traits_to_remove) - traits_to_remove = process_traits(traits_to_remove) - print("\n\n after processing, traits_to_remove:", traits_to_remove) - all_traits = uc.members_as_set() - print(" all_traits:", all_traits) - members_now = all_traits - traits_to_remove - print(" members_now:", members_now) - print("Went from {} to {} members in set.".format(len(all_traits), len(members_now))) - uc.members = json.dumps(list(members_now)) - uc.changed_timestamp = datetime.datetime.utcnow() - db_session.commit() - - # We need to return something so we'll return this...maybe in the future - # we can use it to check the results - return str(len(members_now)) - - def __init__(self, anon_id) - self.anon_key = anon_key - self.collection_members = Redis.smembers(self.anon_id) - print("self.collection_members is:", self.collection_members) - self.num_members = len(self.collection_members) - - - @app.route("/collections/remove", methods=('POST',)) - def remove_traits(traits_to_remove): - print("traits_to_remove:", traits_to_remove) - for trait in traits_to_remove: - Redis.srem(self.anon_id, trait) - members_now = self.collection_members - traits_to_remove - print("members_now:", members_now) - print("Went from {} to {} members in set.".format(len(self.collection_members), len(members_now))) - - # We need to return something so we'll return this...maybe in the future - # we can use it to check 
the results - return str(len(members_now)) diff --git a/wqflask/maintenance/quantile_normalize.py b/wqflask/maintenance/quantile_normalize.py new file mode 100644 index 00000000..c11073fb --- /dev/null +++ b/wqflask/maintenance/quantile_normalize.py @@ -0,0 +1,129 @@ +from __future__ import absolute_import, print_function, division + +import sys +sys.path.insert(0,'./') + +from itertools import izip + +import MySQLdb +import urlparse + +import numpy as np +import pandas as pd +from elasticsearch import Elasticsearch, TransportError +from elasticsearch.helpers import bulk + +from flask import Flask, g, request + +from wqflask import app +from utility.elasticsearch_tools import get_elasticsearch_connection +from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT, SQL_URI + +def parse_db_uri(): + """Converts a database URI to the db name, host name, user name, and password""" + + parsed_uri = urlparse.urlparse(SQL_URI) + + db_conn_info = dict( + db = parsed_uri.path[1:], + host = parsed_uri.hostname, + user = parsed_uri.username, + passwd = parsed_uri.password) + + print(db_conn_info) + return db_conn_info + +def create_dataframe(input_file): + with open(input_file) as f: + ncols = len(f.readline().split("\t")) + + input_array = np.loadtxt(open(input_file, "rb"), delimiter="\t", skiprows=1, usecols=range(1, ncols)) + return pd.DataFrame(input_array) + +#This function taken from https://github.com/ShawnLYU/Quantile_Normalize +def quantileNormalize(df_input): + df = df_input.copy() + #compute rank + dic = {} + for col in df: + dic.update({col : sorted(df[col])}) + sorted_df = pd.DataFrame(dic) + rank = sorted_df.mean(axis = 1).tolist() + #sort + for col in df: + t = np.searchsorted(np.sort(df[col]), df[col]) + df[col] = [rank[i] for i in t] + return df + +def set_data(dataset_name): + orig_file = "/home/zas1024/cfw_data/" + dataset_name + ".txt" + + sample_list = [] + with open(orig_file, 'r') as orig_fh, open('quant_norm.csv', 'r') as quant_fh: + for i, (line1, line2) in enumerate(izip(orig_fh, quant_fh)): + trait_dict = {} + sample_list = [] + if i == 0: + sample_names = line1.split('\t')[1:] + else: + trait_name = line1.split('\t')[0] + for i, sample in enumerate(sample_names): + this_sample = { + "name": sample, + "value": line1.split('\t')[i+1], + "qnorm": line2.split('\t')[i+1] + } + sample_list.append(this_sample) + query = """SELECT Species.SpeciesName, InbredSet.InbredSetName, ProbeSetFreeze.FullName + FROM Species, InbredSet, ProbeSetFreeze, ProbeFreeze, ProbeSetXRef, ProbeSet + WHERE Species.Id = InbredSet.SpeciesId and + InbredSet.Id = ProbeFreeze.InbredSetId and + ProbeFreeze.Id = ProbeSetFreeze.ProbeFreezeId and + ProbeSetFreeze.Name = '%s' and + ProbeSetFreeze.Id = ProbeSetXRef.ProbeSetFreezeId and + ProbeSetXRef.ProbeSetId = ProbeSet.Id and + ProbeSet.Name = '%s'""" % (dataset_name, line1.split('\t')[0]) + Cursor.execute(query) + result_info = Cursor.fetchone() + + yield { + "_index": "traits", + "_type": "trait", + "_source": { + "name": trait_name, + "species": result_info[0], + "group": result_info[1], + "dataset": dataset_name, + "dataset_fullname": result_info[2], + "samples": sample_list, + "transform_types": "qnorm" + } + } + +if __name__ == '__main__': + Conn = MySQLdb.Connect(**parse_db_uri()) + Cursor = Conn.cursor() + + #es = Elasticsearch([{ + # "host": ELASTICSEARCH_HOST, "port": ELASTICSEARCH_PORT + #}], timeout=60) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None + + es = get_elasticsearch_connection(for_user=False) + + #input_filename = 
"/home/zas1024/cfw_data/" + sys.argv[1] + ".txt" + #input_df = create_dataframe(input_filename) + #output_df = quantileNormalize(input_df) + + #output_df.to_csv('quant_norm.csv', sep='\t') + + #out_filename = sys.argv[1][:-4] + '_quantnorm.txt' + + #success, _ = bulk(es, set_data(sys.argv[1])) + + response = es.search( + index = "traits", doc_type = "trait", body = { + "query": { "match": { "name": "ENSMUSG00000028982" } } + } + ) + + print(response) \ No newline at end of file diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py index cce210c3..293a9ae6 100644 --- a/wqflask/utility/elasticsearch_tools.py +++ b/wqflask/utility/elasticsearch_tools.py @@ -52,7 +52,7 @@ def test_elasticsearch_connection(): if not es.ping(): logger.warning("Elasticsearch is DOWN") -def get_elasticsearch_connection(): +def get_elasticsearch_connection(for_user=True): """Return a connection to ES. Returns None on failure""" logger.info("get_elasticsearch_connection") es = None @@ -65,7 +65,8 @@ def get_elasticsearch_connection(): "host": ELASTICSEARCH_HOST, "port": ELASTICSEARCH_PORT }]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None - setup_users_index(es) + if for_user: + setup_users_index(es) es_logger = logging.getLogger("elasticsearch") es_logger.setLevel(logging.INFO) diff --git a/wqflask/wqflask/correlation/corr_scatter_plot.py b/wqflask/wqflask/correlation/corr_scatter_plot.py index 94711c67..831baf7e 100644 --- a/wqflask/wqflask/correlation/corr_scatter_plot.py +++ b/wqflask/wqflask/correlation/corr_scatter_plot.py @@ -6,44 +6,19 @@ from utility import corr_result_helpers from scipy import stats import numpy as np +import utility.logger +logger = utility.logger.getLogger(__name__ ) + class CorrScatterPlot(object): """Page that displays a correlation scatterplot with a line fitted to it""" def __init__(self, params): self.data_set_1 = data_set.create_dataset(params['dataset_1']) self.data_set_2 = data_set.create_dataset(params['dataset_2']) + #self.data_set_3 = data_set.create_dataset(params['dataset_3']) self.trait_1 = GeneralTrait(name=params['trait_1'], dataset=self.data_set_1) self.trait_2 = GeneralTrait(name=params['trait_2'], dataset=self.data_set_2) - - try: - width = int(params['width']) - except: - width = 800 - - try: - height = int(params['height']) - except: - height = 600 - - try: - circle_color = params['circle_color'] - except: - circle_color = '#3D85C6' - - try: - circle_radius = int(params['circle_radius']) - except: - circle_radius = 5 - - try: - line_color = params['line_color'] - except: - line_color = '#FF0000' - - try: - line_width = int(params['line_width']) - except: - line_width = 1 + #self.trait_3 = GeneralTrait(name=params['trait_3'], dataset=self.data_set_3) samples_1, samples_2, num_overlap = corr_result_helpers.normalize_values_with_samples(self.trait_1.data, self.trait_2.data) @@ -60,14 +35,18 @@ class CorrScatterPlot(object): x = np.array(vals_1) y = np.array(vals_2) - slope, intercept, r_value, p_value, _std_err = stats.linregress(x, y) + slope, intercept, r_value, p_value, std_err = stats.linregress(x, y) rx = stats.rankdata(x) ry = stats.rankdata(y) self.rdata = [] self.rdata.append(rx.tolist()) self.rdata.append(ry.tolist()) - srslope, srintercept, srr_value, srp_value, _srstd_err = stats.linregress(rx, ry) + srslope, srintercept, srr_value, srp_value, srstd_err = stats.linregress(rx, ry) + + #vals_3 = [] + #for sample in self.trait_3.data: + # vals_3.append(self.trait_3.data[sample].value) self.js_data = dict( data = 
self.data, @@ -89,13 +68,9 @@ class CorrScatterPlot(object): srslope = srslope, srintercept = srintercept, srr_value = srr_value, - srp_value = srp_value, + srp_value = srp_value - width = width, - height = height, - circle_color = circle_color, - circle_radius = circle_radius, - line_color = line_color, - line_width = line_width + #trait3 = self.trait_3.data, + #vals_3 = vals_3 ) self.jsdata = self.js_data diff --git a/wqflask/wqflask/show_trait/SampleList.py b/wqflask/wqflask/show_trait/SampleList.py index 6d84a960..78bb3b42 100644 --- a/wqflask/wqflask/show_trait/SampleList.py +++ b/wqflask/wqflask/show_trait/SampleList.py @@ -10,8 +10,12 @@ import numpy as np from scipy import stats from pprint import pformat as pf +import simplejson as json + import itertools +from utility.elasticsearch_tools import get_elasticsearch_connection + import utility.logger logger = utility.logger.getLogger(__name__ ) @@ -33,6 +37,8 @@ class SampleList(object): self.get_attributes() + self.sample_qnorm = get_transform_vals(self.dataset, this_trait) + if self.this_trait and self.dataset and self.dataset.type == 'ProbeSet': self.get_extra_attribute_values() @@ -152,36 +158,47 @@ class SampleList(object): return any(sample.variance for sample in self.sample_list) -#def z_score(vals): -# vals_array = np.array(vals) -# mean = np.mean(vals_array) -# stdv = np.std(vals_array) -# -# z_scores = [] -# for val in vals_array: -# z_score = (val - mean)/stdv -# z_scores.append(z_score) -# -# -# -# return z_scores - - -#def z_score(row): -# L = [n for n in row if not np.isnan(n)] -# m = np.mean(L) -# s = np.std(L) -# zL = [1.0 * (n - m) / s for n in L] -# if len(L) == len(row): return zL -# # deal with nan -# retL = list() -# for n in row: -# if np.isnan(n): -# retL.append(nan) -# else: -# retL.append(zL.pop(0)) -# assert len(zL) == 0 -# return retL +def get_transform_vals(dataset, trait): + es = get_elasticsearch_connection(for_user=False) + + logger.info("DATASET NAME:", dataset.name) + + query = '{"bool": {"must": [{"match": {"name": "%s"}}, {"match": {"dataset": "%s"}}]}}' % (trait.name, dataset.name) + + es_body = { + "query": { + "bool": { + "must": [ + { + "match": { + "name": "%s" % (trait.name) + } + }, + { + "match": { + "dataset": "%s" % (dataset.name) + } + } + ] + } + } + } + + response = es.search( index = "traits", doc_type = "trait", body = es_body ) + logger.info("THE RESPONSE:", response) + results = response['hits']['hits'] + + if len(results) > 0: + samples = results[0]['_source']['samples'] + + sample_dict = {} + for sample in samples: + sample_dict[sample['name']] = sample['qnorm'] + + logger.info("SAMPLE DICT:", sample_dict) + return sample_dict + else: + return None def natural_sort_key(x): """Get expected results when using as a key for sort - ints or strings are sorted properly""" diff --git a/wqflask/wqflask/show_trait/show_trait.py b/wqflask/wqflask/show_trait/show_trait.py index 8b801396..d6d83c02 100644 --- a/wqflask/wqflask/show_trait/show_trait.py +++ b/wqflask/wqflask/show_trait/show_trait.py @@ -26,9 +26,6 @@ from db import webqtlDatabaseFunction from pprint import pformat as pf -from utility.tools import flat_files, flat_file_exists -from utility.tools import get_setting - from utility.logger import getLogger logger = getLogger(__name__ ) @@ -306,7 +303,6 @@ def get_nearest_marker(this_trait, this_db): #return "", "" else: return result[0][0] - #return result[0][0], result[1][0] def get_genofiles(this_dataset): jsonfile = "%s/%s.json" % (webqtlConfig.GENODIR, this_dataset.group.name) 
diff --git a/wqflask/wqflask/static/new/css/corr_scatter_plot.css b/wqflask/wqflask/static/new/css/corr_scatter_plot.css index c62d4c9a..a2ebb252 100644 --- a/wqflask/wqflask/static/new/css/corr_scatter_plot.css +++ b/wqflask/wqflask/static/new/css/corr_scatter_plot.css @@ -1,13 +1,41 @@ -.chart { +.nvd3 .nv-axis.nv-x text { + font-size: 16px; + font-weight: normal; + fill: black; +} +.nvd3 .nv-axis.nv-y text { + font-size: 16px; + font-weight: normal; + fill: black; } -.main text { - font: 10px sans-serif; +.nv-x .nv-axis g path.domain { + stroke: black; + stroke-width: 2; } -.axis line, .axis path { - shape-rendering: crispEdges; +.nv-y .nv-axis g path.domain { stroke: black; - fill: none; + stroke-width: 2; +} + +.nvd3 .nv-axis.nv-x path.domain { + stroke-opacity: 1; +} + +.nvd3 .nv-axis.nv-y path.domain { + stroke-opacity: 1; +} + +line.nv-regLine { + stroke: red; + stroke-width: 1; +} + +.nv-axisMin-x, +.nv-axisMax-x, +.nv-axisMin-y, +.nv-axisMax-y { + display: none; } diff --git a/wqflask/wqflask/static/new/javascript/draw_corr_scatterplot.js b/wqflask/wqflask/static/new/javascript/draw_corr_scatterplot.js index cfde6f09..c290cdfe 100644 --- a/wqflask/wqflask/static/new/javascript/draw_corr_scatterplot.js +++ b/wqflask/wqflask/static/new/javascript/draw_corr_scatterplot.js @@ -1,3 +1,5 @@ +// http://gn2-lei.genenetwork.org/corr_scatter_plot2?dataset_1=HC_M2_0606_P&dataset_2=HC_M2_0606_P&dataset_3=HC_M2_0606_P&trait_1=1427571_at&trait_2=1457022_at&trait_3=1427571_at + var chart; var srchart; @@ -7,7 +9,7 @@ function drawg() { // chart.showLegend(false); chart.duration(300); - chart.color(d3.scale.category10().range()); + //chart.color(d3.scale.category10().range()); chart.pointRange([0, 400]); chart.pointDomain([0, 10]); // @@ -74,18 +76,47 @@ function getdata(size, shape) { slope: js_data.slope, intercept: js_data.intercept }); + + sizemin = 1; + sizemax = 50; + if ('vals_3' in js_data) { + datamin = d3.min(js_data.vals_3); + datamax = d3.max(js_data.vals_3); + colormin = $("#cocolorfrom").val(); + colormax = $("#cocolorto").val(); + compute = d3.interpolate(colormin, colormax); + linear = d3.scale.linear().domain([datamin, datamax]).range([0,1]); + } + for (j = 0; j < js_data.data[0].length; j++) { + if ('trait3' in js_data) { + if (js_data.indIDs[j] in js_data.trait3) { + datav = js_data.trait3[js_data.indIDs[j]].value; + // size = (sizemax - sizemin) * (datav - datamin) / (datamax - datamin) + sizemin; + sizev = map1to2(datamin, datamax, sizemin, sizemax, datav); + } + } else { + datav = 0; + sizev = sizemin; + } data[0].values.push({ x: js_data.data[0][j], y: js_data.data[1][j], name: js_data.indIDs[j], - size: size, - shape: shape + size: sizev, + shape: shape, + v3: datav }); } + console.log(data); return data; } +function map1to2 (min1, max1, min2, max2, v1) { + v2 = (v1 - min1) * (max2 - min2) / (max1 - min1) + min2; + return v2; +} + function srgetdata(size, shape) { var data = []; data.push({ @@ -94,6 +125,12 @@ function srgetdata(size, shape) { intercept: js_data.srintercept }); for (j = 0; j < js_data.rdata[0].length; j++) { + if (js_data.indIDs[j] in js_data.trait3) { + size = js_data.trait3[js_data.indIDs[j]].value; + //console.log("yes "+js_data.indIDs[j]+", "+size); + } else { + //console.log("no "+js_data.indIDs[j]); + } data[0].values.push({ x: js_data.rdata[0][j], y: js_data.rdata[1][j], @@ -163,13 +200,39 @@ function chartupdatewh() { window.dispatchEvent(new Event('resize')); } + function colorer(d) { + datamin = d3.min(js_data.vals_3); + datamax = 
d3.max(js_data.vals_3); + //colormin = d3.rgb(255,0,0); + //colormax = d3.rgb(0,255,0); + colormin = $("#cocolorfrom").val(); + colormax = $("#cocolorto").val(); + + console.log("colormin: "+colormin); + console.log("colormax: "+colormax); + + compute = d3.interpolate(colormin, colormax); + linear = d3.scale.linear().domain([datamin, datamax]).range([0,1]); + //console.log(d[0].x); + c= compute(linear(d[0].x)); + //console.log(c); + return c; + } + function chartupdatedata() { // var size = $("#marksize").val(); var shape = $("#markshape").val(); // - d3.select('#scatterplot2 svg').datum(nv.log(getdata(size, shape))).call(chart); + d3.select('#scatterplot2 svg').datum(getdata(size, shape)).call(chart); d3.select('#srscatterplot2 svg').datum(nv.log(srgetdata(size, shape))).call(srchart); + // + d3.selectAll('.nv-point') + .attr({ + 'stroke': colorer, + 'fill': colorer + }); + // nv.utils.windowResize(chart.update); nv.utils.windowResize(srchart.update); } diff --git a/wqflask/wqflask/static/new/javascript/show_trait.js b/wqflask/wqflask/static/new/javascript/show_trait.js index dfdafaf0..17afc814 100644 --- a/wqflask/wqflask/static/new/javascript/show_trait.js +++ b/wqflask/wqflask/static/new/javascript/show_trait.js @@ -559,6 +559,18 @@ })(this)); }; $('#reset').click(reset_samples_table); + switch_qnorm_data = function() { + return $('.trait_value_input').each((function(_this) { + return function(_index, element) { + transform_val = $(element).data('transform') + if (transform_val != "") { + $(element).val(transform_val.toFixed(3)); + } + return transform_val + }; + })(this)); + }; + $('#qnorm').click(switch_qnorm_data); get_sample_table_data = function(table_name) { var samples; samples = []; @@ -871,6 +883,7 @@ $('#exclude_group').click(edit_data_change); $('#block_outliers').click(edit_data_change); $('#reset').click(edit_data_change); + $('#qnorm').click(edit_data_change); return console.log("end"); }); diff --git a/wqflask/wqflask/static/new/javascript/show_trait_mapping_tools.js b/wqflask/wqflask/static/new/javascript/show_trait_mapping_tools.js index 81123de7..daa5b3f2 100644 --- a/wqflask/wqflask/static/new/javascript/show_trait_mapping_tools.js +++ b/wqflask/wqflask/static/new/javascript/show_trait_mapping_tools.js @@ -213,7 +213,7 @@ var form_data, url; console.log("RUNNING GEMMA"); url = "/loading"; - $('input[name=method]').val("gemma_bimbam"); + $('input[name=method]').val("gemma"); $('input[name=num_perm]').val(0); $('input[name=genofile]').val($('#genofile_gemma').val()); $('input[name=maf]').val($('input[name=maf_gemma]').val()); diff --git a/wqflask/wqflask/templates/corr_scatterplot.html b/wqflask/wqflask/templates/corr_scatterplot.html index 87588534..e0f017c2 100644 --- a/wqflask/wqflask/templates/corr_scatterplot.html +++ b/wqflask/wqflask/templates/corr_scatterplot.html @@ -1,222 +1,254 @@ -{% extends "base.html" %} - -{% block css %} - - - - - - - -{% endblock %} - -{% block content %} - -
- -

Correlation Scatterplot

- -
- - {% if trait_1.dataset.type == "ProbeSet" %} -
- X axis: - - {{trait_1.dataset.group.species + " " + trait_1.dataset.group.name + " " + trait_1.dataset.tissue + " " + trait_1.dataset.name + ": " + trait_1.name|string}} - -
-
- [{{trait_1.symbol}} on {{trait_1.location_repr}} Mb] - {{trait_1.description_display}} -
- {% elif trait_1.dataset.type == "Publish" %} -
- X axis: - - {{trait_1.dataset.group.species + " " + trait_1.dataset.group.name + " " + trait_1.dataset.name + ": " + trait_1.name|string}} - -
-
- PubMed: {{trait_1.pubmed_text}} - {{trait_1.description_display}} -
- {% endif %} - -
- - {% if trait_2.dataset.type == "ProbeSet" %} -
- Y axis: - - {{trait_2.dataset.group.species + " " + trait_2.dataset.group.name + " " + trait_2.dataset.tissue + " " + trait_2.dataset.name + ": " + trait_2.name|string}} - -
-
- [{{trait_2.symbol}} on {{trait_2.location_repr}} Mb] - {{trait_2.description_display}} -
- {% elif trait_2.dataset.type == "Publish" %} -
- Y axis: - - {{trait_2.dataset.group.species + " " + trait_2.dataset.group.name + " " + trait_2.dataset.name + ": " + trait_2.name|string}} - -
-
- PubMed: {{trait_2.pubmed_text}} - {{trait_2.description_display}} -
- {% endif %} - -
- -
- - - - - - -
Width pxHeight px
- - - - - -
Mark - - - -
- - - - - - - - -
Label - - px - Number - - px - Axis - - px - Line - - px -
- -
- - - -
-
- -
- -
Save as SVG
- -

Pearson Correlation Scatterplot

- -
- -
- - - - - - - - - - - - - - - -
StatisticValue
Number {{jsdata.num_overlap}}
Slope {{'%0.3f' % jsdata.slope}}
Intercept {{'%0.3f' % jsdata.intercept}}
r value {{'%0.3f' % jsdata.r_value}}
P value {% if jsdata.p_value < 0.001 %}{{'%0.3e' % jsdata.p_value}}{% else %}{{'%0.3f' % jsdata.p_value}}{% endif %}
- Regression Line -
- y = {{'%0.3f' % jsdata.slope}} * x + {{'%0.3f' % jsdata.intercept}} -
- -
- -
- -
- -
Save as SVG
- -

Spearman Rank Correlation Scatterplot

- -
- -
- - - - - - - - - - - - -
StatisticValue
Number {{jsdata.num_overlap}}
Slope {{'%0.3f' % jsdata.srslope}}
Intercept {{'%0.3f' % jsdata.srintercept}}
r value {{'%0.3f' % jsdata.srr_value}}
P value {% if jsdata.srp_value < 0.001 %}{{'%0.3e' % jsdata.srp_value}}{% else %}{{'%0.3f' % jsdata.srp_value}}{% endif %}
- -
- -
- -
- -{% endblock %} - -{% block js %} - - - - - - - - -{% endblock %} +{% extends "base.html" %} + +{% block css %} + + + + + +{% endblock %} + +{% block content %} + +
+ +

Correlation Scatterplot

+ +
+ {% if trait_1.dataset.type == "ProbeSet" %} +
+ X axis: + + {{trait_1.dataset.group.species + " " + trait_1.dataset.group.name + " " + trait_1.dataset.tissue + " " + trait_1.dataset.name + ": " + trait_1.name|string}} + +
+
+ [{{trait_1.symbol}} on {{trait_1.location_repr}} Mb] + {{trait_1.description_display}} +
+ {% elif trait_1.dataset.type == "Publish" %} +
+ X axis: + + {{trait_1.dataset.group.species + " " + trait_1.dataset.group.name + " " + trait_1.dataset.name + ": " + trait_1.name|string}} + +
+
+ PubMed: {{trait_1.pubmed_text}} + {{trait_1.description_display}} +
+ {% endif %} + +
+ + {% if trait_2.dataset.type == "ProbeSet" %} +
+ Y axis: + + {{trait_2.dataset.group.species + " " + trait_2.dataset.group.name + " " + trait_2.dataset.tissue + " " + trait_2.dataset.name + ": " + trait_2.name|string}} + +
+
+ [{{trait_2.symbol}} on {{trait_2.location_repr}} Mb] + {{trait_2.description_display}} +
+ {% elif trait_2.dataset.type == "Publish" %} +
+ Y axis: + + {{trait_2.dataset.group.species + " " + trait_2.dataset.group.name + " " + trait_2.dataset.name + ": " + trait_2.name|string}} + +
+
+ PubMed: {{trait_2.pubmed_text}} + {{trait_2.description_display}} +
+ {% endif %} + +
+ + {% if trait_3 %} + {% if trait_3.dataset.type == "ProbeSet" %} +
+ Cofactor: + + {{trait_3.dataset.group.species + " " + trait_3.dataset.group.name + " " + trait_3.dataset.tissue + " " + trait_3.dataset.name + ": " + trait_3.name|string}} + +
+
+ [{{trait_3.symbol}} on {{trait_3.location_repr}} Mb] + {{trait_3.description_display}} +
+ {% elif trait_3.dataset.type == "Publish" %} +
+ Cofactor: + + {{trait_3.dataset.group.species + " " + trait_3.dataset.group.name + " " + trait_3.dataset.name + ": " + trait_3.name|string}} + +
+
+ PubMed: {{trait_3.pubmed_text}} + {{trait_3.description_display}} +
+ {% endif %} + {% endif %} + + + + +
+ +
+ + + + + + +
Width pxHeight px
+ + + + + + + + +
Label + + px + Number + + px + Axis + + px + Line + + px +
+ +
+ + + +
+ +
+
+
+ Save as SVG +
+
+

Pearson Correlation Scatterplot

+
+
+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
StatisticValue
Number{{jsdata.num_overlap}}
Slope{{'%0.3f' % jsdata.slope}}
Intercept{{'%0.3f' % jsdata.intercept}}
r value{{'%0.3f' % jsdata.r_value}}
P value{% if jsdata.p_value < 0.001 %}{{'%0.3e' % jsdata.p_value}}{% else %}{{'%0.3f' % jsdata.p_value}}{% endif %}
+ Regression Line +
+ y = {{'%0.3f' % jsdata.slope}} * x + {{'%0.3f' % jsdata.intercept}} +
+
+ +
+ +
+ +
+ Save as SVG +
+
+

Spearman Rank Correlation Scatterplot

+
+
+ +
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
StatisticValue
Number{{jsdata.num_overlap}}
Slope{{'%0.3f' % jsdata.srslope}}
Intercept{{'%0.3f' % jsdata.srintercept}}
r value{{'%0.3f' % jsdata.srr_value}}
P value{% if jsdata.srp_value < 0.001 %}{{'%0.3e' % jsdata.srp_value}}{% else %}{{'%0.3f' % jsdata.srp_value}}{% endif %}
+
+ +
+ +
+ +{% endblock %} + +{% block js %} + + + + + + + + +{% endblock %} diff --git a/wqflask/wqflask/templates/show_trait_edit_data.html b/wqflask/wqflask/templates/show_trait_edit_data.html index a431821e..1402db47 100644 --- a/wqflask/wqflask/templates/show_trait_edit_data.html +++ b/wqflask/wqflask/templates/show_trait_edit_data.html @@ -54,6 +54,10 @@ +
+ {% if sample_groups[0].sample_qnorm is not none %} + + {% endif %}
@@ -111,7 +115,7 @@ {# Todo: Add IDs #} - previous_chr: previous_chr = marker['chr'] elif marker['chr'] < previous_chr: diff --git a/wqflask/wqflask/show_trait/SampleList.py b/wqflask/wqflask/show_trait/SampleList.py index 78bb3b42..8dbba530 100644 --- a/wqflask/wqflask/show_trait/SampleList.py +++ b/wqflask/wqflask/show_trait/SampleList.py @@ -37,7 +37,7 @@ class SampleList(object): self.get_attributes() - self.sample_qnorm = get_transform_vals(self.dataset, this_trait) + #self.sample_qnorm = get_transform_vals(self.dataset, this_trait) if self.this_trait and self.dataset and self.dataset.type == 'ProbeSet': self.get_extra_attribute_values() diff --git a/wqflask/wqflask/show_trait/show_trait.py b/wqflask/wqflask/show_trait/show_trait.py index d6d83c02..7b952af4 100644 --- a/wqflask/wqflask/show_trait/show_trait.py +++ b/wqflask/wqflask/show_trait/show_trait.py @@ -12,6 +12,8 @@ from collections import OrderedDict import redis Redis = redis.StrictRedis() +import scipy.stats as ss + from flask import Flask, g from htmlgen import HTMLgen2 as HT @@ -137,6 +139,8 @@ class ShowTrait(object): self.make_sample_lists() + self.qnorm_vals = quantile_normalize_vals(self.sample_groups) + # Todo: Add back in the ones we actually need from below, as we discover we need them hddn = OrderedDict() @@ -281,6 +285,44 @@ class ShowTrait(object): self.sample_groups = (primary_samples,) self.dataset.group.allsamples = all_samples_ordered +def quantile_normalize_vals(sample_groups): + def normf(trait_vals): + ranked_vals = ss.rankdata(trait_vals) + p_list = [] + for i, val in enumerate(trait_vals): + p_list.append(((i+1) - 0.5)/len(trait_vals)) + + z = ss.norm.ppf(p_list) + normed_vals = [] + for rank in ranked_vals: + normed_vals.append("%0.3f" % z[int(rank)-1]) + + return normed_vals + + qnorm_by_group = [] + for sample_type in sample_groups: + trait_vals = [] + for sample in sample_type.sample_list: + try: + trait_vals.append(float(sample.value)) + except: + continue + + qnorm_vals = normf(trait_vals) + + qnorm_vals_with_x = [] + counter = 0 + for sample in sample_type.sample_list: + if sample.display_value == "x": + qnorm_vals_with_x.append("x") + else: + qnorm_vals_with_x.append(qnorm_vals[counter]) + counter += 1 + + qnorm_by_group.append(qnorm_vals_with_x) + + return qnorm_by_group + def get_nearest_marker(this_trait, this_db): this_chr = this_trait.locus_chr logger.debug("this_chr:", this_chr) diff --git a/wqflask/wqflask/static/new/javascript/show_trait.js b/wqflask/wqflask/static/new/javascript/show_trait.js index 17afc814..5e2ecc33 100644 --- a/wqflask/wqflask/static/new/javascript/show_trait.js +++ b/wqflask/wqflask/static/new/javascript/show_trait.js @@ -549,6 +549,7 @@ }; $('#block_outliers').click(block_outliers); reset_samples_table = function() { + $('input[name="transform"]').val(""); return $('.trait_value_input').each((function(_this) { return function(_index, element) { console.log("value is:", $(element).val()); @@ -559,6 +560,52 @@ })(this)); }; $('#reset').click(reset_samples_table); + + log_normalize_data = function() { + return $('.trait_value_input').each((function(_this) { + return function(_index, element) { + current_value = $(element).data("value"); + if(isNaN(current_value)) { + return current_value + } else { + $(element).val(Math.log2(current_value).toFixed(3)); + return Math.log2(current_value).toFixed(3) + } + }; + })(this)); + }; + + qnorm_data = function() { + return $('.trait_value_input').each((function(_this) { + return function(_index, element) { + current_value = 
$(element).data("value"); + if(isNaN(current_value)) { + return current_value + } else { + $(element).val($(element).data("qnorm")); + return $(element).data("qnorm"); + } + }; + })(this)); + }; + + normalize_data = function() { + if ($('#norm_method option:selected').val() == 'log2'){ + if ($('input[name="transform"]').val() != "log2") { + log_normalize_data() + $('input[name="transform"]').val("log2") + } + } + else if ($('#norm_method option:selected').val() == 'qnorm'){ + if ($('input[name="transform"]').val() != "qnorm") { + qnorm_data() + $('input[name="transform"]').val("qnorm") + } + } + } + + $('#normalize').click(normalize_data); + switch_qnorm_data = function() { return $('.trait_value_input').each((function(_this) { return function(_index, element) { @@ -734,7 +781,7 @@ box_data = [trace1, trace2, trace3] } else { var box_layout = { - width: 500, + width: 300, height: 500, margin: { l: 50, @@ -834,7 +881,7 @@ var layout = { yaxis: { - range: [range_bottom, range_top] + range: [range_bottom, range_top], }, width: 1200, height: 500, @@ -884,6 +931,7 @@ $('#block_outliers').click(edit_data_change); $('#reset').click(edit_data_change); $('#qnorm').click(edit_data_change); + $('#normalize').click(edit_data_change); return console.log("end"); }); diff --git a/wqflask/wqflask/templates/index_page_orig.html b/wqflask/wqflask/templates/index_page_orig.html index 2a5556ea..dba3e266 100755 --- a/wqflask/wqflask/templates/index_page_orig.html +++ b/wqflask/wqflask/templates/index_page_orig.html @@ -34,11 +34,11 @@
-
-
- +
+
+
-
+
@@ -46,9 +46,9 @@
-
-
- +
+
+
@@ -56,21 +56,21 @@
-
-
- +
+
+
-
-
- +
+
+
-
+
@@ -85,8 +85,8 @@
-
-
+
+
@@ -95,8 +95,8 @@
-
-
+
+
Enter terms, genes, ID numbers in the Search field.
Use * or ? wildcards (Cyp*a?, synap*).
Use quotes for terms such as "tyrosine kinase". @@ -106,8 +106,8 @@
-
-
+
+
@@ -115,8 +115,8 @@
-
-
+
+
diff --git a/wqflask/wqflask/templates/show_trait.html b/wqflask/wqflask/templates/show_trait.html index 4aad4242..f67fff90 100644 --- a/wqflask/wqflask/templates/show_trait.html +++ b/wqflask/wqflask/templates/show_trait.html @@ -35,6 +35,7 @@ +
diff --git a/wqflask/wqflask/templates/show_trait_edit_data.html b/wqflask/wqflask/templates/show_trait_edit_data.html index 1402db47..482d1d88 100644 --- a/wqflask/wqflask/templates/show_trait_edit_data.html +++ b/wqflask/wqflask/templates/show_trait_edit_data.html @@ -9,7 +9,7 @@ needed.

-
+
{% for attribute in sample_groups[0].attributes %} @@ -42,7 +39,6 @@
{% endif %} -
@@ -55,9 +51,18 @@
+
+ + +
+

@@ -79,6 +84,7 @@ {% for sample_type in sample_groups %} + {% set outer_loop = loop %}

{{ sample_type.header }}


@@ -115,7 +121,7 @@ {# Todo: Add IDs #} -
- {% if g.user_session.logged_in and (g.user_session.num_collections > 0) %} + {% if g.user_session.logged_in %} + {% if g.user_session.num_collections < 1 %} No collections available. Please add traits to a collection to use them as covariates. + {% endif %} {% elif g.cookie_session.display_num_collections() == "" %} No collections available. Please add traits to a collection to use them as covariates. {% else %} diff --git a/wqflask/wqflask/user_manager.py b/wqflask/wqflask/user_manager.py index 5d388d66..830c7864 100644 --- a/wqflask/wqflask/user_manager.py +++ b/wqflask/wqflask/user_manager.py @@ -67,7 +67,10 @@ class AnonUser(object): # but wouldn't set cookies for staging and my branch. The new code (using @app.after_request) seems to work. @app.after_request def set_cookie(response): - response.set_cookie(self.cookie_name, self.cookie) + if self.cookie: + pass + else: + response.set_cookie(self.cookie_name, self.cookie) return response #@after.after_this_request -- cgit v1.2.3
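
Addendum — a minimal usage sketch of the elasticsearch_tools helpers that the patches above converge on. This is illustrative only: it assumes the GN2 environment (so utility.elasticsearch_tools is importable) and an Elasticsearch server reachable at the configured ELASTICSEARCH_HOST/ELASTICSEARCH_PORT; the e-mail address below is a hypothetical placeholder, not a real record.

#+BEGIN_SRC python
from utility.elasticsearch_tools import (
    get_elasticsearch_connection,
    get_user_by_unique_column,
)

# Returns an Elasticsearch client, or None when ES is down or unconfigured,
# so callers can degrade gracefully instead of crashing.
es = get_elasticsearch_connection()

if es is None:
    print("Elasticsearch unavailable; login-dependent features are disabled")
else:
    # Look up a user record by its (non-analyzed) email_address field;
    # the helper returns the matching document's _source dict, or None.
    user = get_user_by_unique_column(es, "email_address", "someone@example.org")
    if user:
        print("Found user:", user.get("full_name"))
    else:
        print("No matching user record")
#+END_SRC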