Diffstat (limited to 'wqflask/utility')
-rw-r--r--  wqflask/utility/elasticsearch_tools.py  40
1 file changed, 24 insertions(+), 16 deletions(-)
diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py
index 4fc0035c..a964b025 100644
--- a/wqflask/utility/elasticsearch_tools.py
+++ b/wqflask/utility/elasticsearch_tools.py
@@ -1,23 +1,31 @@
-es = None
-try:
-    from elasticsearch import Elasticsearch, TransportError
-    from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
+from elasticsearch import Elasticsearch, TransportError
+import logging
 
-    es = Elasticsearch([{
-        "host": ELASTICSEARCH_HOST
-        , "port": ELASTICSEARCH_PORT
-    }]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
-
-except:
+def get_elasticsearch_connection():
     es = None
+    try:
+        from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
+
+        es = Elasticsearch([{
+            "host": ELASTICSEARCH_HOST
+            , "port": ELASTICSEARCH_PORT
+        }]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
+
+        es_logger = logging.getLogger("elasticsearch")
+        es_logger.setLevel(logging.INFO)
+        es_logger.addHandler(logging.NullHandler())
+    except:
+        es = None
+
+    return es
 
-def get_user_by_unique_column(column_name, column_value):
-    return get_item_by_unique_column(column_name, column_value, index="users", doc_type="local")
+def get_user_by_unique_column(es, column_name, column_value, index="users", doc_type="local"):
+    return get_item_by_unique_column(es, column_name, column_value, index=index, doc_type=doc_type)
 
-def save_user(user, user_id):
-    es_save_data("users", "local", user, user_id)
+def save_user(es, user, user_id):
+    es_save_data(es, "users", "local", user, user_id)
 
-def get_item_by_unique_column(column_name, column_value, index, doc_type):
+def get_item_by_unique_column(es, column_name, column_value, index, doc_type):
     item_details = None
     try:
         response = es.search(
@@ -32,7 +40,7 @@ def get_item_by_unique_column(column_name, column_value, index, doc_type):
         pass
     return item_details
 
-def es_save_data(index, doc_type, data_item, data_id,):
+def es_save_data(es, index, doc_type, data_item, data_id,):
     from time import sleep
     es.create(index, doc_type, body=data_item, id=data_id)
     sleep(1) # Delay 1 second to allow indexing
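
After this change, callers obtain the connection themselves and pass it into each helper instead of relying on a module-level `es`. Below is a minimal usage sketch, assuming the module is importable as utility.elasticsearch_tools; the email field, user dict, and user id are placeholders for illustration, not values taken from this commit.

# Hypothetical usage sketch (not part of this commit).
from utility.elasticsearch_tools import (
    get_elasticsearch_connection, save_user, get_user_by_unique_column)

es = get_elasticsearch_connection()  # returns None if ES host/port are unset or setup fails
if es is not None:
    # Placeholder record and id, for illustration only.
    save_user(es, {"email_address": "user@example.com"}, "some-user-id")
    user = get_user_by_unique_column(es, "email_address", "user@example.com")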