author     Pjotr Prins    2016-06-18 07:45:07 +0000
committer  Pjotr Prins    2016-06-18 07:45:07 +0000
commit     ee70713e9a296ac9a855c73c3f49f585bae8946a (patch)
tree       9a22d8fac2273e55d5761741616ea1905a39bf09
parent     3a59631a4a95f4b4a1911e3a10912cdcbdf31200 (diff)
download   genenetwork2-ee70713e9a296ac9a855c73c3f49f585bae8946a.tar.gz
Logger: introducing logging
-rw-r--r--  wqflask/runserver.py        5
-rw-r--r--  wqflask/utility/tools.py    1
-rw-r--r--  wqflask/wqflask/views.py    139
3 files changed, 72 insertions(+), 73 deletions(-)
diff --git a/wqflask/runserver.py b/wqflask/runserver.py
index f40d8a48..bfc27827 100644
--- a/wqflask/runserver.py
+++ b/wqflask/runserver.py
@@ -20,7 +20,10 @@ from wqflask import app
 #_ch = logging.StreamHandler()
 #_log.addHandler(_ch)
 
-print app.config
+import utility.logger
+logger = utility.logger.getLogger(__name__ )
+
+logger.info(app.config)
 
 from utility.tools import WEBSERVER_MODE
 
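The utility.logger module imported above is not part of this patch. Judging from the call sites in views.py below, which keep print's comma-separated argument style (e.g. logger.info("key is:", pf(key))), the wrapper presumably joins its positional arguments into one message rather than treating them as %-format parameters the way the stdlib does. A minimal sketch of such a wrapper, assuming that behaviour (the real utility/logger.py may differ):

    # utility/logger.py -- hypothetical sketch, not the actual module
    import logging

    logging.basicConfig(level=logging.INFO)

    class GNLogger(object):
        """Wrap a stdlib logger, accepting print-style argument lists."""

        def __init__(self, name):
            self.logger = logging.getLogger(name)

        def info(self, *args):
            # Join all arguments into one message so that calls like
            # logger.info("key is:", key) behave the way print() did.
            self.logger.info(" ".join(str(a) for a in args))

    def getLogger(name):
        return GNLogger(name)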
diff --git a/wqflask/utility/tools.py b/wqflask/utility/tools.py
index 08495c2f..824ce330 100644
--- a/wqflask/utility/tools.py
+++ b/wqflask/utility/tools.py
@@ -8,7 +8,6 @@ from wqflask import app
 # Use the standard logger here to avoid a circular dependency
 import logging
 logger = logging.getLogger(__name__ )
-logging.basicConfig(level=logging.INFO)
 
 def get_setting(command_id,guess=None):
     """Resolve a setting from the environment or the global settings in
diff --git a/wqflask/wqflask/views.py b/wqflask/wqflask/views.py
index 06db7494..c07e03ff 100644
--- a/wqflask/wqflask/views.py
+++ b/wqflask/wqflask/views.py
@@ -1,4 +1,7 @@
 # -*- coding: utf-8 -*-
+#
+# Main routing table for GN2
+
 from __future__ import absolute_import, division, print_function
 
 import sys
@@ -12,7 +15,6 @@ import cPickle as pickle
 import uuid
 
 import simplejson as json
-#import json
 import yaml
 
 #Switching from Redis to StrictRedis; might cause some issues
@@ -23,13 +25,10 @@ import flask
 import base64
 import array
 import sqlalchemy
-#import config
-
 from wqflask import app
-
 from flask import (render_template, request, make_response, Response,
-                   Flask, g, config, jsonify, redirect, url_for, send_from_directory)
-
+                   Flask, g, config, jsonify, redirect, url_for,
+                   send_from_directory)
 from wqflask import search_results
 from wqflask import gsearch
 from wqflask import update_search_results
@@ -45,7 +44,6 @@ from wqflask.marker_regression import marker_regression_gn1
 from wqflask.correlation import show_corr_results
 from wqflask.correlation_matrix import show_corr_matrix
 from wqflask.correlation import corr_scatter_plot
-
 from wqflask.wgcna import wgcna_analysis
 from wqflask.ctl import ctl_analysis
 
@@ -61,9 +59,8 @@ from pprint import pformat as pf
 from wqflask import user_manager
 from wqflask import collect
 
-#import logging
-#logging.basicConfig(filename="/tmp/gn_log", level=logging.INFO)
-#_log = logging.getLogger("correlation")
+import utility.logger
+logger = utility.logger.getLogger(__name__ )
 
 @app.before_request
 def connect_db():
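Most of the conversions below keep print's comma-separated style, e.g. logger.info("img_path:", img_path). That form only works if the custom wrapper joins its arguments; with the stdlib API, extra positional arguments are lazy %-format parameters for the message string, and a bare "img_path:" with one argument fails with "not all arguments converted during string formatting" when the record is emitted. For comparison, the stdlib-correct form:

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    img_path = "heatmap_1234.png"  # illustrative value

    # print-style: with the stdlib this raises a formatting error at
    # emit time:
    #   logger.info("img_path:", img_path)

    # stdlib style: extra arguments are lazy %-format parameters.
    logger.info("img_path: %s", img_path)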
@@ -76,28 +73,28 @@ def connect_db():
 
 @app.route("/")
 def index_page():
-    print("Sending index_page")
+    logger.info("Sending index_page")
     #create_datasets_list()
     #key = "all_datasets"
     #result = Redis.get(key)
     #if result:
-    #    print("Cache hit!!!")
+    #    logger.info("Cache hit!!!")
     #    result = pickle.loads(result)
     #else:
     #    with Bench("Creating DataSets object"):
     #        ds = DataSets()
     #    Redis.set(key, pickle.dumps(result, pickle.HIGHEST_PROTOCOL))
     #    Redis.expire(key, 2*60)
-    #print("[orange] ds:", ds.datasets)
+    #logger.info("[orange] ds:", ds.datasets)
     return render_template("index_page.html")
 
 
 @app.route("/tmp/<img_path>")
 def tmp_page(img_path):
-    print("In tmp_page")
-    print("img_path:", img_path)
+    logger.info("In tmp_page")
+    logger.info("img_path:", img_path)
     initial_start_vars = request.form
-    print("initial_start_vars:", initial_start_vars)
+    logger.info("initial_start_vars:", initial_start_vars)
     imgfile = open(GENERATED_IMAGE_DIR + img_path, 'rb')
     imgdata = imgfile.read()
     imgB64 = imgdata.encode("base64")
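tmp_page reads a generated image from disk and inlines it base64-encoded. Note that imgdata.encode("base64") is a Python 2 bytes-to-bytes codec that no longer exists in Python 3; the base64 module does the same job portably. A sketch (the GENERATED_IMAGE_DIR value is illustrative):

    import base64
    import os

    GENERATED_IMAGE_DIR = "/tmp/generated/"  # illustrative value

    def read_image_b64(img_path):
        # Read the raw bytes and base64-encode them. Unlike
        # imgdata.encode("base64"), this works on Python 2 and 3, and the
        # with-block also closes the file handle, which the original omits.
        with open(os.path.join(GENERATED_IMAGE_DIR, img_path), "rb") as fh:
            return base64.b64encode(fh.read())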
@@ -108,15 +105,15 @@ def tmp_page(img_path):
 
 #@app.route("/data_sharing")
 #def data_sharing_page():
-#    print("In data_sharing")
+#    logger.info("In data_sharing")
 #    fd = webqtlFormData.webqtlFormData(request.args)
-#    print("1Have fd")
+#    logger.info("1Have fd")
 #    sharingInfoObject = SharingInfo.SharingInfo(request.args['GN_AccessionId'], None)
 #    info, htmlfilelist = sharingInfoObject.getBody(infoupdate="")
-#    print("type(htmlfilelist):", type(htmlfilelist))
+#    logger.info("type(htmlfilelist):", type(htmlfilelist))
 #    htmlfilelist = htmlfilelist.encode("utf-8")
 #    #template_vars = SharingInfo.SharingInfo(request.args['GN_AccessionId'], None)
-#    print("1 Made it to rendering")
+#    logger.info("1 Made it to rendering")
 #    return render_template("data_sharing.html",
 #                            info=info,
 #                            htmlfilelist=htmlfilelist)
@@ -124,37 +121,37 @@ def tmp_page(img_path):
 
 @app.route("/search", methods=('GET',))
 def search_page():
-    print("in search_page")
+    logger.info("in search_page")
     if 'info_database' in request.args:
-        print("Going to sharing_info_page")
+        logger.info("Going to sharing_info_page")
         template_vars = sharing_info_page()
         if template_vars.redirect_url:
-            print("Going to redirect")
+            logger.info("Going to redirect")
             return flask.redirect(template_vars.redirect_url)
         else:
             return render_template("data_sharing.html", **template_vars.__dict__)
     else:
         key = "search_results:v1:" + json.dumps(request.args, sort_keys=True)
-        print("key is:", pf(key))
+        logger.info("key is:", pf(key))
         if USE_REDIS:
             with Bench("Trying Redis cache"):
                 result = Redis.get(key)
         else:
-            print("Skipping Redis cache (USE_REDIS=False)")
+            logger.info("Skipping Redis cache (USE_REDIS=False)")
             result = None
 
         if result:
-            print("Cache hit on search results!!!")
-            print("USE_REDIS=",USE_REDIS)
+            logger.info("Cache hit on search results!!!")
+            logger.info("USE_REDIS=",USE_REDIS)
             with Bench("Loading results"):
                 result = pickle.loads(result)
         else:
-            print("calling search_results.SearchResultPage")
-            print("request.args is", request.args)
+            logger.info("calling search_results.SearchResultPage")
+            logger.info("request.args is", request.args)
             the_search = search_results.SearchResultPage(request.args)
             result = the_search.__dict__
 
-            print("result: ", pf(result))
+            logger.info("result: ", pf(result))
             if USE_REDIS:
                 Redis.set(key, pickle.dumps(result, pickle.HIGHEST_PROTOCOL))
                 Redis.expire(key, 60*60)
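search_page is a textbook cache-aside: the key is the versioned, canonicalised query (json.dumps with sort_keys=True, so argument order cannot split the cache), a hit is unpickled, and a miss computes the result and stores it with a one-hour TTL. The same skeleton reduced to its essentials (cached_search and compute are illustrative names, not GN2 code):

    import json
    import pickle

    def cached_search(redis_conn, args, compute, version="v1", ttl=60 * 60):
        # sort_keys=True canonicalises the dict so that equivalent queries
        # always map to the same cache key.
        key = "search_results:%s:%s" % (version, json.dumps(args, sort_keys=True))
        raw = redis_conn.get(key)
        if raw is not None:
            return pickle.loads(raw)                      # cache hit
        result = compute(args)                            # cache miss
        redis_conn.set(key, pickle.dumps(result, pickle.HIGHEST_PROTOCOL))
        redis_conn.expire(key, ttl)                       # drop after ttl seconds
        return result

Bumping the version prefix ("v1", "v3", "v5", ...), as the heatmap and marker-regression routes below do, invalidates every stale entry without deleting anything from Redis.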
@@ -175,7 +172,7 @@ def gsearchact():
 
 @app.route("/gsearch_updating", methods=('POST',))
 def gsearch_updating():
-    print("REQUEST ARGS:", request.values)
+    logger.info("REQUEST ARGS:", request.values)
     result = update_search_results.GSearch(request.args).__dict__
     return result['results']
     # type = request.args['type']
@@ -200,12 +197,12 @@ def help():
 
 @app.route("/wgcna_setup", methods=('POST',))
 def wcgna_setup():
-    print("In wgcna, request.form is:", request.form)             # We are going to get additional user input for the analysis
+    logger.info("In wgcna, request.form is:", request.form)             # We are going to get additional user input for the analysis
     return render_template("wgcna_setup.html", **request.form)          # Display them using the template
 
 @app.route("/wgcna_results", methods=('POST',))
 def wcgna_results():
-    print("In wgcna, request.form is:", request.form)
+    logger.info("In wgcna, request.form is:", request.form)
     wgcna = wgcna_analysis.WGCNA()                                # Start R, load the package and pointers and create the analysis
     wgcnaA = wgcna.run_analysis(request.form)                     # Start the analysis, a wgcnaA object should be a separate long running thread
     result = wgcna.process_results(wgcnaA)                        # After the analysis is finished store the result
@@ -213,12 +210,12 @@ def wcgna_results():
 
 @app.route("/ctl_setup", methods=('POST',))
 def ctl_setup():
-    print("In ctl, request.form is:", request.form)             # We are going to get additional user input for the analysis
+    logger.info("In ctl, request.form is:", request.form)             # We are going to get additional user input for the analysis
     return render_template("ctl_setup.html", **request.form)          # Display them using the template
 
 @app.route("/ctl_results", methods=('POST',))
 def ctl_results():
-    print("In ctl, request.form is:", request.form)
+    logger.info("In ctl, request.form is:", request.form)
     ctl = ctl_analysis.CTL()                                  # Start R, load the package and pointers and create the analysis
     ctlA = ctl.run_analysis(request.form)                     # Start the analysis, a ctlA object should be a separate long running thread
     result = ctl.process_results(ctlA)                        # After the analysis is finished store the result
@@ -257,11 +254,11 @@ def environments():
 @app.route('/export_trait_excel', methods=('POST',))
 def export_trait_excel():
     """Excel file consisting of the sample data from the trait data and analysis page"""
-    print("In export_trait_excel")
-    print("request.form:", request.form)
+    logger.info("In export_trait_excel")
+    logger.info("request.form:", request.form)
     sample_data = export_trait_data.export_sample_table(request.form)
 
-    print("sample_data - type: %s -- size: %s" % (type(sample_data), len(sample_data)))
+    logger.info("sample_data - type: %s -- size: %s" % (type(sample_data), len(sample_data)))
 
     buff = StringIO.StringIO()
     workbook = xlsxwriter.Workbook(buff, {'in_memory': True})
@@ -282,11 +279,11 @@ def export_trait_excel():
 @app.route('/export_trait_csv', methods=('POST',))
 def export_trait_csv():
     """CSV file consisting of the sample data from the trait data and analysis page"""
-    print("In export_trait_csv")
-    print("request.form:", request.form)
+    logger.info("In export_trait_csv")
+    logger.info("request.form:", request.form)
     sample_data = export_trait_data.export_sample_table(request.form)
 
-    print("sample_data - type: %s -- size: %s" % (type(sample_data), len(sample_data)))
+    logger.info("sample_data - type: %s -- size: %s" % (type(sample_data), len(sample_data)))
 
     buff = StringIO.StringIO()
     writer = csv.writer(buff)
@@ -326,22 +323,22 @@ def export_perm_data():
 def show_trait_page():
     # Here it's currently too complicated not to use an fd that is a webqtlFormData
     #fd = webqtlFormData.webqtlFormData(request.args)
-    #print("stp y1:", pf(vars(fd)))
+    #logger.info("stp y1:", pf(vars(fd)))
     template_vars = show_trait.ShowTrait(request.args)
-    #print("js_data before dump:", template_vars.js_data)
+    #logger.info("js_data before dump:", template_vars.js_data)
     template_vars.js_data = json.dumps(template_vars.js_data,
                                        default=json_default_handler,
                                        indent="   ")
     # Sorting the keys messes up the ordered dictionary, so don't do that
                                        #sort_keys=True)
 
-    #print("js_data after dump:", template_vars.js_data)
-    #print("show_trait template_vars:", pf(template_vars.__dict__))
+    #logger.info("js_data after dump:", template_vars.js_data)
+    #logger.info("show_trait template_vars:", pf(template_vars.__dict__))
     return render_template("show_trait.html", **template_vars.__dict__)
 
 @app.route("/heatmap", methods=('POST',))
 def heatmap_page():
-    print("In heatmap, request.form is:", pf(request.form))
+    logger.info("In heatmap, request.form is:", pf(request.form))
 
     start_vars = request.form
     temp_uuid = uuid.uuid4()
@@ -350,17 +347,17 @@ def heatmap_page():
     if traits[0] != "":
         version = "v5"
         key = "heatmap:{}:".format(version) + json.dumps(start_vars, sort_keys=True)
-        print("key is:", pf(key))
+        logger.info("key is:", pf(key))
         with Bench("Loading cache"):
             result = Redis.get(key)
 
         if result:
-            print("Cache hit!!!")
+            logger.info("Cache hit!!!")
             with Bench("Loading results"):
                 result = pickle.loads(result)
 
         else:
-            print("Cache miss!!!")
+            logger.info("Cache miss!!!")
 
             template_vars = heatmap.Heatmap(request.form, temp_uuid)
             template_vars.js_data = json.dumps(template_vars.js_data,
@@ -370,10 +367,10 @@ def heatmap_page():
             result = template_vars.__dict__
 
             for item in template_vars.__dict__.keys():
-                print("  ---**--- {}: {}".format(type(template_vars.__dict__[item]), item))
+                logger.info("  ---**--- {}: {}".format(type(template_vars.__dict__[item]), item))
 
             pickled_result = pickle.dumps(result, pickle.HIGHEST_PROTOCOL)
-            print("pickled result length:", len(pickled_result))
+            logger.info("pickled result length:", len(pickled_result))
             Redis.set(key, pickled_result)
             Redis.expire(key, 60*60)
 
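The Bench("...") blocks used throughout these routes come from a timing helper imported elsewhere in the codebase and not shown in this patch. A minimal sketch of what such a context manager plausibly looks like, assuming it simply reports elapsed wall-clock time:

    import time

    class Bench(object):
        """Time a block of code and report the elapsed wall-clock time."""

        def __init__(self, name):
            self.name = name

        def __enter__(self):
            self.start = time.time()
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            # A real implementation might route this through the logger.
            print("%s took %.3f seconds" % (self.name, time.time() - self.start))

    # usage, as in the routes above:
    #   with Bench("Loading cache"):
    #       result = Redis.get(key)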
@@ -429,7 +426,7 @@ def marker_regression_page():
         'mapmethod_rqtl_geno',
         'mapmodel_rqtl_geno'
     )
-    print("Marker regression called with initial_start_vars:", initial_start_vars)
+    logger.info("Marker regression called with initial_start_vars:", initial_start_vars)
     start_vars = {}
     for key, value in initial_start_vars.iteritems():
         if key in wanted or key.startswith(('value:')):
@@ -437,21 +434,21 @@ def marker_regression_page():
 
     version = "v3"
     key = "marker_regression:{}:".format(version) + json.dumps(start_vars, sort_keys=True)
-    print("key is:", pf(key))
+    logger.info("key is:", pf(key))
     with Bench("Loading cache"):
         result = None # Just for testing
         #result = Redis.get(key)
 
-    #print("************************ Starting result *****************")
-    #print("result is [{}]: {}".format(type(result), result))
-    #print("************************ Ending result ********************")
+    #logger.info("************************ Starting result *****************")
+    #logger.info("result is [{}]: {}".format(type(result), result))
+    #logger.info("************************ Ending result ********************")
 
     if result:
-        print("Cache hit!!!")
+        logger.info("Cache hit!!!")
         with Bench("Loading results"):
             result = pickle.loads(result)
     else:
-        print("Cache miss!!!")
+        logger.info("Cache miss!!!")
         with Bench("Total time in MarkerRegression"):
             template_vars = marker_regression.MarkerRegression(start_vars, temp_uuid)
 
@@ -464,9 +461,9 @@ def marker_regression_page():
         if result['pair_scan']:
             with Bench("Rendering template"):
                 img_path = result['pair_scan_filename']
-                print("img_path:", img_path)
+                logger.info("img_path:", img_path)
                 initial_start_vars = request.form
-                print("initial_start_vars:", initial_start_vars)
+                logger.info("initial_start_vars:", initial_start_vars)
                 imgfile = open(TEMPDIR + img_path, 'rb')
                 imgdata = imgfile.read()
                 imgB64 = imgdata.encode("base64")
@@ -475,12 +472,12 @@ def marker_regression_page():
                 rendered_template = render_template("pair_scan_results.html", **result)
         else:
             #for item in template_vars.__dict__.keys():
-            #    print("  ---**--- {}: {}".format(type(template_vars.__dict__[item]), item))
+            #    logger.info("  ---**--- {}: {}".format(type(template_vars.__dict__[item]), item))
 
             gn1_template_vars = marker_regression_gn1.MarkerRegression(result).__dict__
 
             pickled_result = pickle.dumps(result, pickle.HIGHEST_PROTOCOL)
-            print("pickled result length:", len(pickled_result))
+            logger.info("pickled result length:", len(pickled_result))
             Redis.set(key, pickled_result)
             Redis.expire(key, 1*60)
 
@@ -490,9 +487,9 @@ def marker_regression_page():
     # with Bench("Rendering template"):
         # if result['pair_scan'] == True:
             # img_path = result['pair_scan_filename']
-            # print("img_path:", img_path)
+            # logger.info("img_path:", img_path)
             # initial_start_vars = request.form
-            # print("initial_start_vars:", initial_start_vars)
+            # logger.info("initial_start_vars:", initial_start_vars)
             # imgfile = open(TEMPDIR + '/' + img_path, 'rb')
             # imgdata = imgfile.read()
             # imgB64 = imgdata.encode("base64")
@@ -508,7 +505,7 @@ def marker_regression_page():
 
 @app.route("/export", methods = ('POST',))
 def export():
-    print("request.form:", request.form)
+    logger.info("request.form:", request.form)
     svg_xml = request.form.get("data", "Invalid data")
     filename = request.form.get("filename", "manhattan_plot_snp")
     response = Response(svg_xml, mimetype="image/svg+xml")
@@ -518,9 +515,9 @@ def export():
 @app.route("/export_pdf", methods = ('POST',))
 def export_pdf():
     import cairosvg
-    print("request.form:", request.form)
+    logger.info("request.form:", request.form)
     svg_xml = request.form.get("data", "Invalid data")
-    print("svg_xml:", svg_xml)
+    logger.info("svg_xml:", svg_xml)
     filename = request.form.get("filename", "interval_map_pdf")
     filepath = GENERATED_IMAGE_DIR+filename
     pdf_file = cairosvg.svg2pdf(bytestring=svg_xml)
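export_pdf hands the browser-supplied SVG to CairoSVG. When no write_to target is given, cairosvg.svg2pdf(bytestring=...) returns the rendered PDF as a byte string, which the route then writes under GENERATED_IMAGE_DIR. A self-contained sketch (the output path is illustrative):

    import cairosvg

    svg_xml = b'<svg xmlns="http://www.w3.org/2000/svg" width="10" height="10"/>'

    # With no write_to target, svg2pdf returns the PDF document as bytes.
    pdf_bytes = cairosvg.svg2pdf(bytestring=svg_xml)

    # Alternatively, write straight to a file path.
    cairosvg.svg2pdf(bytestring=svg_xml, write_to="/tmp/interval_map.pdf")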
@@ -530,14 +527,14 @@ def export_pdf():
 
 @app.route("/corr_compute", methods=('POST',))
 def corr_compute_page():
-    print("In corr_compute, request.form is:", pf(request.form))
+    logger.info("In corr_compute, request.form is:", pf(request.form))
     #fd = webqtlFormData.webqtlFormData(request.form)
     template_vars = show_corr_results.CorrelationResults(request.form)
     return render_template("correlation_page.html", **template_vars.__dict__)
 
 @app.route("/corr_matrix", methods=('POST',))
 def corr_matrix_page():
-    print("In corr_matrix, request.form is:", pf(request.form))
+    logger.info("In corr_matrix, request.form is:", pf(request.form))
 
     start_vars = request.form
     traits = [trait.strip() for trait in start_vars['trait_list'].split(',')]
@@ -563,7 +560,7 @@ def corr_scatter_plot_page():
 # Todo: Can we simplify this? -Sam
 def sharing_info_page():
     """Info page displayed when the user clicks the "Info" button next to the dataset selection"""
-    print("In sharing_info_page")
+    logger.info("In sharing_info_page")
     fd = webqtlFormData.webqtlFormData(request.args)
     template_vars = SharingInfoPage.SharingInfoPage(fd)
     return template_vars
@@ -594,7 +591,7 @@ def json_default_handler(obj):
     if hasattr(obj, '__dict__'):
         return obj.__dict__
     #elif type(obj) == "Dataset":
-    #     print("Not going to serialize Dataset")
+    #     logger.info("Not going to serialize Dataset")
     #    return None
     else:
         raise TypeError, 'Object of type %s with value of %s is not JSON serializable' % (
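json_default_handler is the escape hatch that json.dumps calls for objects it cannot serialise natively (show_trait_page above passes it via default=json_default_handler); the hunk shows its tail, an obj.__dict__ fallback plus a Python 2 style raise TypeError, '...' statement. A self-contained sketch of the pattern follows; the isoformat branch is an assumption, since only the __dict__ fallback appears above:

    import datetime
    import json

    def json_default_handler(obj):
        # json.dumps calls this for any object it cannot serialise itself.
        if hasattr(obj, "isoformat"):      # datetimes -> ISO-8601 strings
            return obj.isoformat()
        if hasattr(obj, "__dict__"):       # plain objects -> their attributes
            return obj.__dict__
        raise TypeError("Object of type %s with value %r is not JSON serializable"
                        % (type(obj), obj))

    class Trait(object):
        def __init__(self):
            self.name = "BXD trait"
            self.created = datetime.datetime(2016, 6, 18)

    # The handler is applied recursively: Trait serialises via __dict__,
    # and the datetime inside it via isoformat().
    print(json.dumps(Trait(), default=json_default_handler, indent=2))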