aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.github/workflows/main.yml4
-rw-r--r--README.md17
-rwxr-xr-xbin/genenetwork27
-rw-r--r--doc/README.org2
-rw-r--r--doc/elasticsearch.org247
-rw-r--r--test/requests/parametrized_test.py32
-rwxr-xr-xtest/requests/test-website.py1
-rw-r--r--test/requests/test_forgot_password.py50
-rw-r--r--test/requests/test_login_github.py47
-rw-r--r--test/requests/test_login_local.py57
-rw-r--r--test/requests/test_login_orcid.py47
-rw-r--r--test/requests/test_registration.py36
-rw-r--r--wqflask/maintenance/quantile_normalize.py18
-rw-r--r--wqflask/utility/authentication_tools.py2
-rw-r--r--wqflask/utility/elasticsearch_tools.py121
-rw-r--r--wqflask/utility/hmac.py3
-rw-r--r--wqflask/utility/redis_tools.py2
-rw-r--r--wqflask/utility/tools.py5
-rw-r--r--wqflask/wqflask/__init__.py14
-rw-r--r--wqflask/wqflask/decorators.py49
-rw-r--r--wqflask/wqflask/metadata_edits.py340
-rw-r--r--wqflask/wqflask/resource_manager.py3
-rw-r--r--wqflask/wqflask/show_trait/show_trait.py30
-rw-r--r--wqflask/wqflask/templates/admin/manage_resource.html78
-rw-r--r--wqflask/wqflask/templates/edit_phenotype.html4
-rw-r--r--wqflask/wqflask/templates/edit_probeset.html70
-rw-r--r--wqflask/wqflask/templates/show_trait_details.html10
-rw-r--r--wqflask/wqflask/user_session.py1
-rw-r--r--wqflask/wqflask/views.py305
29 files changed, 505 insertions, 1097 deletions
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 8e2c7966..0cf4557f 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -44,6 +44,8 @@ jobs:
/gn2-profile/bin/screen -dm bash -c "env GN2_PROFILE=/gn2-profile \
TMPDIR=/tmp SERVER_PORT=5004 \
WEBSERVER_MODE=DEBUG LOG_LEVEL=DEBUG \
+ GN_PROXY_URL='http://localhost:8080' \
+ GN3_LOCAL_URL='http://localhost:8081' \
GENENETWORK_FILES=/genotype_files/ bin/genenetwork2 \
etc/default_settings.py"
@@ -52,6 +54,8 @@ jobs:
env GN2_PROFILE=/gn2-profile \
TMPDIR=/tmp SERVER_PORT=5004 \
WEBSERVER_MODE=DEBUG LOG_LEVEL=DEBUG \
+ GN_PROXY_URL='http://localhost:8080' \
+ GN3_LOCAL_URL='http://localhost:8081' \
GENENETWORK_FILES=/genotype_files/ bin/genenetwork2 \
etc/default_settings.py -c -m unittest discover -v
diff --git a/README.md b/README.md
index 003cfd7b..6921d299 100644
--- a/README.md
+++ b/README.md
@@ -30,7 +30,11 @@ genenetwork2
A quick example is
```sh
-env GN2_PROFILE=~/opt/gn-latest SERVER_PORT=5300 GENENETWORK_FILES=~/data/gn2_data/ ./bin/genenetwork2 ./etc/default_settings.py -gunicorn-dev
+env GN2_PROFILE=~/opt/gn-latest SERVER_PORT=5300 \
+ GENENETWORK_FILES=~/data/gn2_data/ \
+  GN_PROXY_URL="http://localhost:8080" \
+  GN3_LOCAL_URL="http://localhost:8081" \
+ ./bin/genenetwork2 ./etc/default_settings.py -gunicorn-dev
```
For full examples (you may need to set a number of environment
@@ -59,7 +63,12 @@ We are building 'Mechanical Rob' automated testing using Python
which can be run with:
```sh
-env GN2_PROFILE=~/opt/gn-latest ./bin/genenetwork2 ./etc/default_settings.py -c ../test/requests/test-website.py -a http://localhost:5003
+env GN2_PROFILE=~/opt/gn-latest \
+  GN_PROXY_URL="http://localhost:8080" \
+  GN3_LOCAL_URL="http://localhost:8081" \
+  ./bin/genenetwork2 \
+  ./etc/default_settings.py -c \
+  ../test/requests/test-website.py -a http://localhost:5003
```
The GN2_PROFILE is the Guix profile that contains all
@@ -87,9 +96,9 @@ runcmd coverage html
The `runcmd` and `runpython` are shell aliases defined in the following way:
```sh
-alias runpython="env GN2_PROFILE=~/opt/gn-latest TMPDIR=/tmp SERVER_PORT=5004 GENENETWORK_FILES=/gnu/data/gn2_data/ ./bin/genenetwork2
+alias runpython="env GN2_PROFILE=~/opt/gn-latest TMPDIR=/tmp SERVER_PORT=5004 GENENETWORK_FILES=/gnu/data/gn2_data/ GN_PROXY_URL='http://localhost:8080' GN3_LOCAL_URL='http://localhost:8081' ./bin/genenetwork2
-alias runcmd="time env GN2_PROFILE=~/opt/gn-latest TMPDIR=//tmp SERVER_PORT=5004 GENENETWORK_FILES=/gnu/data/gn2_data/ ./bin/genenetwork2 ./etc/default_settings.py -cli"
+alias runcmd="time env GN2_PROFILE=~/opt/gn-latest TMPDIR=//tmp SERVER_PORT=5004 GENENETWORK_FILES=/gnu/data/gn2_data/ GN_PROXY_URL='http://localhost:8080' GN3_LOCAL_URL='http://localhost:8081' ./bin/genenetwork2 ./etc/default_settings.py -cli"
```
Replace some of the env variables as per your use case.
diff --git a/bin/genenetwork2 b/bin/genenetwork2
index 2b94b2a2..5f714d2e 100755
--- a/bin/genenetwork2
+++ b/bin/genenetwork2
@@ -101,13 +101,6 @@ fi
export GN2_SETTINGS=$settings # Python
echo GN2_SETTINGS=$settings
-# This is a temporary hack to inject ES - should have added python2-elasticsearch package to guix instead
-# if [ -z $ELASTICSEARCH_PROFILE ]; then
-# echo -e "WARNING: Elastic Search profile has not been set - use ELASTICSEARCH_PROFILE";
-# else
-# PYTHONPATH="$PYTHONPATH${PYTHONPATH:+:}$ELASTICSEARCH_PROFILE/lib/python3.8/site-packages"
-# fi
-
if [ -z $GN2_PROFILE ] ; then
echo "WARNING: GN2_PROFILE has not been set - you need the environment, so I hope you know what you are doing!"
export GN2_PROFILE=$(dirname $(dirname $(which genenetwork2)))
diff --git a/doc/README.org b/doc/README.org
index 1236016e..8839aefc 100644
--- a/doc/README.org
+++ b/doc/README.org
@@ -81,14 +81,12 @@ GeneNetwork2 with
: source ~/opt/guix-pull/etc/profile
: git clone https://git.genenetwork.org/guix-bioinformatics/guix-bioinformatics.git ~/guix-bioinformatics
: cd ~/guix-bioinformatics
-: git pull
: env GUIX_PACKAGE_PATH=$HOME/guix-bioinformatics guix package -i genenetwork2 -p ~/opt/genenetwork2
you probably also need guix-past (the upstream channel for older packages):
: git clone https://gitlab.inria.fr/guix-hpc/guix-past.git ~/guix-past
: cd ~/guix-past
-: git pull
: env GUIX_PACKAGE_PATH=$HOME/guix-bioinformatics:$HOME/guix-past/modules ~/opt/guix-pull/bin/guix package -i genenetwork2 -p ~/opt/genenetwork2
ignore the warnings. Guix should install the software without trying
diff --git a/doc/elasticsearch.org b/doc/elasticsearch.org
deleted file mode 100644
index 864a8363..00000000
--- a/doc/elasticsearch.org
+++ /dev/null
@@ -1,247 +0,0 @@
-* Elasticsearch
-
-** Introduction
-
-GeneNetwork uses elasticsearch (ES) for all things considered
-'state'. One example is user collections, another is user management.
-
-** Example
-
-To get the right environment, first you can get a python REPL with something like
-
-: env GN2_PROFILE=~/opt/gn-latest ./bin/genenetwork2 ../etc/default_settings.py -cli python
-
-(make sure to use the correct GN2_PROFILE!)
-
-Next try
-
-#+BEGIN_SRC python
-
-from elasticsearch import Elasticsearch, TransportError
-
-es = Elasticsearch([{ "host": 'localhost', "port": '9200' }])
-
-# Dump all data
-
-es.search("*")
-
-# To fetch an E-mail record from the users index
-
-record = es.search(
- index = 'users', doc_type = 'local', body = {
- "query": { "match": { "email_address": "myname@email.com" } }
- })
-
-# It is also possible to do wild card matching
-
-q = { "query": { "wildcard" : { "full_name" : "pjot*" } }}
-es.search(index = 'users', doc_type = 'local', body = q)
-
-# To get elements from that record:
-
-record['hits']['hits'][0][u'_source']['full_name']
-u'Pjotr'
-
-record['hits']['hits'][0][u'_source']['email_address']
-u"myname@email.com"
-
-#+END_SRC
-
-** Health
-
-ES provides support for checking its health:
-
-: curl -XGET http://localhost:9200/_cluster/health?pretty=true
-
-#+BEGIN_SRC json
-
-
- {
- "cluster_name" : "asgard",
- "status" : "yellow",
- "timed_out" : false,
- "number_of_nodes" : 1,
- "number_of_data_nodes" : 1,
- "active_primary_shards" : 5,
- "active_shards" : 5,
- "relocating_shards" : 0,
- "initializing_shards" : 0,
- "unassigned_shards" : 5
- }
-
-#+END_SRC
-
-Yellow means just one instance is running (no worries).
-
-To get full cluster info
-
-: curl -XGET "localhost:9200/_cluster/stats?human&pretty"
-
-#+BEGIN_SRC json
-{
- "_nodes" : {
- "total" : 1,
- "successful" : 1,
- "failed" : 0
- },
- "cluster_name" : "elasticsearch",
- "timestamp" : 1529050366452,
- "status" : "yellow",
- "indices" : {
- "count" : 3,
- "shards" : {
- "total" : 15,
- "primaries" : 15,
- "replication" : 0.0,
- "index" : {
- "shards" : {
- "min" : 5,
- "max" : 5,
- "avg" : 5.0
- },
- "primaries" : {
- "min" : 5,
- "max" : 5,
- "avg" : 5.0
- },
- "replication" : {
- "min" : 0.0,
- "max" : 0.0,
- "avg" : 0.0
- }
- }
- },
- "docs" : {
- "count" : 14579,
- "deleted" : 0
- },
- "store" : {
- "size" : "44.7mb",
- "size_in_bytes" : 46892794
- },
- "fielddata" : {
- "memory_size" : "0b",
- "memory_size_in_bytes" : 0,
- "evictions" : 0
- },
- "query_cache" : {
- "memory_size" : "0b",
- "memory_size_in_bytes" : 0,
- "total_count" : 0,
- "hit_count" : 0,
- "miss_count" : 0,
- "cache_size" : 0,
- "cache_count" : 0,
- "evictions" : 0
- },
- "completion" : {
- "size" : "0b",
- "size_in_bytes" : 0
- },
- "segments" : {
- "count" : 24,
- "memory" : "157.3kb",
- "memory_in_bytes" : 161112,
- "terms_memory" : "122.6kb",
- "terms_memory_in_bytes" : 125569,
- "stored_fields_memory" : "15.3kb",
- "stored_fields_memory_in_bytes" : 15728,
- "term_vectors_memory" : "0b",
- "term_vectors_memory_in_bytes" : 0,
- "norms_memory" : "10.8kb",
- "norms_memory_in_bytes" : 11136,
- "points_memory" : "111b",
- "points_memory_in_bytes" : 111,
- "doc_values_memory" : "8.3kb",
- "doc_values_memory_in_bytes" : 8568,
- "index_writer_memory" : "0b",
- "index_writer_memory_in_bytes" : 0,
- "version_map_memory" : "0b",
- "version_map_memory_in_bytes" : 0,
- "fixed_bit_set" : "0b",
- "fixed_bit_set_memory_in_bytes" : 0,
- "max_unsafe_auto_id_timestamp" : -1,
- "file_sizes" : { }
- }
- },
- "nodes" : {
- "count" : {
- "total" : 1,
- "data" : 1,
- "coordinating_only" : 0,
- "master" : 1,
- "ingest" : 1
- },
- "versions" : [
- "6.2.1"
- ],
- "os" : {
- "available_processors" : 16,
- "allocated_processors" : 16,
- "names" : [
- {
- "name" : "Linux",
- "count" : 1
- }
- ],
- "mem" : {
- "total" : "125.9gb",
- "total_in_bytes" : 135189286912,
- "free" : "48.3gb",
- "free_in_bytes" : 51922628608,
- "used" : "77.5gb",
- "used_in_bytes" : 83266658304,
- "free_percent" : 38,
- "used_percent" : 62
- }
- },
- "process" : {
- "cpu" : {
- "percent" : 0
- },
- "open_file_descriptors" : {
- "min" : 415,
- "max" : 415,
- "avg" : 415
- }
- },
- "jvm" : {
- "max_uptime" : "1.9d",
- "max_uptime_in_millis" : 165800616,
- "versions" : [
- {
- "version" : "9.0.4",
- "vm_name" : "OpenJDK 64-Bit Server VM",
- "vm_version" : "9.0.4+11",
- "vm_vendor" : "Oracle Corporation",
- "count" : 1
- }
- ],
- "mem" : {
- "heap_used" : "1.1gb",
- "heap_used_in_bytes" : 1214872032,
- "heap_max" : "23.8gb",
- "heap_max_in_bytes" : 25656426496
- },
- "threads" : 110
- },
- "fs" : {
- "total" : "786.4gb",
- "total_in_bytes" : 844400918528,
- "free" : "246.5gb",
- "free_in_bytes" : 264688160768,
- "available" : "206.5gb",
- "available_in_bytes" : 221771468800
- },
- "plugins" : [ ],
- "network_types" : {
- "transport_types" : {
- "netty4" : 1
- },
- "http_types" : {
- "netty4" : 1
- }
- }
- }
-}
-#+BEGIN_SRC json
diff --git a/test/requests/parametrized_test.py b/test/requests/parametrized_test.py
deleted file mode 100644
index 50003850..00000000
--- a/test/requests/parametrized_test.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import logging
-import unittest
-from wqflask import app
-from utility.elasticsearch_tools import get_elasticsearch_connection, get_user_by_unique_column
-from elasticsearch import Elasticsearch, TransportError
-
-class ParametrizedTest(unittest.TestCase):
-
- def __init__(self, methodName='runTest', gn2_url="http://localhost:5003", es_url="localhost:9200"):
- super(ParametrizedTest, self).__init__(methodName=methodName)
- self.gn2_url = gn2_url
- self.es_url = es_url
-
- def setUp(self):
- self.es = get_elasticsearch_connection()
- self.es_cleanup = []
-
- es_logger = logging.getLogger("elasticsearch")
- es_logger.setLevel(app.config.get("LOG_LEVEL"))
- es_logger.addHandler(
- logging.FileHandler("/tmp/es_TestRegistrationInfo.log"))
- es_trace_logger = logging.getLogger("elasticsearch.trace")
- es_trace_logger.addHandler(
- logging.FileHandler("/tmp/es_TestRegistrationTrace.log"))
-
- def tearDown(self):
- from time import sleep
- self.es.delete_by_query(
- index="users"
- , doc_type="local"
- , body={"query":{"match":{"email_address":"test@user.com"}}})
- sleep(1)
diff --git a/test/requests/test-website.py b/test/requests/test-website.py
index 8bfb47c2..d619a7d5 100755
--- a/test/requests/test-website.py
+++ b/test/requests/test-website.py
@@ -43,7 +43,6 @@ def dummy(args_obj, parser):
def integration_tests(args_obj, parser):
gn2_url = args_obj.host
- es_url = app.config.get("ELASTICSEARCH_HOST")+":"+str(app.config.get("ELASTICSEARCH_PORT"))
run_integration_tests(gn2_url, es_url)
def initTest(klass, gn2_url, es_url):
diff --git a/test/requests/test_forgot_password.py b/test/requests/test_forgot_password.py
deleted file mode 100644
index 346524bc..00000000
--- a/test/requests/test_forgot_password.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import requests
-from utility.elasticsearch_tools import get_user_by_unique_column
-from parameterized import parameterized
-from parametrized_test import ParametrizedTest
-
-passwork_reset_link = ''
-forgot_password_page = None
-
-class TestForgotPassword(ParametrizedTest):
-
- def setUp(self):
- super(TestForgotPassword, self).setUp()
- self.forgot_password_url = self.gn2_url+"/n/forgot_password_submit"
- def send_email(to_addr, msg, fromaddr="no-reply@genenetwork.org"):
- print("CALLING: send_email_mock()")
- email_data = {
- "to_addr": to_addr
- , "msg": msg
- , "fromaddr": from_addr}
-
- data = {
- "es_connection": self.es,
- "email_address": "test@user.com",
- "full_name": "Test User",
- "organization": "Test Organisation",
- "password": "test_password",
- "password_confirm": "test_password"
- }
-
-
- def testWithoutEmail(self):
- data = {"email_address": ""}
- error_notification = '<div class="alert alert-danger">You MUST provide an email</div>'
- result = requests.post(self.forgot_password_url, data=data)
- self.assertEqual(result.url, self.gn2_url+"/n/forgot_password")
- self.assertTrue(
- result.content.find(error_notification) >= 0
- , "Error message should be displayed but was not")
-
- def testWithNonExistingEmail(self):
- # Monkey patching doesn't work, so simply test that getting by email
- # returns the correct data
- user = get_user_by_unique_column(self.es, "email_address", "non-existent@domain.com")
- self.assertTrue(user is None, "Should not find non-existent user")
-
- def testWithExistingEmail(self):
- # Monkey patching doesn't work, so simply test that getting by email
- # returns the correct data
- user = get_user_by_unique_column(self.es, "email_address", "test@user.com")
- self.assertTrue(user is not None, "Should find user")
diff --git a/test/requests/test_login_github.py b/test/requests/test_login_github.py
deleted file mode 100644
index 1bf4f695..00000000
--- a/test/requests/test_login_github.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import uuid
-import requests
-from time import sleep
-from wqflask import app
-from parameterized import parameterized
-from parametrized_test import ParametrizedTest
-
-login_link_text = '<a id="login_in" href="/n/login">Sign in</a>'
-logout_link_text = '<a id="login_out" title="Signed in as ." href="/n/logout">Sign out</a>'
-uid = str(uuid.uuid4())
-
-class TestLoginGithub(ParametrizedTest):
-
- def setUp(self):
- super(TestLoginGithub, self).setUp()
- data = {
- "user_id": uid
- , "name": "A. T. Est User"
- , "github_id": 693024
- , "user_url": "https://fake-github.com/atestuser"
- , "login_type": "github"
- , "organization": ""
- , "active": 1
- , "confirmed": 1
- }
- self.es.create(index="users", doc_type="local", body=data, id=uid)
- sleep(1)
-
- def tearDown(self):
- super(TestLoginGithub, self).tearDown()
- self.es.delete(index="users", doc_type="local", id=uid)
-
- def testLoginUrl(self):
- login_button_text = '<a href="https://github.com/login/oauth/authorize?client_id=' + app.config.get("GITHUB_CLIENT_ID") + '&amp;client_secret=' + app.config.get("GITHUB_CLIENT_SECRET") + '" title="Login with GitHub" class="btn btn-info btn-group">Login with Github</a>'
- result = requests.get(self.gn2_url+"/n/login")
- index = result.content.find(login_button_text)
- self.assertTrue(index >= 0, "Should have found `Login with Github` button")
-
- @parameterized.expand([
- ("1234", login_link_text, "Login should have failed with non-existing user")
- , (uid, logout_link_text, "Login should have been successful with existing user")
- ])
- def testLogin(self, test_uid, expected, message):
- url = self.gn2_url+"/n/login?type=github&uid="+test_uid
- result = requests.get(url)
- index = result.content.find(expected)
- self.assertTrue(index >= 0, message)
diff --git a/test/requests/test_login_local.py b/test/requests/test_login_local.py
deleted file mode 100644
index 6691d135..00000000
--- a/test/requests/test_login_local.py
+++ /dev/null
@@ -1,57 +0,0 @@
-import requests
-from parameterized import parameterized
-from parametrized_test import ParametrizedTest
-
-login_link_text = '<a id="login_in" href="/n/login">Sign in</a>'
-logout_link_text = '<a id="login_out" title="Signed in as ." href="/n/logout">Sign out</a>'
-
-class TestLoginLocal(ParametrizedTest):
-
- def setUp(self):
- super(TestLoginLocal, self).setUp()
- self.login_url = self.gn2_url +"/n/login"
- data = {
- "es_connection": self.es,
- "email_address": "test@user.com",
- "full_name": "Test User",
- "organization": "Test Organisation",
- "password": "test_password",
- "password_confirm": "test_password"
- }
-
-
- @parameterized.expand([
- (
- {
- "email_address": "non@existent.email",
- "password": "doesitmatter?"
- }, login_link_text, "Login should have failed with the wrong user details."),
- (
- {
- "email_address": "test@user.com",
- "password": "test_password"
- }, logout_link_text, "Login should have been successful with correct user details and neither import_collections nor remember_me set"),
- (
- {
- "email_address": "test@user.com",
- "password": "test_password",
- "import_collections": "y"
- }, logout_link_text, "Login should have been successful with correct user details and only import_collections set"),
- (
- {
- "email_address": "test@user.com",
- "password": "test_password",
- "remember_me": "y"
- }, logout_link_text, "Login should have been successful with correct user details and only remember_me set"),
- (
- {
- "email_address": "test@user.com",
- "password": "test_password",
- "remember_me": "y",
- "import_collections": "y"
- }, logout_link_text, "Login should have been successful with correct user details, and both remember_me, and import_collections set")
- ])
- def testLogin(self, data, expected, message):
- result = requests.post(self.login_url, data=data)
- index = result.content.find(expected)
- self.assertTrue(index >= 0, message)
diff --git a/test/requests/test_login_orcid.py b/test/requests/test_login_orcid.py
deleted file mode 100644
index ea15642e..00000000
--- a/test/requests/test_login_orcid.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import uuid
-import requests
-from time import sleep
-from wqflask import app
-from parameterized import parameterized
-from parametrized_test import ParametrizedTest
-
-login_link_text = '<a id="login_in" href="/n/login">Sign in</a>'
-logout_link_text = '<a id="login_out" title="Signed in as ." href="/n/logout">Sign out</a>'
-uid = str(uuid.uuid4())
-
-class TestLoginOrcid(ParametrizedTest):
-
- def setUp(self):
- super(TestLoginOrcid, self).setUp()
- data = {
- "user_id": uid
- , "name": "A. T. Est User"
- , "orcid": 345872
- , "user_url": "https://fake-orcid.org/atestuser"
- , "login_type": "orcid"
- , "organization": ""
- , "active": 1
- , "confirmed": 1
- }
- self.es.create(index="users", doc_type="local", body=data, id=uid)
- sleep(1)
-
- def tearDown(self):
- super(TestLoginOrcid, self).tearDown()
- self.es.delete(index="users", doc_type="local", id=uid)
-
- def testLoginUrl(self):
- login_button_text = 'a href="https://sandbox.orcid.org/oauth/authorize?response_type=code&amp;scope=/authenticate&amp;show_login=true&amp;client_id=' + app.config.get("ORCID_CLIENT_ID") + '&amp;client_secret=' + app.config.get("ORCID_CLIENT_SECRET") + '" title="Login with ORCID" class="btn btn-info btn-group">Login with ORCID</a>'
- result = requests.get(self.gn2_url+"/n/login")
- index = result.content.find(login_button_text)
- self.assertTrue(index >= 0, "Should have found `Login with ORCID` button")
-
- @parameterized.expand([
- ("1234", login_link_text, "Login should have failed with non-existing user")
- , (uid, logout_link_text, "Login should have been successful with existing user")
- ])
- def testLogin(self, test_uid, expected, message):
- url = self.gn2_url+"/n/login?type=orcid&uid="+test_uid
- result = requests.get(url)
- index = result.content.find(expected)
- self.assertTrue(index >= 0, message)
diff --git a/test/requests/test_registration.py b/test/requests/test_registration.py
index 0047e8a6..5d08bf58 100644
--- a/test/requests/test_registration.py
+++ b/test/requests/test_registration.py
@@ -1,31 +1,25 @@
import sys
import requests
-from parametrized_test import ParametrizedTest
class TestRegistration(ParametrizedTest):
- def tearDown(self):
- for item in self.es_cleanup:
- self.es.delete(index="users", doc_type="local", id=item["_id"])
def testRegistrationPage(self):
- if self.es.ping():
- data = {
- "email_address": "test@user.com",
- "full_name": "Test User",
- "organization": "Test Organisation",
- "password": "test_password",
- "password_confirm": "test_password"
- }
- requests.post(self.gn2_url+"/n/register", data)
- response = self.es.search(
- index="users"
- , doc_type="local"
- , body={
- "query": {"match": {"email_address": "test@user.com"}}})
- self.assertEqual(len(response["hits"]["hits"]), 1)
- else:
- self.skipTest("The elasticsearch server is down")
+ data = {
+ "email_address": "test@user.com",
+ "full_name": "Test User",
+ "organization": "Test Organisation",
+ "password": "test_password",
+ "password_confirm": "test_password"
+ }
+ requests.post(self.gn2_url+"/n/register", data)
+ response = self.es.search(
+ index="users"
+ , doc_type="local"
+ , body={
+ "query": {"match": {"email_address": "test@user.com"}}})
+ self.assertEqual(len(response["hits"]["hits"]), 1)
+
def main(gn2, es):
import unittest
diff --git a/wqflask/maintenance/quantile_normalize.py b/wqflask/maintenance/quantile_normalize.py
index 0cc963e5..32780ca6 100644
--- a/wqflask/maintenance/quantile_normalize.py
+++ b/wqflask/maintenance/quantile_normalize.py
@@ -5,14 +5,10 @@ import urllib.parse
import numpy as np
import pandas as pd
-from elasticsearch import Elasticsearch, TransportError
-from elasticsearch.helpers import bulk
from flask import Flask, g, request
from wqflask import app
-from utility.elasticsearch_tools import get_elasticsearch_connection
-from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT, SQL_URI
def parse_db_uri():
@@ -106,20 +102,6 @@ if __name__ == '__main__':
Conn = MySQLdb.Connect(**parse_db_uri())
Cursor = Conn.cursor()
- # es = Elasticsearch([{
- # "host": ELASTICSEARCH_HOST, "port": ELASTICSEARCH_PORT
- # }], timeout=60) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
-
- es = get_elasticsearch_connection(for_user=False)
-
- #input_filename = "/home/zas1024/cfw_data/" + sys.argv[1] + ".txt"
- #input_df = create_dataframe(input_filename)
- #output_df = quantileNormalize(input_df)
-
- #output_df.to_csv('quant_norm.csv', sep='\t')
-
- #out_filename = sys.argv[1][:-4] + '_quantnorm.txt'
-
success, _ = bulk(es, set_data(sys.argv[1]))
response = es.search(
diff --git a/wqflask/utility/authentication_tools.py b/wqflask/utility/authentication_tools.py
index c4801c8c..afea69e1 100644
--- a/wqflask/utility/authentication_tools.py
+++ b/wqflask/utility/authentication_tools.py
@@ -1,7 +1,6 @@
import json
import requests
-from deprecated import deprecated
from flask import g
from base import webqtlConfig
@@ -127,7 +126,6 @@ def check_owner(dataset=None, trait_id=None, resource_id=None):
return False
-@deprecated
def check_owner_or_admin(dataset=None, trait_id=None, resource_id=None):
if not resource_id:
if dataset.type == "Temp":
diff --git a/wqflask/utility/elasticsearch_tools.py b/wqflask/utility/elasticsearch_tools.py
deleted file mode 100644
index eae3ba03..00000000
--- a/wqflask/utility/elasticsearch_tools.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# Elasticsearch support
-#
-# Some helpful commands to view the database:
-#
-# You can test the server being up with
-#
-# curl -H 'Content-Type: application/json' http://localhost:9200
-#
-# List all indices
-#
-# curl -H 'Content-Type: application/json' 'localhost:9200/_cat/indices?v'
-#
-# To see the users index 'table'
-#
-# curl http://localhost:9200/users
-#
-# To list all user ids
-#
-# curl -H 'Content-Type: application/json' http://localhost:9200/users/local/_search?pretty=true -d '
-# {
-# "query" : {
-# "match_all" : {}
-# },
-# "stored_fields": []
-# }'
-#
-# To view a record
-#
-# curl -H 'Content-Type: application/json' http://localhost:9200/users/local/_search?pretty=true -d '
-# {
-# "query" : {
-# "match" : { "email_address": "pjotr2017@thebird.nl"}
-# }
-# }'
-#
-#
-# To delete the users index and data (dangerous!)
-#
-# curl -XDELETE -H 'Content-Type: application/json' 'localhost:9200/users'
-
-
-from elasticsearch import Elasticsearch, TransportError
-import logging
-
-from utility.logger import getLogger
-logger = getLogger(__name__)
-
-from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
-
-
-def test_elasticsearch_connection():
- es = Elasticsearch(['http://' + ELASTICSEARCH_HOST + \
- ":" + str(ELASTICSEARCH_PORT) + '/'], verify_certs=True)
- if not es.ping():
- logger.warning("Elasticsearch is DOWN")
-
-
-def get_elasticsearch_connection(for_user=True):
- """Return a connection to ES. Returns None on failure"""
- logger.info("get_elasticsearch_connection")
- es = None
- try:
- assert(ELASTICSEARCH_HOST)
- assert(ELASTICSEARCH_PORT)
- logger.info("ES HOST", ELASTICSEARCH_HOST)
-
- es = Elasticsearch([{
- "host": ELASTICSEARCH_HOST, "port": ELASTICSEARCH_PORT
- }], timeout=30, retry_on_timeout=True) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
-
- if for_user:
- setup_users_index(es)
-
- es_logger = logging.getLogger("elasticsearch")
- es_logger.setLevel(logging.INFO)
- es_logger.addHandler(logging.NullHandler())
- except Exception as e:
- logger.error("Failed to get elasticsearch connection", e)
- es = None
-
- return es
-
-
-def setup_users_index(es_connection):
- if es_connection:
- index_settings = {
- "properties": {
- "email_address": {
- "type": "keyword"}}}
-
- es_connection.indices.create(index='users', ignore=400)
- es_connection.indices.put_mapping(
- body=index_settings, index="users", doc_type="local")
-
-
-def get_user_by_unique_column(es, column_name, column_value, index="users", doc_type="local"):
- return get_item_by_unique_column(es, column_name, column_value, index=index, doc_type=doc_type)
-
-
-def save_user(es, user, user_id):
- es_save_data(es, "users", "local", user, user_id)
-
-
-def get_item_by_unique_column(es, column_name, column_value, index, doc_type):
- item_details = None
- try:
- response = es.search(
- index=index, doc_type=doc_type, body={
- "query": {"match": {column_name: column_value}}
- })
- if len(response["hits"]["hits"]) > 0:
- item_details = response["hits"]["hits"][0]["_source"]
- except TransportError as te:
- pass
- return item_details
-
-
-def es_save_data(es, index, doc_type, data_item, data_id,):
- from time import sleep
- es.create(index, doc_type, body=data_item, id=data_id)
- sleep(1) # Delay 1 second to allow indexing
diff --git a/wqflask/utility/hmac.py b/wqflask/utility/hmac.py
index d6e515ed..29891677 100644
--- a/wqflask/utility/hmac.py
+++ b/wqflask/utility/hmac.py
@@ -1,14 +1,11 @@
import hmac
import hashlib
-from deprecated import deprecated
from flask import url_for
from wqflask import app
-@deprecated("This function leads to circular imports. "
- "If possible use wqflask.decorators.create_hmac instead.")
def hmac_creation(stringy):
"""Helper function to create the actual hmac"""
diff --git a/wqflask/utility/redis_tools.py b/wqflask/utility/redis_tools.py
index c2a3b057..a6c5875f 100644
--- a/wqflask/utility/redis_tools.py
+++ b/wqflask/utility/redis_tools.py
@@ -4,7 +4,6 @@ import datetime
import redis # used for collections
-from deprecated import deprecated
from utility.hmac import hmac_creation
from utility.logger import getLogger
logger = getLogger(__name__)
@@ -252,7 +251,6 @@ def get_resource_id(dataset, trait_id=None):
return resource_id
-@deprecated
def get_resource_info(resource_id):
resource_info = Redis.hget("resources", resource_id)
if resource_info:
diff --git a/wqflask/utility/tools.py b/wqflask/utility/tools.py
index 0efe8ca9..f28961ec 100644
--- a/wqflask/utility/tools.py
+++ b/wqflask/utility/tools.py
@@ -287,6 +287,7 @@ JS_GN_PATH = get_setting('JS_GN_PATH')
GITHUB_CLIENT_ID = get_setting('GITHUB_CLIENT_ID')
GITHUB_CLIENT_SECRET = get_setting('GITHUB_CLIENT_SECRET')
+GITHUB_AUTH_URL = ""
if GITHUB_CLIENT_ID != 'UNKNOWN' and GITHUB_CLIENT_SECRET:
GITHUB_AUTH_URL = "https://github.com/login/oauth/authorize?client_id=" + \
GITHUB_CLIENT_ID + "&client_secret=" + GITHUB_CLIENT_SECRET
@@ -301,10 +302,6 @@ if ORCID_CLIENT_ID != 'UNKNOWN' and ORCID_CLIENT_SECRET:
"&redirect_uri=" + GN2_BRANCH_URL + "n/login/orcid_oauth2"
ORCID_TOKEN_URL = get_setting('ORCID_TOKEN_URL')
-ELASTICSEARCH_HOST = get_setting('ELASTICSEARCH_HOST')
-ELASTICSEARCH_PORT = get_setting('ELASTICSEARCH_PORT')
-# import utility.elasticsearch_tools as es
-# es.test_elasticsearch_connection()
SMTP_CONNECT = get_setting('SMTP_CONNECT')
SMTP_USERNAME = get_setting('SMTP_USERNAME')
diff --git a/wqflask/wqflask/__init__.py b/wqflask/wqflask/__init__.py
index 5b2d05d1..169192c7 100644
--- a/wqflask/wqflask/__init__.py
+++ b/wqflask/wqflask/__init__.py
@@ -9,8 +9,11 @@ from typing import Tuple
from urllib.parse import urlparse
from utility import formatting
+from wqflask.access_roles import DataRole, AdminRole
from wqflask.resource_manager import resource_management
+from wqflask.metadata_edits import metadata_edit
+
from wqflask.api.markdown import glossary_blueprint
from wqflask.api.markdown import references_blueprint
from wqflask.api.markdown import links_blueprint
@@ -60,6 +63,7 @@ app.register_blueprint(news_blueprint, url_prefix="/news")
app.register_blueprint(resource_management, url_prefix="/resource-management")
+app.register_blueprint(metadata_edit, url_prefix="/datasets/")
@app.before_request
def before_request():
@@ -67,6 +71,16 @@ def before_request():
g.request_time = lambda: "%.5fs" % (time.time() - g.request_start_time)
+@app.context_processor
+def include_admin_role_class():
+ return {'AdminRole': AdminRole}
+
+
+@app.context_processor
+def include_data_role_class():
+ return {'DataRole': DataRole}
+
+
from wqflask.api import router
from wqflask import group_manager
from wqflask import resource_manager
diff --git a/wqflask/wqflask/decorators.py b/wqflask/wqflask/decorators.py
index 843539ee..1ef8c188 100644
--- a/wqflask/wqflask/decorators.py
+++ b/wqflask/wqflask/decorators.py
@@ -1,9 +1,7 @@
"""This module contains gn2 decorators"""
-import hashlib
-import hmac
import redis
-from flask import current_app, g
+from flask import current_app, g, request
from typing import Dict
from urllib.parse import urljoin
from functools import wraps
@@ -14,19 +12,13 @@ import json
import requests
-def create_hmac(data: str, secret: str) -> str:
- return hmac.new(bytearray(secret, "latin-1"),
- bytearray(data, "utf-8"),
- hashlib.sha1).hexdigest()[:20]
-
-
def login_required(f):
"""Use this for endpoints where login is required"""
@wraps(f)
def wrap(*args, **kwargs):
- user_id = (g.user_session.record.get(b"user_id",
- b"").decode("utf-8") or
- g.user_session.record.get("user_id", ""))
+ user_id = ((g.user_session.record.get(b"user_id") or
+ b"").decode("utf-8")
+ or g.user_session.record.get("user_id") or "")
redis_conn = redis.from_url(current_app.config["REDIS_URL"],
decode_responses=True)
if not redis_conn.hget("users", user_id):
@@ -36,32 +28,25 @@ def login_required(f):
def edit_access_required(f):
- """Use this for endpoints where people with admin or edit privileges are required"""
+ """Use this for endpoints where people with admin or edit privileges
+are required"""
@wraps(f)
def wrap(*args, **kwargs):
resource_id: str = ""
- if kwargs.get("inbredset_id"): # data type: dataset-publish
- resource_id = create_hmac(
- data=("dataset-publish:"
- f"{kwargs.get('inbredset_id')}:"
- f"{kwargs.get('name')}"),
- secret=current_app.config.get("SECRET_HMAC_CODE"))
- if kwargs.get("dataset_name"): # data type: dataset-probe
- resource_id = create_hmac(
- data=("dataset-probeset:"
- f"{kwargs.get('dataset_name')}"),
- secret=current_app.config.get("SECRET_HMAC_CODE"))
- if kwargs.get("resource_id"): # The resource_id is already provided
+ if request.args.get("resource-id"):
+ resource_id = request.args.get("resource-id")
+ elif kwargs.get("resource_id"):
resource_id = kwargs.get("resource_id")
response: Dict = {}
try:
- _user_id = g.user_session.record.get(b"user_id",
- "").decode("utf-8")
+ user_id = ((g.user_session.record.get(b"user_id") or
+ b"").decode("utf-8")
+ or g.user_session.record.get("user_id") or "")
response = json.loads(
requests.get(urljoin(
current_app.config.get("GN2_PROXY"),
("available?resource="
- f"{resource_id}&user={_user_id}"))).content)
+ f"{resource_id}&user={user_id}"))).content)
except:
response = {}
if max([DataRole(role) for role in response.get(
@@ -78,13 +63,14 @@ def edit_admins_access_required(f):
resource_id: str = kwargs.get("resource_id", "")
response: Dict = {}
try:
- _user_id = g.user_session.record.get(b"user_id",
- "").decode("utf-8")
+ user_id = ((g.user_session.record.get(b"user_id") or
+ b"").decode("utf-8")
+ or g.user_session.record.get("user_id") or "")
response = json.loads(
requests.get(urljoin(
current_app.config.get("GN2_PROXY"),
("available?resource="
- f"{resource_id}&user={_user_id}"))).content)
+ f"{resource_id}&user={user_id}"))).content)
except:
response = {}
if max([AdminRole(role) for role in response.get(
@@ -92,4 +78,3 @@ def edit_admins_access_required(f):
return "You need to have edit-admins access", 401
return f(*args, **kwargs)
return wrap
-
diff --git a/wqflask/wqflask/metadata_edits.py b/wqflask/wqflask/metadata_edits.py
new file mode 100644
index 00000000..d232b32b
--- /dev/null
+++ b/wqflask/wqflask/metadata_edits.py
@@ -0,0 +1,340 @@
+import MySQLdb
+import os
+import json
+import datetime
+import difflib
+
+
+from collections import namedtuple
+from flask import (Blueprint, current_app, redirect,
+ flash, g, render_template, request)
+from itertools import groupby
+
+from wqflask.decorators import edit_access_required
+
+from gn3.db import diff_from_dict
+from gn3.db import fetchall
+from gn3.db import fetchone
+from gn3.db import insert
+from gn3.db import update
+from gn3.db.metadata_audit import MetadataAudit
+from gn3.db.phenotypes import Phenotype
+from gn3.db.phenotypes import Probeset
+from gn3.db.phenotypes import Publication
+from gn3.db.phenotypes import PublishXRef
+from gn3.db.phenotypes import probeset_mapping
+from gn3.commands import run_cmd
+from gn3.db.traits import get_trait_csv_sample_data
+from gn3.db.traits import update_sample_data
+
+
+metadata_edit = Blueprint('metadata_edit', __name__)
+
+
+def edit_phenotype(conn, name, dataset_id):
+ publish_xref = fetchone(
+ conn=conn,
+ table="PublishXRef",
+ where=PublishXRef(id_=name,
+ inbred_set_id=dataset_id))
+ phenotype_ = fetchone(
+ conn=conn,
+ table="Phenotype",
+ where=Phenotype(id_=publish_xref.phenotype_id))
+ publication_ = fetchone(
+ conn=conn,
+ table="Publication",
+ where=Publication(id_=publish_xref.publication_id))
+ json_data = fetchall(
+ conn,
+ "metadata_audit",
+ where=MetadataAudit(dataset_id=publish_xref.id_))
+ Edit = namedtuple("Edit", ["field", "old", "new", "diff"])
+ Diff = namedtuple("Diff", ["author", "diff", "timestamp"])
+ diff_data = []
+ for data in json_data:
+ json_ = json.loads(data.json_data)
+ timestamp = json_.get("timestamp")
+ author = json_.get("author")
+ for key, value in json_.items():
+ if isinstance(value, dict):
+ for field, data_ in value.items():
+ diff_data.append(
+ Diff(author=author,
+ diff=Edit(field,
+ data_.get("old"),
+ data_.get("new"),
+ "\n".join(difflib.ndiff(
+ [data_.get("old")],
+ [data_.get("new")]))),
+ timestamp=timestamp))
+ diff_data_ = None
+ if len(diff_data) > 0:
+ diff_data_ = groupby(diff_data, lambda x: x.timestamp)
+ return {
+ "diff": diff_data_,
+ "publish_xref": publish_xref,
+ "phenotype": phenotype_,
+ "publication": publication_,
+ }
+
+
+def edit_probeset(conn, name):
+ probeset_ = fetchone(conn=conn,
+ table="ProbeSet",
+ columns=list(probeset_mapping.values()),
+ where=Probeset(name=name))
+ json_data = fetchall(
+ conn,
+ "metadata_audit",
+ where=MetadataAudit(dataset_id=probeset_.id_))
+ Edit = namedtuple("Edit", ["field", "old", "new", "diff"])
+ Diff = namedtuple("Diff", ["author", "diff", "timestamp"])
+ diff_data = []
+ for data in json_data:
+ json_ = json.loads(data.json_data)
+ timestamp = json_.get("timestamp")
+ author = json_.get("author")
+ for key, value in json_.items():
+ if isinstance(value, dict):
+ for field, data_ in value.items():
+ diff_data.append(
+ Diff(author=author,
+ diff=Edit(field,
+ data_.get("old"),
+ data_.get("new"),
+ "\n".join(difflib.ndiff(
+ [data_.get("old")],
+ [data_.get("new")]))),
+ timestamp=timestamp))
+ diff_data_ = None
+ if len(diff_data) > 0:
+ diff_data_ = groupby(diff_data, lambda x: x.timestamp)
+ return {
+ "diff": diff_data_,
+ "probeset": probeset_,
+ }
+
+
+@metadata_edit.route("/<dataset_id>/traits/<name>")
+@edit_access_required
+def display_phenotype_metadata(dataset_id: str, name: str):
+ conn = MySQLdb.Connect(db=current_app.config.get("DB_NAME"),
+ user=current_app.config.get("DB_USER"),
+ passwd=current_app.config.get("DB_PASS"),
+ host=current_app.config.get("DB_HOST"))
+ _d = edit_phenotype(conn=conn, name=name, dataset_id=dataset_id)
+ return render_template(
+ "edit_phenotype.html",
+ diff=_d.get("diff"),
+ publish_xref=_d.get("publish_xref"),
+ phenotype=_d.get("phenotype"),
+ publication=_d.get("publication"),
+ dataset_id=dataset_id,
+ resource_id=request.args.get("resource-id"),
+ version=os.environ.get("GN_VERSION"),
+ )
+
+
+@metadata_edit.route("/traits/<name>")
+@edit_access_required
+def display_probeset_metadata(name: str):
+ conn = MySQLdb.Connect(db=current_app.config.get("DB_NAME"),
+ user=current_app.config.get("DB_USER"),
+ passwd=current_app.config.get("DB_PASS"),
+ host=current_app.config.get("DB_HOST"))
+ _d = edit_probeset(conn=conn, name=name)
+ return render_template(
+ "edit_probeset.html",
+ diff=_d.get("diff"),
+ probeset=_d.get("probeset"),
+ name=name,
+ resource_id=request.args.get("resource-id"),
+ version=os.environ.get("GN_VERSION"),
+ )
+
+
+@metadata_edit.route("/<dataset_id>/traits/<name>", methods=("POST",))
+@edit_access_required
+def update_phenotype(dataset_id: str, name: str):
+ conn = MySQLdb.Connect(db=current_app.config.get("DB_NAME"),
+ user=current_app.config.get("DB_USER"),
+ passwd=current_app.config.get("DB_PASS"),
+ host=current_app.config.get("DB_HOST"))
+ data_ = request.form.to_dict()
+ TMPDIR = current_app.config.get("TMPDIR")
+ author = ((g.user_session.record.get(b"user_id") or b"").decode("utf-8")
+ or g.user_session.record.get("user_id") or "")
+ phenotype_id = str(data_.get('phenotype-id'))
+ if 'file' not in request.files:
+ flash("No sample-data has been uploaded", "warning")
+ else:
+ file_ = request.files['file']
+ SAMPLE_DATADIR = os.path.join(TMPDIR, "sample-data")
+ if not os.path.exists(SAMPLE_DATADIR):
+ os.makedirs(SAMPLE_DATADIR)
+ if not os.path.exists(os.path.join(SAMPLE_DATADIR,
+ "diffs")):
+ os.makedirs(os.path.join(SAMPLE_DATADIR,
+ "diffs"))
+ if not os.path.exists(os.path.join(SAMPLE_DATADIR,
+ "updated")):
+ os.makedirs(os.path.join(SAMPLE_DATADIR,
+ "updated"))
+ current_time = str(datetime.datetime.now().isoformat())
+ new_file_name = (os.path.join(TMPDIR,
+ "sample-data/updated/",
+ (f"{author}."
+ f"{name}.{phenotype_id}."
+ f"{current_time}.csv")))
+ uploaded_file_name = (os.path.join(
+ TMPDIR,
+ "sample-data/updated/",
+ (f"updated.{author}."
+ f"{request.args.get('resource-id')}."
+ f"{current_time}.csv")))
+ file_.save(new_file_name)
+ publishdata_id = ""
+ lines = []
+ with open(new_file_name, "r") as f:
+ lines = f.read()
+ first_line = lines.split('\n', 1)[0]
+ publishdata_id = first_line.split("Id:")[-1].strip()
+ with open(new_file_name, "w") as f:
+ f.write(lines.split("\n\n")[-1])
+ csv_ = get_trait_csv_sample_data(conn=conn,
+ trait_name=str(name),
+ phenotype_id=str(phenotype_id))
+ with open(uploaded_file_name, "w") as f_:
+ f_.write(csv_.split("\n\n")[-1])
+ r = run_cmd(cmd=("csvdiff "
+ f"'{uploaded_file_name}' '{new_file_name}' "
+ "--format json"))
+ diff_output = (f"{TMPDIR}/sample-data/diffs/"
+ f"{author}.{request.args.get('resource-id')}."
+ f"{current_time}.json")
+ with open(diff_output, "w") as f:
+ dict_ = json.loads(r.get("output"))
+ dict_.update({
+ "author": author,
+ "publishdata_id": publishdata_id,
+ "dataset_id": data_.get("dataset-name"),
+ "timestamp": datetime.datetime.now().strftime(
+ "%Y-%m-%d %H:%M:%S")
+ })
+ f.write(json.dumps(dict_))
+ flash("Sample-data has been successfully uploaded", "success")
+ # Run updates:
+ phenotype_ = {
+ "pre_pub_description": data_.get("pre-pub-desc"),
+ "post_pub_description": data_.get("post-pub-desc"),
+ "original_description": data_.get("orig-desc"),
+ "units": data_.get("units"),
+ "pre_pub_abbreviation": data_.get("pre-pub-abbrev"),
+ "post_pub_abbreviation": data_.get("post-pub-abbrev"),
+ "lab_code": data_.get("labcode"),
+ "submitter": data_.get("submitter"),
+ "owner": data_.get("owner"),
+ "authorized_users": data_.get("authorized-users"),
+ }
+ updated_phenotypes = update(
+ conn, "Phenotype",
+ data=Phenotype(**phenotype_),
+ where=Phenotype(id_=data_.get("phenotype-id")))
+ diff_data = {}
+ if updated_phenotypes:
+ diff_data.update({"Phenotype": diff_from_dict(old={
+ k: data_.get(f"old_{k}") for k, v in phenotype_.items()
+ if v is not None}, new=phenotype_)})
+ publication_ = {
+ "abstract": data_.get("abstract"),
+ "authors": data_.get("authors"),
+ "title": data_.get("title"),
+ "journal": data_.get("journal"),
+ "volume": data_.get("volume"),
+ "pages": data_.get("pages"),
+ "month": data_.get("month"),
+ "year": data_.get("year")
+ }
+ updated_publications = update(
+ conn, "Publication",
+ data=Publication(**publication_),
+ where=Publication(id_=data_.get("pubmed-id",
+ data_.get("old_id_"))))
+ if updated_publications:
+ diff_data.update({"Publication": diff_from_dict(old={
+ k: data_.get(f"old_{k}") for k, v in publication_.items()
+ if v is not None}, new=publication_)})
+ if diff_data:
+ diff_data.update({"dataset_id": name})
+ diff_data.update({"resource_id": request.args.get('resource-id')})
+ diff_data.update({"author": author})
+ diff_data.update({"timestamp": datetime.datetime.now().strftime(
+ "%Y-%m-%d %H:%M:%S")})
+ insert(conn,
+ table="metadata_audit",
+ data=MetadataAudit(dataset_id=name,
+ editor=author,
+ json_data=json.dumps(diff_data)))
+ flash(f"Diff-data: \n{diff_data}\nhas been uploaded", "success")
+ return redirect(f"/datasets/{dataset_id}/traits/{name}"
+ f"?resource-id={request.args.get('resource-id')}")
+
+
+@metadata_edit.route("/traits/<name>", methods=("POST",))
+@edit_access_required
+def update_probeset(name: str):
+ conn = MySQLdb.Connect(db=current_app.config.get("DB_NAME"),
+ user=current_app.config.get("DB_USER"),
+ passwd=current_app.config.get("DB_PASS"),
+ host=current_app.config.get("DB_HOST"))
+ data_ = request.form.to_dict()
+ probeset_ = {
+ "id_": data_.get("id"),
+ "symbol": data_.get("symbol"),
+ "description": data_.get("description"),
+ "probe_target_description": data_.get("probe_target_description"),
+ "chr_": data_.get("chr"),
+ "mb": data_.get("mb"),
+ "alias": data_.get("alias"),
+ "geneid": data_.get("geneid"),
+ "homologeneid": data_.get("homologeneid"),
+ "unigeneid": data_.get("unigeneid"),
+ "omim": data_.get("OMIM"),
+ "refseq_transcriptid": data_.get("refseq_transcriptid"),
+ "blatseq": data_.get("blatseq"),
+ "targetseq": data_.get("targetseq"),
+ "strand_probe": data_.get("Strand_Probe"),
+ "probe_set_target_region": data_.get("probe_set_target_region"),
+ "probe_set_specificity": data_.get("probe_set_specificity"),
+ "probe_set_blat_score": data_.get("probe_set_blat_score"),
+ "probe_set_blat_mb_start": data_.get("probe_set_blat_mb_start"),
+ "probe_set_blat_mb_end": data_.get("probe_set_blat_mb_end"),
+ "probe_set_strand": data_.get("probe_set_strand"),
+ "probe_set_note_by_rw": data_.get("probe_set_note_by_rw"),
+ "flag": data_.get("flag")
+ }
+ diff_data = {}
+ author = ((g.user_session.record.get(b"user_id") or b"").decode("utf-8")
+ or g.user_session.record.get("user_id") or "")
+ if (updated_probeset := update(
+ conn, "ProbeSet",
+ data=Probeset(**probeset_),
+ where=Probeset(id_=data_.get("id")))):
+ diff_data.update({"Probeset": diff_from_dict(old={
+ k: data_.get(f"old_{k}") for k, v in probeset_.items()
+ if v is not None}, new=probeset_)})
+ if diff_data:
+ diff_data.update({"probeset_name": data_.get("probeset_name")})
+ diff_data.update({"author": author})
+ diff_data.update({"resource_id": request.args.get('resource-id')})
+ diff_data.update({"timestamp": datetime.datetime.now().strftime(
+ "%Y-%m-%d %H:%M:%S")})
+ insert(conn,
+ table="metadata_audit",
+ data=MetadataAudit(dataset_id=data_.get("id"),
+ editor=author,
+ json_data=json.dumps(diff_data)))
+ return redirect(f"/datasets/traits/{name}"
+ f"?resource-id={request.args.get('resource-id')}")
+
diff --git a/wqflask/wqflask/resource_manager.py b/wqflask/wqflask/resource_manager.py
index 3371e59d..e338a22d 100644
--- a/wqflask/wqflask/resource_manager.py
+++ b/wqflask/wqflask/resource_manager.py
@@ -147,8 +147,7 @@ def view_resource(resource_id: str):
access_role=get_user_access_roles(
resource_id=resource_id,
user_id=user_id,
- gn_proxy_url=current_app.config.get("GN2_PROXY")),
- DataRole=DataRole, AdminRole=AdminRole)
+ gn_proxy_url=current_app.config.get("GN2_PROXY")))
@resource_management.route("/resources/<resource_id>/make-public",
diff --git a/wqflask/wqflask/show_trait/show_trait.py b/wqflask/wqflask/show_trait/show_trait.py
index c4d1ae1c..fa1206c9 100644
--- a/wqflask/wqflask/show_trait/show_trait.py
+++ b/wqflask/wqflask/show_trait/show_trait.py
@@ -20,15 +20,16 @@ from base import data_set
from utility import helper_functions
from utility.authentication_tools import check_owner_or_admin
from utility.tools import locate_ignore_error
+from utility.tools import GN_PROXY_URL
from utility.redis_tools import get_redis_conn, get_resource_id
-from utility.logger import getLogger
+from wqflask.access_roles import AdminRole
+from wqflask.access_roles import DataRole
+from wqflask.resource_manager import get_user_access_roles
Redis = get_redis_conn()
ONE_YEAR = 60 * 60 * 24 * 365
-logger = getLogger(__name__)
-
###############################################
#
# Todo: Put in security to ensure that user has permission to access
@@ -38,14 +39,11 @@ logger = getLogger(__name__)
class ShowTrait:
- def __init__(self, kw):
+ def __init__(self, user_id, kw):
if 'trait_id' in kw and kw['dataset'] != "Temp":
self.temp_trait = False
self.trait_id = kw['trait_id']
helper_functions.get_species_dataset_trait(self, kw)
- self.resource_id = get_resource_id(self.dataset, self.trait_id)
- self.admin_status = check_owner_or_admin(
- resource_id=self.resource_id)
elif 'group' in kw:
self.temp_trait = True
self.trait_id = "Temp_" + kw['species'] + "_" + kw['group'] + \
@@ -62,9 +60,6 @@ class ShowTrait:
self.this_trait = create_trait(dataset=self.dataset,
name=self.trait_id,
cellid=None)
-
- self.admin_status = check_owner_or_admin(
- dataset=self.dataset, trait_id=self.trait_id)
else:
self.temp_trait = True
self.trait_id = kw['trait_id']
@@ -75,11 +70,13 @@ class ShowTrait:
self.this_trait = create_trait(dataset=self.dataset,
name=self.trait_id,
cellid=None)
-
self.trait_vals = Redis.get(self.trait_id).split()
- self.admin_status = check_owner_or_admin(
- dataset=self.dataset, trait_id=self.trait_id)
-
+ self.resource_id = get_resource_id(self.dataset,
+ self.trait_id)
+ self.admin_status = get_user_access_roles(
+ user_id=user_id,
+ resource_id=(self.resource_id or ""),
+ gn_proxy_url=GN_PROXY_URL)
# ZS: Get verify/rna-seq link URLs
try:
blatsequence = self.this_trait.blatseq
@@ -525,10 +522,6 @@ class ShowTrait:
sample_group_type='primary',
header="%s Only" % (self.dataset.group.name))
self.sample_groups = (primary_samples,)
- print("\nttttttttttttttttttttttttttttttttttttttttttttt\n")
- print(self.sample_groups)
- print("\nttttttttttttttttttttttttttttttttttttttttttttt\n")
-
self.primary_sample_names = primary_sample_names
self.dataset.group.allsamples = all_samples_ordered
@@ -614,7 +607,6 @@ def get_nearest_marker(this_trait, this_db):
GenoFreeze.Id = GenoXRef.GenoFreezeId AND
GenoFreeze.Name = '{}'
ORDER BY ABS( Geno.Mb - {}) LIMIT 1""".format(this_chr, this_db.group.name + "Geno", this_mb)
- logger.sql(query)
result = g.db.execute(query).fetchall()
if result == []:
diff --git a/wqflask/wqflask/templates/admin/manage_resource.html b/wqflask/wqflask/templates/admin/manage_resource.html
index 613aa70e..64d4b6eb 100644
--- a/wqflask/wqflask/templates/admin/manage_resource.html
+++ b/wqflask/wqflask/templates/admin/manage_resource.html
@@ -3,29 +3,31 @@
{% block content %}
<!-- Start of body -->
<div class="container">
+ <section>
{{ flash_me() }}
- {% set DATA_ACCESS = access_role.get('data') %}
- {% set METADATA_ACCESS = access_role.get('metadata') %}
- {% set ADMIN_STATUS = access_role.get('admin') %}
- <h1>Resource Manager</h1>
- {% if resource_info.get('owner_id') != 'none'%}
- {% set user_details = resource_info.get('owner_details') %}
- <h3>
- Current Owner: {{ user_details.get('full_name') }}
- </h3>
- {% if user_details.get('organization') %}
- <h3>
- Organization: {{ user_details.get('organization')}}
- </h3>
- {% endif %}
- {% if DATA_ACCESS > DataRole.VIEW and ADMIN_STATUS > AdminRole.NOT_ADMIN %}
- <a class="btn btn-danger" target="_blank"
- href="/resource-management/resources/{{ resource_info.get('resource_id') }}/change-owner">
- Change Owner
- </a>
- {% endif %}
- {% endif %}
- </section>
+ {% set DATA_ACCESS = access_role.get('data') %}
+ {% set METADATA_ACCESS = access_role.get('metadata') %}
+ {% set ADMIN_STATUS = access_role.get('admin') %}
+      {# duplicate ADMIN_STATUS assignment removed: it is already set on the previous line #}
+ <h1>Resource Manager</h1>
+ {% if resource_info.get('owner_id') %}
+ {% set user_details = resource_info.get('owner_details') %}
+ <h3>
+ Current Owner: {{ user_details.get('full_name') }}
+ </h3>
+ {% if user_details.get('organization') %}
+ <h3>
+ Organization: {{ user_details.get('organization')}}
+ </h3>
+ {% endif %}
+ {% if DATA_ACCESS > DataRole.VIEW and ADMIN_STATUS > AdminRole.NOT_ADMIN %}
+ <a class="btn btn-danger" target="_blank"
+ href="/resource-management/resources/{{ resource_info.get('resource_id') }}/change-owner">
+ Change Owner
+ </a>
+ {% endif %}
+ {% endif %}
+ </section>
<section class="container" style="margin-top: 2em;">
<form class="container-fluid" action="/resource-management/resources/{{ resource_info.get('resource_id') }}/make-public" method="POST">
@@ -51,7 +53,7 @@
<label class="radio-inline">
<input type="radio" name="open_to_public" value="False" {{ 'checked' if not is_open_to_public }}>
No
- </label>
+ </label>
</div>
</div>
<div class="form-group" style="padding-left: 20px;">
@@ -98,25 +100,25 @@
</div>
{% endif %}
</form>
- </section>
+ </section>
-<!-- End of body -->
+ <!-- End of body -->
-{% endblock %}
-{% block js %}
+ {% endblock %}
+ {% block js %}
<script language="javascript" type="text/javascript" src="{{ url_for('js', filename='DataTables/js/jquery.dataTables.min.js') }}"></script>
<script type="text/javascript" charset="utf-8">
- $('#add_group_to_resource, #save_changes, #change_owner').click(function(){
- url = $(this).data("url");
- $('#manage_resource').attr("action", url)
- $('#manage_resource').submit()
- })
+ $('#add_group_to_resource, #save_changes, #change_owner').click(function(){
+ url = $(this).data("url");
+ $('#manage_resource').attr("action", url)
+ $('#manage_resource').submit()
+ })
- {% if group_masks|length > 0 %}
- $('#groups_table').dataTable({
- 'sDom': 'tr',
- });
- {% endif %}
+ {% if group_masks|length > 0 %}
+ $('#groups_table').dataTable({
+ 'sDom': 'tr',
+ });
+ {% endif %}
</script>
-{% endblock %}
+ {% endblock %}
diff --git a/wqflask/wqflask/templates/edit_phenotype.html b/wqflask/wqflask/templates/edit_phenotype.html
index 7a841793..c3cde391 100644
--- a/wqflask/wqflask/templates/edit_phenotype.html
+++ b/wqflask/wqflask/templates/edit_phenotype.html
@@ -62,8 +62,7 @@
</div>
{% endif %}
-
-<form id="edit-form" class="form-horizontal" method="post" action="/trait/update" enctype=multipart/form-data>
+<form id="edit-form" class="form-horizontal" method="post" action="/datasets/{{dataset_id}}/traits/{{ publish_xref.id_ }}?resource-id={{ resource_id }}" enctype='multipart/form-data'>
<h2 class="text-center">Trait Information:</h2>
<div class="form-group">
<label for="pubmed-id" class="col-sm-2 control-label">Pubmed ID:</label>
@@ -226,7 +225,6 @@
<input type = "file" class="col-sm-4 control-label" name = "file" />
</div>
<div class="controls center-block" style="width: max-content;">
- <input name="dataset-name" class="changed" type="hidden" value="{{ publish_xref.id_ }}"/>
<input name="inbred-set-id" class="changed" type="hidden" value="{{ publish_xref.inbred_set_id }}"/>
<input name="phenotype-id" class="changed" type="hidden" value="{{ publish_xref.phenotype_id }}"/>
<input name="comments" class="changed" type="hidden" value="{{ publish_xref.comments }}"/>
diff --git a/wqflask/wqflask/templates/edit_probeset.html b/wqflask/wqflask/templates/edit_probeset.html
index 85d49561..ab91b701 100644
--- a/wqflask/wqflask/templates/edit_probeset.html
+++ b/wqflask/wqflask/templates/edit_probeset.html
@@ -9,52 +9,52 @@ Submit Trait | Reset
<div class="container">
<details class="col-sm-12 col-md-10 col-lg-12">
- <summary>
- <h2>Update History</h2>
- </summary>
- <table class="table">
- <tbody>
- <tr>
- <th>Timestamp</th>
- <th>Editor</th>
- <th>Field</th>
- <th>Diff</th>
- </tr>
- {% set ns = namespace(display_cell=True) %}
+ <summary>
+ <h2>Update History</h2>
+ </summary>
+ <table class="table">
+ <tbody>
+ <tr>
+ <th>Timestamp</th>
+ <th>Editor</th>
+ <th>Field</th>
+ <th>Diff</th>
+ </tr>
+ {% set ns = namespace(display_cell=True) %}
- {% for timestamp, group in diff %}
- {% set ns.display_cell = True %}
- {% for i in group %}
- <tr>
- {% if ns.display_cell and i.timestamp == timestamp %}
+ {% for timestamp, group in diff %}
+ {% set ns.display_cell = True %}
+ {% for i in group %}
+ <tr>
+ {% if ns.display_cell and i.timestamp == timestamp %}
- {% set author = i.author %}
- {% set timestamp_ = i.timestamp %}
+ {% set author = i.author %}
+ {% set timestamp_ = i.timestamp %}
- {% else %}
+ {% else %}
- {% set author = "" %}
- {% set timestamp_ = "" %}
+ {% set author = "" %}
+ {% set timestamp_ = "" %}
- {% endif %}
- <td>{{ timestamp_ }}</td>
- <td>{{ author }}</td>
- <td>{{ i.diff.field }}</td>
- <td><pre>{{ i.diff.diff }}</pre></td>
- {% set ns.display_cell = False %}
- </tr>
- {% endfor %}
- {% endfor %}
- </tbody>
- </table>
+ {% endif %}
+ <td>{{ timestamp_ }}</td>
+ <td>{{ author }}</td>
+ <td>{{ i.diff.field }}</td>
+ <td><pre>{{ i.diff.diff }}</pre></td>
+ {% set ns.display_cell = False %}
+ </tr>
+ {% endfor %}
+ {% endfor %}
+ </tbody>
+ </table>
</details>
</div>
{% endif %}
-<form id="edit-form" class="form-horizontal" method="post" action="/probeset/update">
- <h2 class="text-center">Probeset Information:</h2>
+<form id="edit-form" class="form-horizontal" method="post" action="/datasets/traits/{{ name }}?resource-id={{ resource_id }}">
+ <h2 class="text-center">Probeset Information:</h2>
<div class="form-group">
<label for="symbol" class="col-sm-2 control-label">Symbol:</label>
<div class="col-sm-4">
diff --git a/wqflask/wqflask/templates/show_trait_details.html b/wqflask/wqflask/templates/show_trait_details.html
index 2a21dd24..3e59a3ee 100644
--- a/wqflask/wqflask/templates/show_trait_details.html
+++ b/wqflask/wqflask/templates/show_trait_details.html
@@ -234,16 +234,16 @@
{% endif %}
{% endif %}
<button type="button" id="view_in_gn1" class="btn btn-primary" title="View Trait in GN1" onclick="window.open('http://gn1.genenetwork.org/webqtl/main.py?cmd=show&db={{ this_trait.dataset.name }}&probeset={{ this_trait.name }}', '_blank')">Go to GN1</button>
- {% if admin_status == "owner" or admin_status == "edit-admins" or admin_status == "edit-access" %}
+ {% if admin_status.get('metadata', DataRole.VIEW) > DataRole.VIEW %}
{% if this_trait.dataset.type == 'Publish' %}
- <button type="button" id="edit_resource" class="btn btn-success" title="Edit Resource" onclick="window.open('/trait/{{ this_trait.name }}/edit/inbredset-id/{{ this_trait.dataset.id }}', '_blank')">Edit</button>
+ <button type="button" id="edit_resource" class="btn btn-success" title="Edit Resource" onclick="window.open('/datasets/{{ this_trait.dataset.id }}/traits/{{ this_trait.name }}?resource-id={{ resource_id }}', '_blank')">Edit</button>
{% endif %}
{% if this_trait.dataset.type == 'ProbeSet' %}
- <button type="button" id="edit_resource" class="btn btn-success" title="Edit Resource" onclick="window.open('/trait/edit/probeset-name/{{ this_trait.name }}', '_blank')">Edit</button>
+ <button type="button" id="edit_resource" class="btn btn-success" title="Edit Resource" onclick="window.open('/datasets/traits/{{ this_trait.name }}?resource-id={{ resource_id }}', '_blank')">Edit</button>
{% endif %}
- {% if admin_status == "owner" or admin_status == "edit-admins" or admin_status == "edit-access" %}
- <button type="button" id="edit_resource" class="btn btn-success" title="Edit Resource" onclick="window.open('./resources/manage?resource_id={{ resource_id }}', '_blank')">Edit Privileges</button>
+ {% if admin_status.get('metadata', DataRole.VIEW) > DataRole.VIEW %}
+ <button type="button" id="edit_resource" class="btn btn-success" title="Edit Privileges" onclick="window.open('/resource-management/resources/{{ resource_id }}', '_blank')">Edit Privileges</button>
{% endif %}
{% endif %}
</div>
diff --git a/wqflask/wqflask/user_session.py b/wqflask/wqflask/user_session.py
index 67e2e158..d3c4a62f 100644
--- a/wqflask/wqflask/user_session.py
+++ b/wqflask/wqflask/user_session.py
@@ -10,7 +10,6 @@ from flask import (Flask, g, render_template, url_for, request, make_response,
from wqflask import app
from utility import hmac
-#from utility.elasticsearch_tools import get_elasticsearch_connection
from utility.redis_tools import get_redis_conn, get_user_id, get_user_by_unique_column, set_user_attribute, get_user_collections, save_collections
Redis = get_redis_conn()
diff --git a/wqflask/wqflask/views.py b/wqflask/wqflask/views.py
index b0da1f21..220d9b87 100644
--- a/wqflask/wqflask/views.py
+++ b/wqflask/wqflask/views.py
@@ -4,7 +4,6 @@ import MySQLdb
import array
import base64
import csv
-import difflib
import datetime
import flask
import io # Todo: Use cStringIO?
@@ -20,8 +19,6 @@ import traceback
import uuid
import xlsxwriter
-from itertools import groupby
-from collections import namedtuple
from zipfile import ZipFile
from zipfile import ZIP_DEFLATED
@@ -30,19 +27,12 @@ from wqflask import app
from gn3.commands import run_cmd
from gn3.computations.gemma import generate_hash_of_string
from gn3.db import diff_from_dict
-from gn3.db import fetchall
-from gn3.db import fetchone
from gn3.db import insert
from gn3.db import update
from gn3.db.metadata_audit import MetadataAudit
from gn3.db.phenotypes import Phenotype
from gn3.db.phenotypes import Probeset
from gn3.db.phenotypes import Publication
-from gn3.db.phenotypes import PublishXRef
-from gn3.db.phenotypes import probeset_mapping
-# from gn3.db.traits import get_trait_csv_sample_data
-# from gn3.db.traits import update_sample_data
-
from flask import current_app
from flask import g
@@ -426,289 +416,6 @@ def submit_trait_form():
version=GN_VERSION)
-@app.route("/trait/<name>/edit/inbredset-id/<inbredset_id>")
-@edit_access_required
-def edit_phenotype(name, inbredset_id):
- conn = MySQLdb.Connect(db=current_app.config.get("DB_NAME"),
- user=current_app.config.get("DB_USER"),
- passwd=current_app.config.get("DB_PASS"),
- host=current_app.config.get("DB_HOST"))
- publish_xref = fetchone(
- conn=conn,
- table="PublishXRef",
- where=PublishXRef(id_=name,
- inbred_set_id=inbredset_id))
- phenotype_ = fetchone(
- conn=conn,
- table="Phenotype",
- where=Phenotype(id_=publish_xref.phenotype_id))
- publication_ = fetchone(
- conn=conn,
- table="Publication",
- where=Publication(id_=publish_xref.publication_id))
- json_data = fetchall(
- conn,
- "metadata_audit",
- where=MetadataAudit(dataset_id=publish_xref.id_))
-
- Edit = namedtuple("Edit", ["field", "old", "new", "diff"])
- Diff = namedtuple("Diff", ["author", "diff", "timestamp"])
- diff_data = []
- for data in json_data:
- json_ = json.loads(data.json_data)
- timestamp = json_.get("timestamp")
- author = json_.get("author")
- for key, value in json_.items():
- if isinstance(value, dict):
- for field, data_ in value.items():
- diff_data.append(
- Diff(author=author,
- diff=Edit(field,
- data_.get("old"),
- data_.get("new"),
- "\n".join(difflib.ndiff(
- [data_.get("old")],
- [data_.get("new")]))),
- timestamp=timestamp))
- diff_data_ = None
- if len(diff_data) > 0:
- diff_data_ = groupby(diff_data, lambda x: x.timestamp)
- return render_template(
- "edit_phenotype.html",
- diff=diff_data_,
- publish_xref=publish_xref,
- phenotype=phenotype_,
- publication=publication_,
- version=GN_VERSION,
- )
-
-
-@app.route("/trait/edit/probeset-name/<dataset_name>")
-@edit_access_required
-def edit_probeset(dataset_name):
- conn = MySQLdb.Connect(db=current_app.config.get("DB_NAME"),
- user=current_app.config.get("DB_USER"),
- passwd=current_app.config.get("DB_PASS"),
- host=current_app.config.get("DB_HOST"))
- probeset_ = fetchone(conn=conn,
- table="ProbeSet",
- columns=list(probeset_mapping.values()),
- where=Probeset(name=dataset_name))
- json_data = fetchall(
- conn,
- "metadata_audit",
- where=MetadataAudit(dataset_id=probeset_.id_))
- Edit = namedtuple("Edit", ["field", "old", "new", "diff"])
- Diff = namedtuple("Diff", ["author", "diff", "timestamp"])
- diff_data = []
- for data in json_data:
- json_ = json.loads(data.json_data)
- timestamp = json_.get("timestamp")
- author = json_.get("author")
- for key, value in json_.items():
- if isinstance(value, dict):
- for field, data_ in value.items():
- diff_data.append(
- Diff(author=author,
- diff=Edit(field,
- data_.get("old"),
- data_.get("new"),
- "\n".join(difflib.ndiff(
- [data_.get("old")],
- [data_.get("new")]))),
- timestamp=timestamp))
- diff_data_ = None
- if len(diff_data) > 0:
- diff_data_ = groupby(diff_data, lambda x: x.timestamp)
- return render_template(
- "edit_probeset.html",
- diff=diff_data_,
- probeset=probeset_)
-
-
-@app.route("/trait/update", methods=["POST"])
-@edit_access_required
-def update_phenotype():
- conn = MySQLdb.Connect(db=current_app.config.get("DB_NAME"),
- user=current_app.config.get("DB_USER"),
- passwd=current_app.config.get("DB_PASS"),
- host=current_app.config.get("DB_HOST"))
- data_ = request.form.to_dict()
- TMPDIR = current_app.config.get("TMPDIR")
- author = g.user_session.record.get(b'user_name')
- if 'file' not in request.files:
- flash("No sample-data has been uploaded", "warning")
- else:
- file_ = request.files['file']
- trait_name = str(data_.get('dataset-name'))
- phenotype_id = str(data_.get('phenotype-id', 35))
- SAMPLE_DATADIR = os.path.join(TMPDIR, "sample-data")
- if not os.path.exists(SAMPLE_DATADIR):
- os.makedirs(SAMPLE_DATADIR)
- if not os.path.exists(os.path.join(SAMPLE_DATADIR,
- "diffs")):
- os.makedirs(os.path.join(SAMPLE_DATADIR,
- "diffs"))
- if not os.path.exists(os.path.join(SAMPLE_DATADIR,
- "updated")):
- os.makedirs(os.path.join(SAMPLE_DATADIR,
- "updated"))
- current_time = str(datetime.datetime.now().isoformat())
- new_file_name = (os.path.join(TMPDIR,
- "sample-data/updated/",
- (f"{author.decode('utf-8')}."
- f"{trait_name}.{phenotype_id}."
- f"{current_time}.csv")))
- uploaded_file_name = (os.path.join(
- TMPDIR,
- "sample-data/updated/",
- (f"updated.{author.decode('utf-8')}."
- f"{trait_name}.{phenotype_id}."
- f"{current_time}.csv")))
- file_.save(new_file_name)
- publishdata_id = ""
- lines = []
- with open(new_file_name, "r") as f:
- lines = f.read()
- first_line = lines.split('\n', 1)[0]
- publishdata_id = first_line.split("Id:")[-1].strip()
- with open(new_file_name, "w") as f:
- f.write(lines.split("\n\n")[-1])
- csv_ = get_trait_csv_sample_data(conn=conn,
- trait_name=str(trait_name),
- phenotype_id=str(phenotype_id))
- with open(uploaded_file_name, "w") as f_:
- f_.write(csv_.split("\n\n")[-1])
- r = run_cmd(cmd=("csvdiff "
- f"'{uploaded_file_name}' '{new_file_name}' "
- "--format json"))
- diff_output = (f"{TMPDIR}/sample-data/diffs/"
- f"{trait_name}.{author.decode('utf-8')}."
- f"{phenotype_id}.{current_time}.json")
- with open(diff_output, "w") as f:
- dict_ = json.loads(r.get("output"))
- dict_.update({
- "author": author.decode('utf-8'),
- "publishdata_id": publishdata_id,
- "dataset_id": data_.get("dataset-name"),
- "timestamp": datetime.datetime.now().strftime(
- "%Y-%m-%d %H:%M:%S")
- })
- f.write(json.dumps(dict_))
- flash("Sample-data has been successfully uploaded", "success")
- # Run updates:
- phenotype_ = {
- "pre_pub_description": data_.get("pre-pub-desc"),
- "post_pub_description": data_.get("post-pub-desc"),
- "original_description": data_.get("orig-desc"),
- "units": data_.get("units"),
- "pre_pub_abbreviation": data_.get("pre-pub-abbrev"),
- "post_pub_abbreviation": data_.get("post-pub-abbrev"),
- "lab_code": data_.get("labcode"),
- "submitter": data_.get("submitter"),
- "owner": data_.get("owner"),
- "authorized_users": data_.get("authorized-users"),
- }
- updated_phenotypes = update(
- conn, "Phenotype",
- data=Phenotype(**phenotype_),
- where=Phenotype(id_=data_.get("phenotype-id")))
- diff_data = {}
- if updated_phenotypes:
- diff_data.update({"Phenotype": diff_from_dict(old={
- k: data_.get(f"old_{k}") for k, v in phenotype_.items()
- if v is not None}, new=phenotype_)})
- publication_ = {
- "abstract": data_.get("abstract"),
- "authors": data_.get("authors"),
- "title": data_.get("title"),
- "journal": data_.get("journal"),
- "volume": data_.get("volume"),
- "pages": data_.get("pages"),
- "month": data_.get("month"),
- "year": data_.get("year")
- }
- updated_publications = update(
- conn, "Publication",
- data=Publication(**publication_),
- where=Publication(id_=data_.get("pubmed-id",
- data_.get("old_id_"))))
- if updated_publications:
- diff_data.update({"Publication": diff_from_dict(old={
- k: data_.get(f"old_{k}") for k, v in publication_.items()
- if v is not None}, new=publication_)})
- if diff_data:
- diff_data.update({"dataset_id": data_.get("dataset-name")})
- diff_data.update({"author": author.decode('utf-8')})
- diff_data.update({"timestamp": datetime.datetime.now().strftime(
- "%Y-%m-%d %H:%M:%S")})
- insert(conn,
- table="metadata_audit",
- data=MetadataAudit(dataset_id=data_.get("dataset-name"),
- editor=author.decode("utf-8"),
- json_data=json.dumps(diff_data)))
- flash(f"Diff-data: \n{diff_data}\nhas been uploaded", "success")
- return redirect(f"/trait/{data_.get('dataset-name')}"
- f"/edit/inbredset-id/{data_.get('inbred-set-id')}")
-
-
-@app.route("/probeset/update", methods=["POST"])
-@edit_access_required
-def update_probeset():
- conn = MySQLdb.Connect(db=current_app.config.get("DB_NAME"),
- user=current_app.config.get("DB_USER"),
- passwd=current_app.config.get("DB_PASS"),
- host=current_app.config.get("DB_HOST"))
- data_ = request.form.to_dict()
- probeset_ = {
- "id_": data_.get("id"),
- "symbol": data_.get("symbol"),
- "description": data_.get("description"),
- "probe_target_description": data_.get("probe_target_description"),
- "chr_": data_.get("chr"),
- "mb": data_.get("mb"),
- "alias": data_.get("alias"),
- "geneid": data_.get("geneid"),
- "homologeneid": data_.get("homologeneid"),
- "unigeneid": data_.get("unigeneid"),
- "omim": data_.get("OMIM"),
- "refseq_transcriptid": data_.get("refseq_transcriptid"),
- "blatseq": data_.get("blatseq"),
- "targetseq": data_.get("targetseq"),
- "strand_probe": data_.get("Strand_Probe"),
- "probe_set_target_region": data_.get("probe_set_target_region"),
- "probe_set_specificity": data_.get("probe_set_specificity"),
- "probe_set_blat_score": data_.get("probe_set_blat_score"),
- "probe_set_blat_mb_start": data_.get("probe_set_blat_mb_start"),
- "probe_set_blat_mb_end": data_.get("probe_set_blat_mb_end"),
- "probe_set_strand": data_.get("probe_set_strand"),
- "probe_set_note_by_rw": data_.get("probe_set_note_by_rw"),
- "flag": data_.get("flag")
- }
- updated_probeset = update(
- conn, "ProbeSet",
- data=Probeset(**probeset_),
- where=Probeset(id_=data_.get("id")))
-
- diff_data = {}
- author = g.user_session.record.get(b'user_name')
- if updated_probeset:
- diff_data.update({"Probeset": diff_from_dict(old={
- k: data_.get(f"old_{k}") for k, v in probeset_.items()
- if v is not None}, new=probeset_)})
- if diff_data:
- diff_data.update({"probeset_name": data_.get("probeset_name")})
- diff_data.update({"author": author.decode('utf-8')})
- diff_data.update({"timestamp": datetime.datetime.now().strftime(
- "%Y-%m-%d %H:%M:%S")})
- insert(conn,
- table="metadata_audit",
- data=MetadataAudit(dataset_id=data_.get("id"),
- editor=author.decode("utf-8"),
- json_data=json.dumps(diff_data)))
- return redirect(f"/trait/edit/probeset-name/{data_.get('probeset_name')}")
-
-
@app.route("/create_temp_trait", methods=('POST',))
def create_temp_trait():
logger.info(request.url)
@@ -843,8 +550,10 @@ def export_perm_data():
@app.route("/show_temp_trait", methods=('POST',))
def show_temp_trait_page():
- logger.info(request.url)
- template_vars = show_trait.ShowTrait(request.form)
+ user_id = ((g.user_session.record.get(b"user_id") or b"").decode("utf-8")
+ or g.user_session.record.get("user_id") or "")
+ template_vars = show_trait.ShowTrait(user_id=user_id,
+ kw=request.form)
template_vars.js_data = json.dumps(template_vars.js_data,
default=json_default_handler,
indent=" ")
@@ -853,8 +562,10 @@ def show_temp_trait_page():
@app.route("/show_trait")
def show_trait_page():
- logger.info(request.url)
- template_vars = show_trait.ShowTrait(request.args)
+ user_id = ((g.user_session.record.get(b"user_id") or b"").decode("utf-8")
+ or g.user_session.record.get("user_id") or "")
+ template_vars = show_trait.ShowTrait(user_id=user_id,
+ kw=request.args)
template_vars.js_data = json.dumps(template_vars.js_data,
default=json_default_handler,
indent=" ")