author     Frederick Muriuki Muriithi    2023-08-04 10:10:28 +0300
committer  Frederick Muriuki Muriithi    2023-08-04 10:20:09 +0300
commit     8b7c598407a5fea9a3d78473e72df87606998cd4 (patch)
tree       8526433a17eca6b511feb082a0574f9b15cb9469
parent     f7fcbbcc014686ac597b783a8dcb38b43024b9d6 (diff)
download   gn-auth-8b7c598407a5fea9a3d78473e72df87606998cd4.tar.gz
Copy over files from GN3 repository.
-rw-r--r--  .gitignore | 6
-rw-r--r--  gn_auth/__init__.py | 0
-rw-r--r--  gn_auth/auth/__init__.py | 5
-rw-r--r--  gn_auth/auth/authentication/__init__.py | 24
-rw-r--r--  gn_auth/auth/authentication/exceptions.py | 4
-rw-r--r--  gn_auth/auth/authentication/oauth2/__init__.py | 0
-rw-r--r--  gn_auth/auth/authentication/oauth2/endpoints/__init__.py | 0
-rw-r--r--  gn_auth/auth/authentication/oauth2/endpoints/introspection.py | 48
-rw-r--r--  gn_auth/auth/authentication/oauth2/endpoints/revocation.py | 22
-rw-r--r--  gn_auth/auth/authentication/oauth2/endpoints/utilities.py | 30
-rw-r--r--  gn_auth/auth/authentication/oauth2/grants/__init__.py | 0
-rw-r--r--  gn_auth/auth/authentication/oauth2/grants/authorisation_code_grant.py | 85
-rw-r--r--  gn_auth/auth/authentication/oauth2/grants/password_grant.py | 22
-rw-r--r--  gn_auth/auth/authentication/oauth2/models/__init__.py | 0
-rw-r--r--  gn_auth/auth/authentication/oauth2/models/authorization_code.py | 93
-rw-r--r--  gn_auth/auth/authentication/oauth2/models/oauth2client.py | 234
-rw-r--r--  gn_auth/auth/authentication/oauth2/models/oauth2token.py | 132
-rw-r--r--  gn_auth/auth/authentication/oauth2/resource_server.py | 19
-rw-r--r--  gn_auth/auth/authentication/oauth2/server.py | 72
-rw-r--r--  gn_auth/auth/authentication/oauth2/views.py | 104
-rw-r--r--  gn_auth/auth/authentication/users.py | 128
-rw-r--r--  gn_auth/auth/authorisation/__init__.py | 2
-rw-r--r--  gn_auth/auth/authorisation/checks.py | 70
-rw-r--r--  gn_auth/auth/authorisation/data/__init__.py | 0
-rw-r--r--  gn_auth/auth/authorisation/data/genotypes.py | 96
-rw-r--r--  gn_auth/auth/authorisation/data/mrna.py | 100
-rw-r--r--  gn_auth/auth/authorisation/data/phenotypes.py | 140
-rw-r--r--  gn_auth/auth/authorisation/data/views.py | 310
-rw-r--r--  gn_auth/auth/authorisation/errors.py | 42
-rw-r--r--  gn_auth/auth/authorisation/groups/__init__.py | 3
-rw-r--r--  gn_auth/auth/authorisation/groups/data.py | 106
-rw-r--r--  gn_auth/auth/authorisation/groups/models.py | 400
-rw-r--r--  gn_auth/auth/authorisation/groups/views.py | 431
-rw-r--r--  gn_auth/auth/authorisation/privileges.py | 47
-rw-r--r--  gn_auth/auth/authorisation/resources/__init__.py | 2
-rw-r--r--  gn_auth/auth/authorisation/resources/checks.py | 47
-rw-r--r--  gn_auth/auth/authorisation/resources/models.py | 579
-rw-r--r--  gn_auth/auth/authorisation/resources/views.py | 272
-rw-r--r--  gn_auth/auth/authorisation/roles/__init__.py | 3
-rw-r--r--  gn_auth/auth/authorisation/roles/models.py | 161
-rw-r--r--  gn_auth/auth/authorisation/roles/views.py | 26
-rw-r--r--  gn_auth/auth/authorisation/users/__init__.py | 0
-rw-r--r--  gn_auth/auth/authorisation/users/admin/__init__.py | 2
-rw-r--r--  gn_auth/auth/authorisation/users/admin/ui.py | 27
-rw-r--r--  gn_auth/auth/authorisation/users/admin/views.py | 230
-rw-r--r--  gn_auth/auth/authorisation/users/collections/__init__.py | 1
-rw-r--r--  gn_auth/auth/authorisation/users/collections/models.py | 269
-rw-r--r--  gn_auth/auth/authorisation/users/collections/views.py | 239
-rw-r--r--  gn_auth/auth/authorisation/users/masquerade/__init__.py | 1
-rw-r--r--  gn_auth/auth/authorisation/users/masquerade/models.py | 67
-rw-r--r--  gn_auth/auth/authorisation/users/masquerade/views.py | 48
-rw-r--r--  gn_auth/auth/authorisation/users/models.py | 66
-rw-r--r--  gn_auth/auth/authorisation/users/views.py | 176
-rw-r--r--  gn_auth/auth/db.py | 78
-rw-r--r--  gn_auth/auth/db_utils.py | 14
-rw-r--r--  gn_auth/auth/dictify.py | 12
-rw-r--r--  gn_auth/auth/views.py | 21
-rw-r--r--  migrations/auth/20221103_01_js9ub-initialise-the-auth-entic-oris-ation-database.py | 19
-rw-r--r--  migrations/auth/20221103_02_sGrIs-create-user-credentials-table.py | 20
-rw-r--r--  migrations/auth/20221108_01_CoxYh-create-the-groups-table.py | 19
-rw-r--r--  migrations/auth/20221108_02_wxTr9-create-privileges-table.py | 18
-rw-r--r--  migrations/auth/20221108_03_Pbhb1-create-resource-categories-table.py | 19
-rw-r--r--  migrations/auth/20221108_04_CKcSL-init-data-in-resource-categories-table.py | 25
-rw-r--r--  migrations/auth/20221109_01_HbD5F-add-resource-meta-field-to-resource-categories-field.py | 17
-rw-r--r--  migrations/auth/20221110_01_WtZ1I-create-resources-table.py | 26
-rw-r--r--  migrations/auth/20221110_05_BaNtL-create-roles-table.py | 19
-rw-r--r--  migrations/auth/20221110_06_Pq2kT-create-generic-roles-table.py | 24
-rw-r--r--  migrations/auth/20221110_07_7WGa1-create-role-privileges-table.py | 29
-rw-r--r--  migrations/auth/20221110_08_23psB-add-privilege-category-and-privilege-description-columns-to-privileges-table.py | 22
-rw-r--r--  migrations/auth/20221113_01_7M0hv-enumerate-initial-privileges.py | 66
-rw-r--r--  migrations/auth/20221114_01_n8gsF-create-generic-role-privileges-table.py | 35
-rw-r--r--  migrations/auth/20221114_02_DKKjn-drop-generic-role-tables.py | 41
-rw-r--r--  migrations/auth/20221114_03_PtWjc-create-group-roles-table.py | 29
-rw-r--r--  migrations/auth/20221114_04_tLUzB-initialise-basic-roles.py | 56
-rw-r--r--  migrations/auth/20221114_05_hQun6-create-user-roles-table.py | 29
-rw-r--r--  migrations/auth/20221116_01_nKUmX-add-privileges-to-group-leader-role.py | 35
-rw-r--r--  migrations/auth/20221117_01_RDlfx-modify-group-roles-add-group-role-id.py | 52
-rw-r--r--  migrations/auth/20221117_02_fmuZh-create-group-users-table.py | 25
-rw-r--r--  migrations/auth/20221206_01_BbeF9-create-group-user-roles-on-resources-table.py | 39
-rw-r--r--  migrations/auth/20221208_01_sSdHz-add-public-column-to-resources-table.py | 16
-rw-r--r--  migrations/auth/20221219_01_CI3tN-create-oauth2-clients-table.py | 25
-rw-r--r--  migrations/auth/20221219_02_buSEU-create-oauth2-tokens-table.py | 31
-rw-r--r--  migrations/auth/20221219_03_PcTrb-create-authorisation-code-table.py | 31
-rw-r--r--  migrations/auth/20230111_01_Wd6IZ-remove-create-group-privilege-from-group-leader.py | 40
-rw-r--r--  migrations/auth/20230116_01_KwuJ3-rework-privileges-schema.py | 111
-rw-r--r--  migrations/auth/20230207_01_r0bkZ-create-group-join-requests-table.py | 29
-rw-r--r--  migrations/auth/20230210_01_8xMa1-system-admin-privileges-for-data-distribution.py | 22
-rw-r--r--  migrations/auth/20230210_02_lDK14-create-system-admin-role.py | 38
-rw-r--r--  migrations/auth/20230306_01_pRfxl-add-system-user-list-privilege.py | 26
-rw-r--r--  migrations/auth/20230306_02_7GnRY-add-system-user-list-privilege-to-system-administrator-and-group-leader-roles.py | 42
-rw-r--r--  migrations/auth/20230322_01_0dDZR-create-linked-phenotype-data-table.py | 30
-rw-r--r--  migrations/auth/20230322_02_Ll854-create-phenotype-resources-table.py | 29
-rw-r--r--  migrations/auth/20230404_01_VKxXg-create-linked-genotype-data-table.py | 29
-rw-r--r--  migrations/auth/20230404_02_la33P-create-genotype-resources-table.py | 29
-rw-r--r--  migrations/auth/20230410_01_8mwaf-create-linked-mrna-data-table.py | 30
-rw-r--r--  migrations/auth/20230410_02_WZqSf-create-mrna-resources-table.py | 28
-rw-r--r--  tests/__init__.py | 0
-rw-r--r--  tests/unit/__init__.py | 0
-rw-r--r--  tests/unit/auth/__init__.py | 0
-rw-r--r--  tests/unit/auth/conftest.py | 24
-rw-r--r--  tests/unit/auth/fixtures/__init__.py | 8
-rw-r--r--  tests/unit/auth/fixtures/group_fixtures.py | 147
-rw-r--r--  tests/unit/auth/fixtures/migration_fixtures.py | 51
-rw-r--r--  tests/unit/auth/fixtures/oauth2_client_fixtures.py | 51
-rw-r--r--  tests/unit/auth/fixtures/resource_fixtures.py | 25
-rw-r--r--  tests/unit/auth/fixtures/role_fixtures.py | 45
-rw-r--r--  tests/unit/auth/fixtures/user_fixtures.py | 66
-rw-r--r--  tests/unit/auth/test_credentials.py | 100
-rw-r--r--  tests/unit/auth/test_groups.py | 168
-rw-r--r--  tests/unit/auth/test_migrations_add_data_to_table.py | 79
-rw-r--r--  tests/unit/auth/test_migrations_add_remove_columns.py | 116
-rw-r--r--  tests/unit/auth/test_migrations_create_tables.py | 91
-rw-r--r--  tests/unit/auth/test_migrations_drop_tables.py | 63
-rw-r--r--  tests/unit/auth/test_migrations_indexes.py | 97
-rw-r--r--  tests/unit/auth/test_migrations_init_data_in_resource_categories_table.py | 60
-rw-r--r--  tests/unit/auth/test_migrations_insert_data_into_empty_table.py | 77
-rw-r--r--  tests/unit/auth/test_privileges.py | 46
-rw-r--r--  tests/unit/auth/test_resources.py | 117
-rw-r--r--  tests/unit/auth/test_roles.py | 123
-rw-r--r--  tests/unit/auth/test_token.py | 62
-rw-r--r--  tests/unit/conftest.py | 35
121 files changed, 8016 insertions, 1 deletions
diff --git a/.gitignore b/.gitignore
index 565bfbc..d64ca2c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,5 @@
-/**/*~ \ No newline at end of file
+# emacs temporary files
+/**/*~
+
+# yoyo configs
+/**/yoyo*.ini \ No newline at end of file
diff --git a/gn_auth/__init__.py b/gn_auth/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/gn_auth/__init__.py
diff --git a/gn_auth/auth/__init__.py b/gn_auth/auth/__init__.py
new file mode 100644
index 0000000..a28498d
--- /dev/null
+++ b/gn_auth/auth/__init__.py
@@ -0,0 +1,5 @@
+"""Top-Level `Auth` module"""
+from . import authorisation
+from . import authentication
+
+from .views import oauth2
diff --git a/gn_auth/auth/authentication/__init__.py b/gn_auth/auth/authentication/__init__.py
new file mode 100644
index 0000000..42ceacb
--- /dev/null
+++ b/gn_auth/auth/authentication/__init__.py
@@ -0,0 +1,24 @@
+"""Handle authentication requests"""
+
+import bcrypt
+
+def credentials_in_database(cursor, email: str, password: str) -> bool:
+ """Check whether credentials are in the database."""
+ if len(email.strip()) == 0 or len(password.strip()) == 0:
+ return False
+
+ cursor.execute(
+ ("SELECT "
+ "users.email, user_credentials.password "
+ "FROM users LEFT JOIN user_credentials "
+ "ON users.user_id = user_credentials.user_id "
+ "WHERE users.email = :email"),
+ {"email": email})
+ results = cursor.fetchall()
+ if len(results) == 0:
+ return False
+
+ assert len(results) == 1, "Expected one row."
+ row = results[0]
+ return (email == row[0] and
+ bcrypt.checkpw(password.encode("utf-8"), row[1]))
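Usage sketch (not part of this commit): `credentials_in_database` only needs a cursor whose rows support positional access and the `:email` named-parameter style, so a plain `sqlite3` connection to the auth database works. The database path below is a placeholder, and the import follows the gn_auth module path added by this commit.

    import sqlite3

    from gn_auth.auth.authentication import credentials_in_database

    def can_log_in(auth_db_path: str, email: str, password: str) -> bool:
        """Check the given credentials against the auth database."""
        conn = sqlite3.connect(auth_db_path)
        try:
            # The stored password is a bcrypt hash, compared by checkpw() above.
            return credentials_in_database(conn.cursor(), email, password)
        finally:
            conn.close()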
diff --git a/gn_auth/auth/authentication/exceptions.py b/gn_auth/auth/authentication/exceptions.py
new file mode 100644
index 0000000..c31e691
--- /dev/null
+++ b/gn_auth/auth/authentication/exceptions.py
@@ -0,0 +1,4 @@
+"""Exceptions for authentication"""
+
+class AuthenticationError(Exception):
+ """Base exception class for `gn3.auth.authentication` package."""
diff --git a/gn_auth/auth/authentication/oauth2/__init__.py b/gn_auth/auth/authentication/oauth2/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/gn_auth/auth/authentication/oauth2/__init__.py
diff --git a/gn_auth/auth/authentication/oauth2/endpoints/__init__.py b/gn_auth/auth/authentication/oauth2/endpoints/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/gn_auth/auth/authentication/oauth2/endpoints/__init__.py
diff --git a/gn_auth/auth/authentication/oauth2/endpoints/introspection.py b/gn_auth/auth/authentication/oauth2/endpoints/introspection.py
new file mode 100644
index 0000000..a567363
--- /dev/null
+++ b/gn_auth/auth/authentication/oauth2/endpoints/introspection.py
@@ -0,0 +1,48 @@
+"""Handle introspection of tokens."""
+import datetime
+from urllib.parse import urlparse
+
+from flask import request as flask_request
+from authlib.oauth2.rfc7662 import (
+ IntrospectionEndpoint as _IntrospectionEndpoint)
+
+from gn3.auth.authentication.oauth2.models.oauth2token import OAuth2Token
+
+from .utilities import query_token as _query_token
+
+def get_token_user_sub(token: OAuth2Token) -> str:# pylint: disable=[unused-argument]
+ """
+ Return the token's subject as defined in
+ https://datatracker.ietf.org/doc/html/rfc7519#section-4.1.2
+ """
+ ## For now a dummy return to prevent issues.
+ return "sub"
+
+class IntrospectionEndpoint(_IntrospectionEndpoint):
+ """Introspect token."""
+ def query_token(self, token_string: str, token_type_hint: str):
+ """Query the token."""
+ return _query_token(self, token_string, token_type_hint)
+
+ def introspect_token(self, token: OAuth2Token) -> dict:# pylint: disable=[no-self-use]
+ """Return the introspection information."""
+ url = urlparse(flask_request.url)
+ return {
+ "active": True,
+ "scope": token.get_scope(),
+ "client_id": token.client.client_id,
+ "username": token.user.name,
+ "token_type": token.token_type,
+ "exp": int(token.expires_at.timestamp()),
+ "iat": int(token.issued_at.timestamp()),
+ "nbf": int(
+ (token.issued_at - datetime.timedelta(seconds=120)).timestamp()),
+ # "sub": get_token_user_sub(token),
+ "aud": token.client.client_id,
+ "iss": f"{url.scheme}://{url.netloc}",
+ "jti": token.token_id
+ }
+
+ def check_permission(self, token, client, request):# pylint: disable=[unused-argument, no-self-use]
+ """Check that the client has permission to introspect token."""
+ return client.client_type == "internal"
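A worked sketch of the timestamp claims built above (values are illustrative, not from this commit): `exp` comes from the token's `expires_at`, `iat` from `issued_at`, and `nbf` is backdated by two minutes, presumably to tolerate clock skew between servers.

    import datetime

    issued_at = datetime.datetime(2023, 8, 4, 10, 0, 0)  # illustrative value
    expires_in = 3600                                     # seconds, illustrative
    iat = int(issued_at.timestamp())
    exp = int((issued_at + datetime.timedelta(seconds=expires_in)).timestamp())
    nbf = int((issued_at - datetime.timedelta(seconds=120)).timestamp())
    assert exp - iat == expires_in and iat - nbf == 120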
diff --git a/gn_auth/auth/authentication/oauth2/endpoints/revocation.py b/gn_auth/auth/authentication/oauth2/endpoints/revocation.py
new file mode 100644
index 0000000..b8517b6
--- /dev/null
+++ b/gn_auth/auth/authentication/oauth2/endpoints/revocation.py
@@ -0,0 +1,22 @@
+"""Handle token revocation."""
+
+from flask import current_app
+from authlib.oauth2.rfc7009 import RevocationEndpoint as _RevocationEndpoint
+
+from gn3.auth import db
+from gn3.auth.authentication.oauth2.models.oauth2token import (
+ save_token, OAuth2Token, revoke_token)
+
+from .utilities import query_token as _query_token
+
+class RevocationEndpoint(_RevocationEndpoint):
+ """Revoke the tokens"""
+ ENDPOINT_NAME = "revoke"
+ def query_token(self, token_string: str, token_type_hint: str):
+ """Query the token."""
+ return _query_token(self, token_string, token_type_hint)
+
+ def revoke_token(self, token: OAuth2Token, request):
+ """Revoke token `token`."""
+ with db.connection(current_app.config["AUTH_DB"]) as conn:
+ save_token(conn, revoke_token(token))
diff --git a/gn_auth/auth/authentication/oauth2/endpoints/utilities.py b/gn_auth/auth/authentication/oauth2/endpoints/utilities.py
new file mode 100644
index 0000000..299f151
--- /dev/null
+++ b/gn_auth/auth/authentication/oauth2/endpoints/utilities.py
@@ -0,0 +1,30 @@
+"""endpoint utilities"""
+from typing import Any, Optional
+
+from flask import current_app
+from pymonad.maybe import Nothing
+
+from gn3.auth import db
+from gn3.auth.authentication.oauth2.models.oauth2token import (
+ OAuth2Token, token_by_access_token, token_by_refresh_token)
+
+def query_token(# pylint: disable=[unused-argument]
+ endpoint_object: Any, token_str: str, token_type_hint) -> Optional[
+ OAuth2Token]:
+ """Retrieve the token from the database."""
+ __identity__ = lambda val: val
+ token = Nothing
+ with db.connection(current_app.config["AUTH_DB"]) as conn:
+ if token_type_hint == "access_token":
+ token = token_by_access_token(conn, token_str)
+        if token_type_hint == "refresh_token":
+ token = token_by_refresh_token(conn, token_str)
+
+ return token.maybe(
+ token_by_access_token(conn, token_str).maybe(
+ token_by_refresh_token(conn, token_str).maybe(
+ None, __identity__),
+ __identity__),
+ __identity__)
+
+ return None
diff --git a/gn_auth/auth/authentication/oauth2/grants/__init__.py b/gn_auth/auth/authentication/oauth2/grants/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/gn_auth/auth/authentication/oauth2/grants/__init__.py
diff --git a/gn_auth/auth/authentication/oauth2/grants/authorisation_code_grant.py b/gn_auth/auth/authentication/oauth2/grants/authorisation_code_grant.py
new file mode 100644
index 0000000..f80d02e
--- /dev/null
+++ b/gn_auth/auth/authentication/oauth2/grants/authorisation_code_grant.py
@@ -0,0 +1,85 @@
+"""Classes and function for Authorisation Code flow."""
+import uuid
+import string
+import random
+from typing import Optional
+from datetime import datetime
+
+from flask import current_app as app
+from authlib.oauth2.rfc6749 import grants
+from authlib.oauth2.rfc7636 import create_s256_code_challenge
+
+from gn3.auth import db
+from gn3.auth.db_utils import with_db_connection
+from gn3.auth.authentication.users import User
+
+from ..models.oauth2client import OAuth2Client
+from ..models.authorization_code import (
+ AuthorisationCode, authorisation_code, save_authorisation_code)
+
+class AuthorisationCodeGrant(grants.AuthorizationCodeGrant):
+ """Implement the 'Authorisation Code' grant."""
+ TOKEN_ENDPOINT_AUTH_METHODS: list[str] = [
+ "client_secret_basic", "client_secret_post"]
+ AUTHORIZATION_CODE_LENGTH: int = 48
+ TOKEN_ENDPOINT_HTTP_METHODS = ['POST']
+ GRANT_TYPE = "authorization_code"
+ RESPONSE_TYPES = {'code'}
+
+ def save_authorization_code(self, code, request):
+ """Persist the authorisation code to database."""
+ client = request.client
+ nonce = "".join(random.sample(string.ascii_letters + string.digits,
+ k=self.AUTHORIZATION_CODE_LENGTH))
+ return __save_authorization_code__(AuthorisationCode(
+ uuid.uuid4(), code, client, request.redirect_uri, request.scope,
+ nonce, int(datetime.now().timestamp()),
+ create_s256_code_challenge(app.config["SECRET_KEY"]),
+ "S256", request.user))
+
+ def query_authorization_code(self, code, client):
+ """Retrieve the code from the database."""
+ return __query_authorization_code__(code, client)
+
+ def delete_authorization_code(self, authorization_code):# pylint: disable=[no-self-use]
+ """Delete the authorisation code."""
+ with db.connection(app.config["AUTH_DB"]) as conn:
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "DELETE FROM authorisation_code WHERE code_id=?",
+ (str(authorization_code.code_id),))
+
+    def authenticate_user(self, authorization_code) -> Optional[User]:
+        """Authenticate the user who owns the authorisation code."""
+ query = (
+ "SELECT users.* FROM authorisation_code LEFT JOIN users "
+ "ON authorisation_code.user_id=users.user_id "
+ "WHERE authorisation_code.code=?")
+ with db.connection(app.config["AUTH_DB"]) as conn:
+ with db.cursor(conn) as cursor:
+ cursor.execute(query, (str(authorization_code.code),))
+ res = cursor.fetchone()
+ if res:
+ return User(
+ uuid.UUID(res["user_id"]), res["email"], res["name"])
+
+ return None
+
+def __query_authorization_code__(
+ code: str, client: OAuth2Client) -> AuthorisationCode:
+ """A helper function that creates a new database connection.
+
+    This is necessary because the `AuthorizationCodeGrant` class(es) do not
+    provide a way to pass in the database connection."""
+ def __auth_code__(conn) -> str:
+ the_code = authorisation_code(conn, code, client)
+ return the_code.maybe(None, lambda cde: cde) # type: ignore[misc, arg-type, return-value]
+
+ return with_db_connection(__auth_code__)
+
+def __save_authorization_code__(code: AuthorisationCode) -> AuthorisationCode:
+ """A helper function that creates a new database connection.
+
+    This is necessary because the `AuthorizationCodeGrant` class(es) do not
+    provide a way to pass in the database connection."""
+ return with_db_connection(lambda conn: save_authorisation_code(conn, code))
diff --git a/gn_auth/auth/authentication/oauth2/grants/password_grant.py b/gn_auth/auth/authentication/oauth2/grants/password_grant.py
new file mode 100644
index 0000000..3233877
--- /dev/null
+++ b/gn_auth/auth/authentication/oauth2/grants/password_grant.py
@@ -0,0 +1,22 @@
+"""Allows users to authenticate directly."""
+
+from flask import current_app as app
+from authlib.oauth2.rfc6749 import grants
+
+from gn3.auth import db
+from gn3.auth.authentication.users import valid_login, user_by_email
+
+from gn3.auth.authorisation.errors import NotFoundError
+
+class PasswordGrant(grants.ResourceOwnerPasswordCredentialsGrant):
+ """Implement the 'Password' grant."""
+ TOKEN_ENDPOINT_AUTH_METHODS = ["client_secret_basic", "client_secret_post"]
+
+ def authenticate_user(self, username, password):
+ "Authenticate the user with their username and password."
+ with db.connection(app.config["AUTH_DB"]) as conn:
+ try:
+ user = user_by_email(conn, username)
+ return user if valid_login(conn, user, password) else None
+ except NotFoundError as _nfe:
+ return None
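A hedged client-side sketch of using this grant (not part of this commit): the URL prefix depends on where the OAuth2 blueprints are mounted, and the client credentials are placeholders. With `client_secret_post`, the client authenticates via form fields alongside the resource owner's credentials.

    import requests  # third-party HTTP client, used here only for illustration

    response = requests.post(
        "http://localhost:8080/api/oauth2/token",  # placeholder mount point
        data={
            "grant_type": "password",
            "username": "user@example.com",        # the user's email address
            "password": "their-password",
            "scope": "profile",
            "client_id": "placeholder-client-uuid",
            "client_secret": "placeholder-client-secret",
        })
    print(response.json())  # e.g. {"access_token": "...", "token_type": "Bearer", ...}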
diff --git a/gn_auth/auth/authentication/oauth2/models/__init__.py b/gn_auth/auth/authentication/oauth2/models/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/gn_auth/auth/authentication/oauth2/models/__init__.py
diff --git a/gn_auth/auth/authentication/oauth2/models/authorization_code.py b/gn_auth/auth/authentication/oauth2/models/authorization_code.py
new file mode 100644
index 0000000..f282814
--- /dev/null
+++ b/gn_auth/auth/authentication/oauth2/models/authorization_code.py
@@ -0,0 +1,93 @@
+"""Model and functions for handling the Authorisation Code"""
+from uuid import UUID
+from datetime import datetime
+from typing import NamedTuple
+
+from pymonad.maybe import Just, Maybe, Nothing
+
+from gn3.auth import db
+
+from .oauth2client import OAuth2Client
+
+from ...users import User, user_by_id
+
+__5_MINUTES__ = 300 # in seconds
+
+class AuthorisationCode(NamedTuple):
+ """
+ The AuthorisationCode model for the auth(entic|oris)ation system.
+ """
+ # Instance variables
+ code_id: UUID
+ code: str
+ client: OAuth2Client
+ redirect_uri: str
+ scope: str
+ nonce: str
+ auth_time: int
+ code_challenge: str
+ code_challenge_method: str
+ user: User
+
+ @property
+ def response_type(self) -> str:
+ """
+        For the authorisation code flow, the response_type MUST always be
+ 'code'.
+ """
+ return "code"
+
+ def is_expired(self):
+ """Check whether the code is expired."""
+ return self.auth_time + __5_MINUTES__ < datetime.now().timestamp()
+
+ def get_redirect_uri(self):
+ """Get the redirect URI"""
+ return self.redirect_uri
+
+ def get_scope(self):
+ """Return the assigned scope for this AuthorisationCode."""
+ return self.scope
+
+ def get_nonce(self):
+ """Get the one-time use token."""
+ return self.nonce
+
+def authorisation_code(conn: db.DbConnection,
+ code: str,
+ client: OAuth2Client) -> Maybe[AuthorisationCode]:
+ """
+ Retrieve the authorisation code object that corresponds to `code` and the
+ given OAuth2 client.
+ """
+ with db.cursor(conn) as cursor:
+ query = ("SELECT * FROM authorisation_code "
+ "WHERE code=:code AND client_id=:client_id")
+ cursor.execute(
+ query, {"code": code, "client_id": str(client.client_id)})
+ result = cursor.fetchone()
+ if result:
+ return Just(AuthorisationCode(
+ UUID(result["code_id"]), result["code"], client,
+ result["redirect_uri"], result["scope"], result["nonce"],
+ int(result["auth_time"]), result["code_challenge"],
+ result["code_challenge_method"],
+ user_by_id(conn, UUID(result["user_id"]))))
+ return Nothing
+
+def save_authorisation_code(conn: db.DbConnection,
+ auth_code: AuthorisationCode) -> AuthorisationCode:
+ """Persist the `auth_code` into the database."""
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "INSERT INTO authorisation_code VALUES("
+ ":code_id, :code, :client_id, :redirect_uri, :scope, :nonce, "
+ ":auth_time, :code_challenge, :code_challenge_method, :user_id"
+ ")",
+ {
+ **auth_code._asdict(),
+ "code_id": str(auth_code.code_id),
+ "client_id": str(auth_code.client.client_id),
+ "user_id": str(auth_code.user.user_id)
+ })
+ return auth_code
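A small sketch of consuming the model above (not part of this commit): unwrap the `Maybe` with the same `.maybe(None, ...)` idiom used in the grant module, then reject expired codes. The imports use the gn_auth module paths added by this commit.

    from typing import Optional

    from gn_auth.auth import db
    from gn_auth.auth.authentication.oauth2.models.oauth2client import OAuth2Client
    from gn_auth.auth.authentication.oauth2.models.authorization_code import (
        AuthorisationCode, authorisation_code)

    def usable_code(conn: db.DbConnection, code_str: str,
                    client: OAuth2Client) -> Optional[AuthorisationCode]:
        """Return the code only if it exists and has not expired."""
        the_code = authorisation_code(conn, code_str, client).maybe(
            None, lambda cde: cde)
        if the_code is not None and not the_code.is_expired():
            return the_code
        return None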
diff --git a/gn_auth/auth/authentication/oauth2/models/oauth2client.py b/gn_auth/auth/authentication/oauth2/models/oauth2client.py
new file mode 100644
index 0000000..2a307e3
--- /dev/null
+++ b/gn_auth/auth/authentication/oauth2/models/oauth2client.py
@@ -0,0 +1,234 @@
+"""OAuth2 Client model."""
+import json
+import datetime
+from uuid import UUID
+from typing import Sequence, Optional, NamedTuple
+
+from pymonad.maybe import Just, Maybe, Nothing
+
+from gn3.auth import db
+from gn3.auth.authentication.users import User, users, user_by_id, same_password
+
+from gn3.auth.authorisation.errors import NotFoundError
+
+class OAuth2Client(NamedTuple):
+ """
+ Client to the OAuth2 Server.
+
+ This is defined according to the mixin at
+ https://docs.authlib.org/en/latest/specs/rfc6749.html#authlib.oauth2.rfc6749.ClientMixin
+ """
+ client_id: UUID
+ client_secret: str
+ client_id_issued_at: datetime.datetime
+ client_secret_expires_at: datetime.datetime
+ client_metadata: dict
+ user: User
+
+ def check_client_secret(self, client_secret: str) -> bool:
+ """Check whether the `client_secret` matches this client."""
+ return same_password(client_secret, self.client_secret)
+
+ @property
+ def token_endpoint_auth_method(self) -> str:
+ """Return the token endpoint authorisation method."""
+ return self.client_metadata.get("token_endpoint_auth_method", ["none"])
+
+ @property
+ def client_type(self) -> str:
+ """
+        Return the client type.
+
+ Acceptable client types:
+ * public: Unable to use registered client secrets, e.g. browsers, apps
+ on mobile devices.
+ * confidential: able to securely authenticate with authorisation server
+ e.g. being able to keep their registered client secret safe.
+ """
+ return self.client_metadata.get("client_type", "public")
+
+ def check_endpoint_auth_method(self, method: str, endpoint: str) -> bool:
+ """
+ Check if the client supports the given method for the given endpoint.
+
+ Acceptable methods:
+ * none: Client is a public client and does not have a client secret
+ * client_secret_post: Client uses the HTTP POST parameters
+ * client_secret_basic: Client uses HTTP Basic
+ """
+ if endpoint == "token":
+ return (method in self.token_endpoint_auth_method
+ and method == "client_secret_post")
+ if endpoint in ("introspection", "revoke"):
+ return (method in self.token_endpoint_auth_method
+ and method == "client_secret_basic")
+ return False
+
+ @property
+ def id(self):# pylint: disable=[invalid-name]
+ """Return the client_id."""
+ return self.client_id
+
+ @property
+ def grant_types(self) -> Sequence[str]:
+ """
+ Return the grant types that this client supports.
+
+ Valid grant types:
+ * authorisation_code
+ * implicit
+ * client_credentials
+ * password
+ """
+ return self.client_metadata.get("grant_types", [])
+
+ def check_grant_type(self, grant_type: str) -> bool:
+ """
+ Validate that client can handle the given grant types
+ """
+ return grant_type in self.grant_types
+
+ @property
+ def redirect_uris(self) -> Sequence[str]:
+ """Return the redirect_uris that this client supports."""
+ return self.client_metadata.get('redirect_uris', [])
+
+ def check_redirect_uri(self, redirect_uri: str) -> bool:
+ """
+ Check whether the given `redirect_uri` is one of the expected ones.
+ """
+ return redirect_uri in self.redirect_uris
+
+ @property
+ def response_types(self) -> Sequence[str]:
+ """Return the response_types that this client supports."""
+ return self.client_metadata.get("response_type", [])
+
+ def check_response_type(self, response_type: str) -> bool:
+ """Check whether this client supports `response_type`."""
+ return response_type in self.response_types
+
+ @property
+ def scope(self) -> Sequence[str]:
+ """Return valid scopes for this client."""
+ return tuple(set(self.client_metadata.get("scope", [])))
+
+    def get_allowed_scope(self, scope: str) -> str:
+        """Return the subset of `scope` supported by this client."""
+ if not bool(scope):
+ return ""
+ requested = scope.split()
+ return " ".join(sorted(set(
+ scp for scp in requested if scp in self.scope)))
+
+ def get_client_id(self):
+ """Return this client's identifier."""
+ return self.client_id
+
+ def get_default_redirect_uri(self) -> str:
+ """Return the default redirect uri"""
+ return self.client_metadata.get("default_redirect_uri", "")
+
+def client(conn: db.DbConnection, client_id: UUID,
+ user: Optional[User] = None) -> Maybe:
+ """Retrieve a client by its ID"""
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "SELECT * FROM oauth2_clients WHERE client_id=?", (str(client_id),))
+ result = cursor.fetchone()
+ the_user = user
+ if result:
+ if not bool(the_user):
+ try:
+ the_user = user_by_id(conn, result["user_id"])
+ except NotFoundError as _nfe:
+ the_user = None
+
+ return Just(
+ OAuth2Client(UUID(result["client_id"]),
+ result["client_secret"],
+ datetime.datetime.fromtimestamp(
+ result["client_id_issued_at"]),
+ datetime.datetime.fromtimestamp(
+ result["client_secret_expires_at"]),
+ json.loads(result["client_metadata"]),
+ the_user))# type: ignore[arg-type]
+
+ return Nothing
+
+def client_by_id_and_secret(conn: db.DbConnection, client_id: UUID,
+ client_secret: str) -> OAuth2Client:
+ """Retrieve a client by its ID and secret"""
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "SELECT * FROM oauth2_clients WHERE client_id=?",
+ (str(client_id),))
+ row = cursor.fetchone()
+ if bool(row) and same_password(client_secret, row["client_secret"]):
+ return OAuth2Client(
+ client_id, client_secret,
+ datetime.datetime.fromtimestamp(row["client_id_issued_at"]),
+ datetime.datetime.fromtimestamp(
+ row["client_secret_expires_at"]),
+ json.loads(row["client_metadata"]),
+ user_by_id(conn, UUID(row["user_id"])))
+
+ raise NotFoundError("Could not find client with the given credentials.")
+
+def save_client(conn: db.DbConnection, the_client: OAuth2Client) -> OAuth2Client:
+ """Persist the client details into the database."""
+ with db.cursor(conn) as cursor:
+ query = (
+ "INSERT INTO oauth2_clients "
+ "(client_id, client_secret, client_id_issued_at, "
+ "client_secret_expires_at, client_metadata, user_id) "
+ "VALUES "
+ "(:client_id, :client_secret, :client_id_issued_at, "
+ ":client_secret_expires_at, :client_metadata, :user_id) "
+ "ON CONFLICT (client_id) DO UPDATE SET "
+ "client_secret=:client_secret, "
+ "client_id_issued_at=:client_id_issued_at, "
+ "client_secret_expires_at=:client_secret_expires_at, "
+ "client_metadata=:client_metadata, user_id=:user_id")
+ cursor.execute(
+ query,
+ {
+ "client_id": str(the_client.client_id),
+ "client_secret": the_client.client_secret,
+ "client_id_issued_at": (
+ the_client.client_id_issued_at.timestamp()),
+ "client_secret_expires_at": (
+ the_client.client_secret_expires_at.timestamp()),
+ "client_metadata": json.dumps(the_client.client_metadata),
+ "user_id": str(the_client.user.user_id)
+ })
+ return the_client
+
+def oauth2_clients(conn: db.DbConnection) -> tuple[OAuth2Client, ...]:
+ """Fetch a list of all OAuth2 clients."""
+ with db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM oauth2_clients")
+ clients_rs = cursor.fetchall()
+ the_users = {
+ usr.user_id: usr for usr in users(
+ conn, tuple({UUID(result["user_id"]) for result in clients_rs}))
+ }
+ return tuple(OAuth2Client(UUID(result["client_id"]),
+ result["client_secret"],
+ datetime.datetime.fromtimestamp(
+ result["client_id_issued_at"]),
+ datetime.datetime.fromtimestamp(
+ result["client_secret_expires_at"]),
+ json.loads(result["client_metadata"]),
+ the_users[UUID(result["user_id"])])
+ for result in clients_rs)
+
+def delete_client(conn: db.DbConnection, the_client: OAuth2Client) -> OAuth2Client:
+ """Delete the given client from the database"""
+ with db.cursor(conn) as cursor:
+ params = (str(the_client.client_id),)
+ cursor.execute("DELETE FROM authorisation_code WHERE client_id=?",
+ params)
+ cursor.execute("DELETE FROM oauth2_tokens WHERE client_id=?", params)
+ cursor.execute("DELETE FROM oauth2_clients WHERE client_id=?", params)
+ return the_client
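A hedged sketch of registering a client with `save_client()` (not part of this commit): the metadata keys mirror the properties defined above, and all values are placeholders. Since `check_client_secret()` verifies via `same_password()`, the stored secret should be a hash rather than plain text.

    import uuid
    import datetime

    from gn_auth.auth import db
    from gn_auth.auth.authentication.users import User, hash_password
    from gn_auth.auth.authentication.oauth2.models.oauth2client import (
        OAuth2Client, save_client)

    def register_confidential_client(
            conn: db.DbConnection, user: User, raw_secret: str) -> OAuth2Client:
        """Create and persist a confidential client owned by `user`."""
        now = datetime.datetime.now()
        return save_client(conn, OAuth2Client(
            client_id=uuid.uuid4(),
            client_secret=hash_password(raw_secret),  # store a hash, not the secret
            client_id_issued_at=now,
            client_secret_expires_at=now + datetime.timedelta(days=365),
            client_metadata={
                "client_type": "confidential",
                "token_endpoint_auth_method": [
                    "client_secret_post", "client_secret_basic"],
                "grant_types": ["password", "authorization_code"],
                "response_type": ["code"],
                "scope": ["profile"],
                "redirect_uris": ["https://example.com/oauth2/callback"],
                "default_redirect_uri": "https://example.com/oauth2/callback"},
            user=user))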
diff --git a/gn_auth/auth/authentication/oauth2/models/oauth2token.py b/gn_auth/auth/authentication/oauth2/models/oauth2token.py
new file mode 100644
index 0000000..72e20cc
--- /dev/null
+++ b/gn_auth/auth/authentication/oauth2/models/oauth2token.py
@@ -0,0 +1,132 @@
+"""OAuth2 Token"""
+import uuid
+import datetime
+from typing import NamedTuple, Optional
+
+from pymonad.maybe import Just, Maybe, Nothing
+
+from gn3.auth import db
+from gn3.auth.authentication.users import User, user_by_id
+
+from gn3.auth.authorisation.errors import NotFoundError
+
+from .oauth2client import client, OAuth2Client
+
+class OAuth2Token(NamedTuple):
+ """Implement Tokens for OAuth2."""
+ token_id: uuid.UUID
+ client: OAuth2Client
+ token_type: str
+ access_token: str
+ refresh_token: Optional[str]
+ scope: str
+ revoked: bool
+ issued_at: datetime.datetime
+ expires_in: int
+ user: User
+
+ @property
+ def expires_at(self) -> datetime.datetime:
+ """Return the time when the token expires."""
+ return self.issued_at + datetime.timedelta(seconds=self.expires_in)
+
+ def check_client(self, client: OAuth2Client) -> bool:# pylint: disable=[redefined-outer-name]
+ """Check whether the token is issued to given `client`."""
+ return client.client_id == self.client.client_id
+
+ def get_expires_in(self) -> int:
+ """Return the `expires_in` value for the token."""
+ return self.expires_in
+
+ def get_scope(self) -> str:
+ """Return the valid scope for the token."""
+ return self.scope
+
+ def is_expired(self) -> bool:
+ """Check whether the token is expired."""
+ return self.expires_at < datetime.datetime.now()
+
+ def is_revoked(self):
+ """Check whether the token has been revoked."""
+ return self.revoked
+
+def __token_from_resultset__(conn: db.DbConnection, rset) -> Maybe:
+ __identity__ = lambda val: val
+ try:
+ the_user = user_by_id(conn, uuid.UUID(rset["user_id"]))
+ except NotFoundError as _nfe:
+ the_user = None
+ the_client = client(conn, uuid.UUID(rset["client_id"]), the_user)
+
+ if the_client.is_just() and bool(the_user):
+ return Just(OAuth2Token(token_id=uuid.UUID(rset["token_id"]),
+ client=the_client.maybe(None, __identity__),
+ token_type=rset["token_type"],
+ access_token=rset["access_token"],
+ refresh_token=rset["refresh_token"],
+ scope=rset["scope"],
+ revoked=(rset["revoked"] == 1),
+ issued_at=datetime.datetime.fromtimestamp(
+ rset["issued_at"]),
+ expires_in=rset["expires_in"],
+ user=the_user))# type: ignore[arg-type]
+
+ return Nothing
+
+def token_by_access_token(conn: db.DbConnection, token_str: str) -> Maybe:
+ """Retrieve token by its token string"""
+ with db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM oauth2_tokens WHERE access_token=?",
+ (token_str,))
+ res = cursor.fetchone()
+ if res:
+ return __token_from_resultset__(conn, res)
+
+ return Nothing
+
+def token_by_refresh_token(conn: db.DbConnection, token_str: str) -> Maybe:
+ """Retrieve token by its token string"""
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "SELECT * FROM oauth2_tokens WHERE refresh_token=?",
+ (token_str,))
+ res = cursor.fetchone()
+ if res:
+ return __token_from_resultset__(conn, res)
+
+ return Nothing
+
+def revoke_token(token: OAuth2Token) -> OAuth2Token:
+ """
+ Return a new token derived from `token` with the `revoked` field set to
+ `True`.
+ """
+ return OAuth2Token(
+ token_id=token.token_id, client=token.client,
+ token_type=token.token_type, access_token=token.access_token,
+ refresh_token=token.refresh_token, scope=token.scope, revoked=True,
+ issued_at=token.issued_at, expires_in=token.expires_in, user=token.user)
+
+def save_token(conn: db.DbConnection, token: OAuth2Token) -> None:
+ """Save/Update the token."""
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ ("INSERT INTO oauth2_tokens VALUES (:token_id, :client_id, "
+ ":token_type, :access_token, :refresh_token, :scope, :revoked, "
+ ":issued_at, :expires_in, :user_id) "
+ "ON CONFLICT (token_id) DO UPDATE SET "
+ "refresh_token=:refresh_token, revoked=:revoked, "
+ "expires_in=:expires_in "
+ "WHERE token_id=:token_id"),
+ {
+ "token_id": str(token.token_id),
+ "client_id": str(token.client.client_id),
+ "token_type": token.token_type,
+ "access_token": token.access_token,
+ "refresh_token": token.refresh_token,
+ "scope": token.scope,
+ "revoked": 1 if token.revoked else 0,
+ "issued_at": int(token.issued_at.timestamp()),
+ "expires_in": token.expires_in,
+ "user_id": str(token.user.user_id)
+ })
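A short sketch combining the helpers above (not part of this commit): look a token up by its access-token string, then persist a revoked copy of it.

    from gn_auth.auth import db
    from gn_auth.auth.authentication.oauth2.models.oauth2token import (
        token_by_access_token, revoke_token, save_token)

    def revoke_by_access_token(conn: db.DbConnection, access_token: str) -> bool:
        """Revoke the matching token, returning False if none exists."""
        def __revoke__(token) -> bool:
            save_token(conn, revoke_token(token))
            return True
        return token_by_access_token(conn, access_token).maybe(False, __revoke__)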
diff --git a/gn_auth/auth/authentication/oauth2/resource_server.py b/gn_auth/auth/authentication/oauth2/resource_server.py
new file mode 100644
index 0000000..223e811
--- /dev/null
+++ b/gn_auth/auth/authentication/oauth2/resource_server.py
@@ -0,0 +1,19 @@
+"""Protect the resources endpoints"""
+
+from flask import current_app as app
+from authlib.oauth2.rfc6750 import BearerTokenValidator as _BearerTokenValidator
+from authlib.integrations.flask_oauth2 import ResourceProtector
+
+from gn3.auth import db
+from gn3.auth.authentication.oauth2.models.oauth2token import token_by_access_token
+
+class BearerTokenValidator(_BearerTokenValidator):
+ """Extends `authlib.oauth2.rfc6750.BearerTokenValidator`"""
+ def authenticate_token(self, token_string: str):
+ with db.connection(app.config["AUTH_DB"]) as conn:
+ return token_by_access_token(conn, token_string).maybe(# type: ignore[misc]
+ None, lambda tok: tok)
+
+require_oauth = ResourceProtector()
+
+require_oauth.register_token_validator(BearerTokenValidator())
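A usage sketch for `require_oauth` (not part of this commit; the blueprint and route are placeholders). The `ResourceProtector` works both as a decorator and, via `.acquire()`, as a context manager, which is how the views and checks modules later in this commit use it.

    from flask import Blueprint, jsonify

    from gn_auth.auth.authentication.oauth2.resource_server import require_oauth

    demo = Blueprint("demo", __name__)

    @demo.route("/whoami", methods=["GET"])
    @require_oauth("profile")
    def whoami():
        """Return details of the user attached to the presented bearer token."""
        with require_oauth.acquire("profile") as the_token:
            return jsonify({"email": the_token.user.email,
                            "name": the_token.user.name})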
diff --git a/gn_auth/auth/authentication/oauth2/server.py b/gn_auth/auth/authentication/oauth2/server.py
new file mode 100644
index 0000000..7d7113a
--- /dev/null
+++ b/gn_auth/auth/authentication/oauth2/server.py
@@ -0,0 +1,72 @@
+"""Initialise the OAuth2 Server"""
+import uuid
+import datetime
+from typing import Callable
+
+from flask import Flask, current_app
+from authlib.oauth2.rfc6749.errors import InvalidClientError
+from authlib.integrations.flask_oauth2 import AuthorizationServer
+# from authlib.oauth2.rfc7636 import CodeChallenge
+
+from gn3.auth import db
+
+from .models.oauth2client import client
+from .models.oauth2token import OAuth2Token, save_token
+
+from .grants.password_grant import PasswordGrant
+from .grants.authorisation_code_grant import AuthorisationCodeGrant
+
+from .endpoints.revocation import RevocationEndpoint
+from .endpoints.introspection import IntrospectionEndpoint
+
+def create_query_client_func() -> Callable:
+ """Create the function that loads the client."""
+ def __query_client__(client_id: uuid.UUID):
+ # use current_app rather than passing the db_uri to avoid issues
+ # when config changes, e.g. while testing.
+ with db.connection(current_app.config["AUTH_DB"]) as conn:
+ the_client = client(conn, client_id).maybe(
+ None, lambda clt: clt) # type: ignore[misc]
+ if bool(the_client):
+ return the_client
+ raise InvalidClientError(
+ "No client found for the given CLIENT_ID and CLIENT_SECRET.")
+
+ return __query_client__
+
+def create_save_token_func(token_model: type) -> Callable:
+ """Create the function that saves the token."""
+ def __save_token__(token, request):
+ with db.connection(current_app.config["AUTH_DB"]) as conn:
+ save_token(
+ conn, token_model(
+ token_id=uuid.uuid4(), client=request.client,
+ user=request.user,
+ **{
+ "refresh_token": None, "revoked": False,
+ "issued_at": datetime.datetime.now(),
+ **token
+ }))
+
+ return __save_token__
+
+def setup_oauth2_server(app: Flask) -> None:
+    """Set up the OAuth2 server for the Flask application."""
+ server = AuthorizationServer()
+ server.register_grant(PasswordGrant)
+
+ # Figure out a common `code_verifier` for GN2 and GN3 and set
+ # server.register_grant(AuthorisationCodeGrant, [CodeChallenge(required=False)])
+ # below
+ server.register_grant(AuthorisationCodeGrant)
+
+ # register endpoints
+ server.register_endpoint(RevocationEndpoint)
+ server.register_endpoint(IntrospectionEndpoint)
+
+ # init server
+ server.init_app(
+ app,
+ query_client=create_query_client_func(),
+ save_token=create_save_token_func(OAuth2Token))
+ app.config["OAUTH2_SERVER"] = server
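A hedged sketch of calling this from an application factory (not part of this commit): `AUTH_DB` and `SECRET_KEY` are the configuration keys the modules above read; the values are placeholders.

    from flask import Flask

    from gn_auth.auth.authentication.oauth2.server import setup_oauth2_server

    def create_app() -> Flask:
        """Minimal application factory wiring up the OAuth2 server."""
        app = Flask(__name__)
        app.config["SECRET_KEY"] = "change-me"              # placeholder
        app.config["AUTH_DB"] = "/var/lib/gn-auth/auth.db"  # placeholder path
        setup_oauth2_server(app)
        return app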
diff --git a/gn_auth/auth/authentication/oauth2/views.py b/gn_auth/auth/authentication/oauth2/views.py
new file mode 100644
index 0000000..2bd3865
--- /dev/null
+++ b/gn_auth/auth/authentication/oauth2/views.py
@@ -0,0 +1,104 @@
+"""Endpoints for the oauth2 server"""
+import uuid
+import traceback
+
+from authlib.oauth2.rfc6749.errors import InvalidClientError
+from email_validator import validate_email, EmailNotValidError
+from flask import (
+ flash,
+ request,
+ url_for,
+ redirect,
+ Response,
+ Blueprint,
+ render_template,
+ current_app as app)
+
+from gn3.auth import db
+from gn3.auth.db_utils import with_db_connection
+from gn3.auth.authorisation.errors import ForbiddenAccess
+
+from .resource_server import require_oauth
+from .endpoints.revocation import RevocationEndpoint
+from .endpoints.introspection import IntrospectionEndpoint
+
+from ..users import valid_login, NotFoundError, user_by_email
+
+auth = Blueprint("auth", __name__)
+
+@auth.route("/delete-client/<uuid:client_id>", methods=["GET", "POST"])
+def delete_client(client_id: uuid.UUID):
+ """Delete an OAuth2 client."""
+ return f"WOULD DELETE OAUTH2 CLIENT {client_id}."
+
+@auth.route("/authorise", methods=["GET", "POST"])
+def authorise():
+ """Authorise a user"""
+ try:
+ server = app.config["OAUTH2_SERVER"]
+ client_id = uuid.UUID(request.args.get(
+ "client_id",
+ request.form.get("client_id", str(uuid.uuid4()))))
+ client = server.query_client(client_id)
+ if not bool(client):
+ flash("Invalid OAuth2 client.", "alert-error")
+ if request.method == "GET":
+ client = server.query_client(request.args.get("client_id"))
+ return render_template(
+ "oauth2/authorise-user.html",
+ client=client,
+ scope=client.scope,
+ response_type="code")
+
+ form = request.form
+ def __authorise__(conn: db.DbConnection) -> Response:
+ email_passwd_msg = "Email or password is invalid!"
+ redirect_response = redirect(url_for("oauth2.auth.authorise",
+ client_id=client_id))
+ try:
+ email = validate_email(
+ form.get("user:email"), check_deliverability=False)
+ user = user_by_email(conn, email["email"])
+ if valid_login(conn, user, form.get("user:password", "")):
+ return server.create_authorization_response(request=request, grant_user=user)
+ flash(email_passwd_msg, "alert-error")
+ return redirect_response # type: ignore[return-value]
+ except EmailNotValidError as _enve:
+ app.logger.debug(traceback.format_exc())
+ flash(email_passwd_msg, "alert-error")
+ return redirect_response # type: ignore[return-value]
+ except NotFoundError as _nfe:
+ app.logger.debug(traceback.format_exc())
+ flash(email_passwd_msg, "alert-error")
+ return redirect_response # type: ignore[return-value]
+
+ return with_db_connection(__authorise__)
+ except InvalidClientError as ice:
+ return render_template(
+ "oauth2/oauth2_error.html", error=ice), ice.status_code
+
+@auth.route("/token", methods=["POST"])
+def token():
+ """Retrieve the authorisation token."""
+ server = app.config["OAUTH2_SERVER"]
+ return server.create_token_response()
+
+@auth.route("/revoke", methods=["POST"])
+def revoke_token():
+ """Revoke the token."""
+ return app.config["OAUTH2_SERVER"].create_endpoint_response(
+ RevocationEndpoint.ENDPOINT_NAME)
+
+@auth.route("/introspect", methods=["POST"])
+@require_oauth("introspect")
+def introspect_token() -> Response:
+ """Provide introspection information for the token."""
+ # This is dangerous to provide publicly
+ authorised_clients = app.config.get(
+ "OAUTH2_CLIENTS_WITH_INTROSPECTION_PRIVILEGE", [])
+ with require_oauth.acquire("introspect") as the_token:
+ if the_token.client.client_id in authorised_clients:
+ return app.config["OAUTH2_SERVER"].create_endpoint_response(
+ IntrospectionEndpoint.ENDPOINT_NAME)
+
+ raise ForbiddenAccess("You cannot access this endpoint")
diff --git a/gn_auth/auth/authentication/users.py b/gn_auth/auth/authentication/users.py
new file mode 100644
index 0000000..0e72ed2
--- /dev/null
+++ b/gn_auth/auth/authentication/users.py
@@ -0,0 +1,128 @@
+"""User-specific code and data structures."""
+from uuid import UUID, uuid4
+from typing import Any, Tuple, NamedTuple
+
+from argon2 import PasswordHasher
+from argon2.exceptions import VerifyMismatchError
+
+from gn3.auth import db
+from gn3.auth.authorisation.errors import NotFoundError
+
+class User(NamedTuple):
+ """Class representing a user."""
+ user_id: UUID
+ email: str
+ name: str
+
+ def get_user_id(self):
+ """Return the user's UUID. Mostly for use with Authlib."""
+ return self.user_id
+
+ def dictify(self) -> dict[str, Any]:
+ """Return a dict representation of `User` objects."""
+ return {"user_id": self.user_id, "email": self.email, "name": self.name}
+
+DUMMY_USER = User(user_id=UUID("a391cf60-e8b7-4294-bd22-ddbbda4b3530"),
+ email="gn3@dummy.user",
+ name="Dummy user to use as placeholder")
+
+def user_by_email(conn: db.DbConnection, email: str) -> User:
+ """Retrieve user from database by their email address"""
+ with db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM users WHERE email=?", (email,))
+ row = cursor.fetchone()
+
+ if row:
+ return User(UUID(row["user_id"]), row["email"], row["name"])
+
+ raise NotFoundError(f"Could not find user with email {email}")
+
+def user_by_id(conn: db.DbConnection, user_id: UUID) -> User:
+ """Retrieve user from database by their user id"""
+ with db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM users WHERE user_id=?", (str(user_id),))
+ row = cursor.fetchone()
+
+ if row:
+ return User(UUID(row["user_id"]), row["email"], row["name"])
+
+ raise NotFoundError(f"Could not find user with ID {user_id}")
+
+def same_password(password: str, hashed: str) -> bool:
+    """Check whether `password` matches the hashed value `hashed`."""
+ try:
+ return hasher().verify(hashed, password)
+ except VerifyMismatchError as _vme:
+ return False
+
+def valid_login(conn: db.DbConnection, user: User, password: str) -> bool:
+ """Check the validity of the provided credentials for login."""
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ ("SELECT * FROM users LEFT JOIN user_credentials "
+ "ON users.user_id=user_credentials.user_id "
+ "WHERE users.user_id=?"),
+ (str(user.user_id),))
+ row = cursor.fetchone()
+
+ if row is None:
+ return False
+
+ return same_password(password, row["password"])
+
+def save_user(cursor: db.DbCursor, email: str, name: str) -> User:
+ """
+ Create and persist a user.
+
+    User creation may be part of a larger transaction, therefore the function
+    takes a cursor object rather than a connection.
+
+ The newly created and persisted user is then returned.
+ """
+ user_id = uuid4()
+ cursor.execute("INSERT INTO users VALUES (?, ?, ?)",
+ (str(user_id), email, name))
+ return User(user_id, email, name)
+
+def hasher():
+ """Retrieve PasswordHasher object"""
+ # TODO: Maybe tune the parameters here...
+ # Tuneable Parameters:
+ # - time_cost (default: 2)
+ # - memory_cost (default: 102400)
+ # - parallelism (default: 8)
+ # - hash_len (default: 16)
+ # - salt_len (default: 16)
+ # - encoding (default: 'utf-8')
+ # - type (default: <Type.ID: 2>)
+ return PasswordHasher()
+
+def hash_password(password):
+ """Hash the password."""
+ return hasher().hash(password)
+
+def set_user_password(
+ cursor: db.DbCursor, user: User, password: str) -> Tuple[User, bytes]:
+ """Set the given user's password in the database."""
+ hashed_password = hash_password(password)
+ cursor.execute(
+ ("INSERT INTO user_credentials VALUES (:user_id, :hash) "
+ "ON CONFLICT (user_id) DO UPDATE SET password=:hash"),
+ {"user_id": str(user.user_id), "hash": hashed_password})
+ return user, hashed_password
+
+def users(conn: db.DbConnection,
+ ids: tuple[UUID, ...] = tuple()) -> tuple[User, ...]:
+ """
+ Fetch all users with the given `ids`. If `ids` is empty, return ALL users.
+ """
+ params = ", ".join(["?"] * len(ids))
+ with db.cursor(conn) as cursor:
+ query = "SELECT * FROM users" + (
+ f" WHERE user_id IN ({params})"
+ if len(ids) > 0 else "")
+ cursor.execute(query, tuple(str(the_id) for the_id in ids))
+ return tuple(User(UUID(row["user_id"]), row["email"], row["name"])
+ for row in cursor.fetchall())
+ return tuple()
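A sketch of creating a user together with their credentials (not part of this commit): both helpers take a cursor precisely so they can share a single transaction.

    from gn_auth.auth import db
    from gn_auth.auth.authentication.users import (
        User, save_user, set_user_password)

    def register_user(conn: db.DbConnection,
                      email: str, name: str, password: str) -> User:
        """Create the user and their hashed credentials in one transaction."""
        with db.cursor(conn) as cursor:
            user = save_user(cursor, email, name)
            set_user_password(cursor, user, password)
            return user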
diff --git a/gn_auth/auth/authorisation/__init__.py b/gn_auth/auth/authorisation/__init__.py
new file mode 100644
index 0000000..abd2747
--- /dev/null
+++ b/gn_auth/auth/authorisation/__init__.py
@@ -0,0 +1,2 @@
+"""The authorisation module."""
+from .checks import authorised_p
diff --git a/gn_auth/auth/authorisation/checks.py b/gn_auth/auth/authorisation/checks.py
new file mode 100644
index 0000000..1c87c02
--- /dev/null
+++ b/gn_auth/auth/authorisation/checks.py
@@ -0,0 +1,70 @@
+"""Functions to check for authorisation."""
+from functools import wraps
+from typing import Callable
+
+from flask import request, current_app as app
+
+from gn3.auth import db
+
+from . import privileges as auth_privs
+from .errors import InvalidData, AuthorisationError
+
+from ..authentication.oauth2.resource_server import require_oauth
+
+def __system_privileges_in_roles__(conn, user):
+ """
+    This really is a hack, since groups are not treated as resources at the
+    time of writing.
+
+ We need a way of allowing the user to have the system:group:* privileges.
+ """
+ query = (
+ "SELECT DISTINCT p.* FROM users AS u "
+ "INNER JOIN group_user_roles_on_resources AS guror "
+ "ON u.user_id=guror.user_id "
+ "INNER JOIN roles AS r ON guror.role_id=r.role_id "
+ "INNER JOIN role_privileges AS rp ON r.role_id=rp.role_id "
+ "INNER JOIN privileges AS p ON rp.privilege_id=p.privilege_id "
+ "WHERE u.user_id=? AND p.privilege_id LIKE 'system:%'")
+ with db.cursor(conn) as cursor:
+ cursor.execute(query, (str(user.user_id),))
+ return (row["privilege_id"] for row in cursor.fetchall())
+
+def authorised_p(
+ privileges: tuple[str, ...],
+ error_description: str = (
+ "You lack authorisation to perform requested action"),
+ oauth2_scope = "profile"):
+ """Authorisation decorator."""
+ assert len(privileges) > 0, "You must provide at least one privilege"
+ def __build_authoriser__(func: Callable):
+ @wraps(func)
+ def __authoriser__(*args, **kwargs):
+ # the_user = user or (hasattr(g, "user") and g.user)
+ with require_oauth.acquire(oauth2_scope) as the_token:
+ the_user = the_token.user
+ if the_user:
+ with db.connection(app.config["AUTH_DB"]) as conn:
+ user_privileges = tuple(
+ priv.privilege_id for priv in
+ auth_privs.user_privileges(conn, the_user)) + tuple(
+ priv_id for priv_id in
+ __system_privileges_in_roles__(conn, the_user))
+
+ not_assigned = [
+ priv for priv in privileges if priv not in user_privileges]
+ if len(not_assigned) == 0:
+ return func(*args, **kwargs)
+
+ raise AuthorisationError(error_description)
+ return __authoriser__
+ return __build_authoriser__
+
+def require_json(func):
+ """Ensure the request has JSON data."""
+ @wraps(func)
+ def __req_json__(*args, **kwargs):
+ if bool(request.json):
+ return func(*args, **kwargs)
+ raise InvalidData("Expected JSON data in the request.")
+ return __req_json__
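A usage sketch for the decorators above (not part of this commit): the route and privilege name are illustrative, and the request must carry a bearer token whose user actually holds that privilege for the check to pass.

    from flask import Blueprint, jsonify

    from gn_auth.auth.authorisation.checks import authorised_p, require_json

    admin = Blueprint("admin_demo", __name__)

    @admin.route("/users/list", methods=["POST"])
    @require_json
    @authorised_p(("system:user:list",),
                  error_description="You need the system:user:list privilege.",
                  oauth2_scope="profile")
    def list_users():
        """Only reached when the token's user holds the required privilege."""
        return jsonify({"status": "authorised"})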
diff --git a/gn_auth/auth/authorisation/data/__init__.py b/gn_auth/auth/authorisation/data/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/gn_auth/auth/authorisation/data/__init__.py
diff --git a/gn_auth/auth/authorisation/data/genotypes.py b/gn_auth/auth/authorisation/data/genotypes.py
new file mode 100644
index 0000000..8f901a5
--- /dev/null
+++ b/gn_auth/auth/authorisation/data/genotypes.py
@@ -0,0 +1,96 @@
+"""Handle linking of Genotype data to the Auth(entic|oris)ation system."""
+import uuid
+from typing import Iterable
+
+from MySQLdb.cursors import DictCursor
+
+import gn3.auth.db as authdb
+import gn3.db_utils as gn3db
+from gn3.auth.dictify import dictify
+from gn3.auth.authorisation.checks import authorised_p
+from gn3.auth.authorisation.groups.models import Group
+
+def linked_genotype_data(conn: authdb.DbConnection) -> Iterable[dict]:
+    """Retrieve genotype data that is linked to user groups."""
+ with authdb.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM linked_genotype_data")
+ return (dict(row) for row in cursor.fetchall())
+
+@authorised_p(("system:data:link-to-group",),
+ error_description=(
+ "You do not have sufficient privileges to link data to (a) "
+ "group(s)."),
+ oauth2_scope="profile group resource")
+def ungrouped_genotype_data(# pylint: disable=[too-many-arguments]
+ authconn: authdb.DbConnection, gn3conn: gn3db.Connection,
+ search_query: str, selected: tuple[dict, ...] = tuple(),
+ limit: int = 10000, offset: int = 0) -> tuple[
+ dict, ...]:
+ """Retrieve genotype data that is not linked to any user group."""
+ params = tuple(
+ (row["SpeciesId"], row["InbredSetId"], row["GenoFreezeId"])
+ for row in linked_genotype_data(authconn)) + tuple(
+ (row["SpeciesId"], row["InbredSetId"], row["GenoFreezeId"])
+ for row in selected)
+ query = (
+ "SELECT s.SpeciesId, iset.InbredSetId, iset.InbredSetName, "
+ "gf.Id AS GenoFreezeId, gf.Name AS dataset_name, "
+ "gf.FullName AS dataset_fullname, "
+ "gf.ShortName AS dataset_shortname "
+ "FROM Species AS s INNER JOIN InbredSet AS iset "
+ "ON s.SpeciesId=iset.SpeciesId INNER JOIN GenoFreeze AS gf "
+ "ON iset.InbredSetId=gf.InbredSetId ")
+
+ if len(params) > 0 or bool(search_query):
+ query = query + "WHERE "
+
+ if len(params) > 0:
+ paramstr = ", ".join(["(%s, %s, %s)"] * len(params))
+ query = query + (
+ "(s.SpeciesId, iset.InbredSetId, gf.Id) "
+ f"NOT IN ({paramstr}) "
+ ) + ("AND " if bool(search_query) else "")
+
+ if bool(search_query):
+ query = query + (
+ "CONCAT(gf.Name, ' ', gf.FullName, ' ', gf.ShortName) LIKE %s ")
+ params = params + ((f"%{search_query}%",),)# type: ignore[operator]
+
+ query = query + f"LIMIT {int(limit)} OFFSET {int(offset)}"
+ with gn3conn.cursor(DictCursor) as cursor:
+ cursor.execute(
+ query, tuple(item for sublist in params for item in sublist))
+ return tuple(row for row in cursor.fetchall())
+
+@authorised_p(
+ ("system:data:link-to-group",),
+ error_description=(
+ "You do not have sufficient privileges to link data to (a) "
+ "group(s)."),
+ oauth2_scope="profile group resource")
+def link_genotype_data(
+        conn: authdb.DbConnection, group: Group, datasets: dict) -> dict:
+    """Link genotype `datasets` to `group`."""
+ with authdb.cursor(conn) as cursor:
+ cursor.executemany(
+ "INSERT INTO linked_genotype_data VALUES "
+ "(:data_link_id, :group_id, :SpeciesId, :InbredSetId, "
+ ":GenoFreezeId, :dataset_name, :dataset_fullname, "
+ ":dataset_shortname) "
+ "ON CONFLICT (SpeciesId, InbredSetId, GenoFreezeId) DO NOTHING",
+ tuple({
+ "data_link_id": str(uuid.uuid4()),
+ "group_id": str(group.group_id),
+ **{
+ key: value for key,value in dataset.items() if key in (
+ "GenoFreezeId", "InbredSetId", "SpeciesId",
+ "dataset_fullname", "dataset_name", "dataset_shortname")
+ }
+ } for dataset in datasets))
+ return {
+ "description": (
+            f"Successfully linked {len(datasets)} dataset(s) to group "
+ f"'{group.group_name}'."),
+ "group": dictify(group),
+ "datasets": datasets
+ }
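A sketch of the mapping `link_genotype_data()` expects for each dataset (not part of this commit; all values are placeholders). The keys mirror the columns selected by `ungrouped_genotype_data()` above, and already-linked datasets are skipped by the `ON CONFLICT ... DO NOTHING` clause.

    dataset = {
        "SpeciesId": 1,                      # placeholder identifiers
        "InbredSetId": 1,
        "GenoFreezeId": 42,
        "dataset_name": "EXAMPLEGeno",
        "dataset_fullname": "Example Genotypes",
        "dataset_shortname": "EXAMPLEGeno",
    }
    # link_genotype_data(authconn, the_group, (dataset,)) links it to `the_group`.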
diff --git a/gn_auth/auth/authorisation/data/mrna.py b/gn_auth/auth/authorisation/data/mrna.py
new file mode 100644
index 0000000..bdfc5c1
--- /dev/null
+++ b/gn_auth/auth/authorisation/data/mrna.py
@@ -0,0 +1,100 @@
+"""Handle linking of mRNA Assay data to the Auth(entic|oris)ation system."""
+import uuid
+from typing import Iterable
+from MySQLdb.cursors import DictCursor
+
+import gn3.auth.db as authdb
+import gn3.db_utils as gn3db
+from gn3.auth.dictify import dictify
+from gn3.auth.authorisation.checks import authorised_p
+from gn3.auth.authorisation.groups.models import Group
+
+def linked_mrna_data(conn: authdb.DbConnection) -> Iterable[dict]:
+ """Retrieve mRNA Assay data that is linked to user groups."""
+ with authdb.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM linked_mrna_data")
+ return (dict(row) for row in cursor.fetchall())
+
+@authorised_p(("system:data:link-to-group",),
+ error_description=(
+ "You do not have sufficient privileges to link data to (a) "
+ "group(s)."),
+ oauth2_scope="profile group resource")
+def ungrouped_mrna_data(# pylint: disable=[too-many-arguments]
+ authconn: authdb.DbConnection, gn3conn: gn3db.Connection,
+ search_query: str, selected: tuple[dict, ...] = tuple(),
+ limit: int = 10000, offset: int = 0) -> tuple[
+        dict, ...]:
+    """Retrieve mRNA Assay data that is not linked to any user group."""
+ params = tuple(
+ (row["SpeciesId"], row["InbredSetId"], row["ProbeFreezeId"],
+ row["ProbeSetFreezeId"])
+ for row in linked_mrna_data(authconn)) + tuple(
+ (row["SpeciesId"], row["InbredSetId"], row["ProbeFreezeId"],
+ row["ProbeSetFreezeId"])
+ for row in selected)
+ query = (
+ "SELECT s.SpeciesId, iset.InbredSetId, iset.InbredSetName, "
+ "pf.ProbeFreezeId, pf.Name AS StudyName, psf.Id AS ProbeSetFreezeId, "
+ "psf.Name AS dataset_name, psf.FullName AS dataset_fullname, "
+ "psf.ShortName AS dataset_shortname "
+ "FROM Species AS s INNER JOIN InbredSet AS iset "
+ "ON s.SpeciesId=iset.SpeciesId INNER JOIN ProbeFreeze AS pf "
+ "ON iset.InbredSetId=pf.InbredSetId INNER JOIN ProbeSetFreeze AS psf "
+ "ON pf.ProbeFreezeId=psf.ProbeFreezeId ") + (
+ "WHERE " if (len(params) > 0 or bool(search_query)) else "")
+
+ if len(params) > 0:
+ paramstr = ", ".join(["(%s, %s, %s, %s)"] * len(params))
+ query = query + (
+ "(s.SpeciesId, iset.InbredSetId, pf.ProbeFreezeId, psf.Id) "
+ f"NOT IN ({paramstr}) "
+ ) + ("AND " if bool(search_query) else "")
+
+ if bool(search_query):
+ query = query + (
+ "CONCAT(pf.Name, psf.Name, ' ', psf.FullName, ' ', psf.ShortName) "
+ "LIKE %s ")
+ params = params + ((f"%{search_query}%",),)# type: ignore[operator]
+
+ query = query + f"LIMIT {int(limit)} OFFSET {int(offset)}"
+ with gn3conn.cursor(DictCursor) as cursor:
+ cursor.execute(
+ query, tuple(item for sublist in params for item in sublist))
+ return tuple(row for row in cursor.fetchall())
+
+@authorised_p(
+ ("system:data:link-to-group",),
+ error_description=(
+ "You do not have sufficient privileges to link data to (a) "
+ "group(s)."),
+ oauth2_scope="profile group resource")
+def link_mrna_data(
+ conn: authdb.DbConnection, group: Group, datasets: dict) -> dict:
+ """Link genotye `datasets` to `group`."""
+ with authdb.cursor(conn) as cursor:
+ cursor.executemany(
+ "INSERT INTO linked_mrna_data VALUES "
+ "(:data_link_id, :group_id, :SpeciesId, :InbredSetId, "
+ ":ProbeFreezeId, :ProbeSetFreezeId, :dataset_name, "
+ ":dataset_fullname, :dataset_shortname) "
+ "ON CONFLICT "
+ "(SpeciesId, InbredSetId, ProbeFreezeId, ProbeSetFreezeId) "
+ "DO NOTHING",
+ tuple({
+ "data_link_id": str(uuid.uuid4()),
+ "group_id": str(group.group_id),
+ **{
+ key: value for key,value in dataset.items() if key in (
+ "SpeciesId", "InbredSetId", "ProbeFreezeId",
+ "ProbeSetFreezeId", "dataset_fullname", "dataset_name",
+ "dataset_shortname")
+ }
+ } for dataset in datasets))
+ return {
+ "description": (
+ f"Successfully linked {len(datasets)} to group "
+ f"'{group.group_name}'."),
+ "group": dictify(group),
+ "datasets": datasets
+ }
diff --git a/gn_auth/auth/authorisation/data/phenotypes.py b/gn_auth/auth/authorisation/data/phenotypes.py
new file mode 100644
index 0000000..ff98295
--- /dev/null
+++ b/gn_auth/auth/authorisation/data/phenotypes.py
@@ -0,0 +1,140 @@
+"""Handle linking of Phenotype data to the Auth(entic|oris)ation system."""
+import uuid
+from typing import Any, Iterable
+
+from MySQLdb.cursors import DictCursor
+
+import gn3.auth.db as authdb
+import gn3.db_utils as gn3db
+from gn3.auth.dictify import dictify
+from gn3.auth.authorisation.checks import authorised_p
+from gn3.auth.authorisation.groups.models import Group
+
+def linked_phenotype_data(
+ authconn: authdb.DbConnection, gn3conn: gn3db.Connection,
+ species: str = "") -> Iterable[dict[str, Any]]:
+ """Retrieve phenotype data linked to user groups."""
+ authkeys = ("SpeciesId", "InbredSetId", "PublishFreezeId", "PublishXRefId")
+ with (authdb.cursor(authconn) as authcursor,
+ gn3conn.cursor(DictCursor) as gn3cursor):
+ authcursor.execute("SELECT * FROM linked_phenotype_data")
+ linked = tuple(tuple(row[key] for key in authkeys)
+ for row in authcursor.fetchall())
+ if len(linked) <= 0:
+ return iter(())
+ paramstr = ", ".join(["(%s, %s, %s, %s)"] * len(linked))
+ query = (
+ "SELECT spc.SpeciesId, spc.Name AS SpeciesName, iset.InbredSetId, "
+ "iset.InbredSetName, pf.Id AS PublishFreezeId, "
+ "pf.Name AS dataset_name, pf.FullName AS dataset_fullname, "
+ "pf.ShortName AS dataset_shortname, pxr.Id AS PublishXRefId "
+ "FROM "
+ "Species AS spc "
+ "INNER JOIN InbredSet AS iset "
+ "ON spc.SpeciesId=iset.SpeciesId "
+ "INNER JOIN PublishFreeze AS pf "
+ "ON iset.InbredSetId=pf.InbredSetId "
+ "INNER JOIN PublishXRef AS pxr "
+ "ON pf.InbredSetId=pxr.InbredSetId") + (
+ " WHERE" if (len(linked) > 0 or bool(species)) else "") + (
+ (" (spc.SpeciesId, iset.InbredSetId, pf.Id, pxr.Id) "
+ f"IN ({paramstr})") if len(linked) > 0 else "") + (
+ " AND"if len(linked) > 0 else "") + (
+ " spc.SpeciesName=%s" if bool(species) else "")
+ params = tuple(item for sublist in linked for item in sublist) + (
+ (species,) if bool(species) else tuple())
+ gn3cursor.execute(query, params)
+ return (item for item in gn3cursor.fetchall())
+
+@authorised_p(("system:data:link-to-group",),
+ error_description=(
+ "You do not have sufficient privileges to link data to (a) "
+ "group(s)."),
+ oauth2_scope="profile group resource")
+def ungrouped_phenotype_data(
+ authconn: authdb.DbConnection, gn3conn: gn3db.Connection):
+ """Retrieve phenotype data that is not linked to any user group."""
+ with gn3conn.cursor(DictCursor) as cursor:
+ params = tuple(
+ (row["SpeciesId"], row["InbredSetId"], row["PublishFreezeId"],
+ row["PublishXRefId"])
+ for row in linked_phenotype_data(authconn, gn3conn))
+ paramstr = ", ".join(["(?, ?, ?, ?)"] * len(params))
+ query = (
+ "SELECT spc.SpeciesId, spc.SpeciesName, iset.InbredSetId, "
+ "iset.InbredSetName, pf.Id AS PublishFreezeId, "
+ "pf.Name AS dataset_name, pf.FullName AS dataset_fullname, "
+ "pf.ShortName AS dataset_shortname, pxr.Id AS PublishXRefId "
+ "FROM "
+ "Species AS spc "
+ "INNER JOIN InbredSet AS iset "
+ "ON spc.SpeciesId=iset.SpeciesId "
+ "INNER JOIN PublishFreeze AS pf "
+ "ON iset.InbredSetId=pf.InbredSetId "
+ "INNER JOIN PublishXRef AS pxr "
+ "ON pf.InbredSetId=pxr.InbredSetId")
+ if len(params) > 0:
+ query = query + (
+ f" WHERE (iset.InbredSetId, pf.Id, pxr.Id) NOT IN ({paramstr})")
+
+ cursor.execute(
+ query, tuple(item for sublist in params for item in sublist))
+ return tuple(dict(row) for row in cursor.fetchall())
+
+ return tuple()
+
+def __traits__(gn3conn: gn3db.Connection, params: tuple[dict, ...]) -> tuple[dict, ...]:
+ """An internal utility function. Don't use outside of this module."""
+ if len(params) < 1:
+ return tuple()
+ paramstr = ", ".join(["(%s, %s, %s, %s)"] * len(params))
+ with gn3conn.cursor(DictCursor) as cursor:
+ cursor.execute(
+ "SELECT spc.SpeciesId, iset.InbredSetId, pf.Id AS PublishFreezeId, "
+ "pf.Name AS dataset_name, pf.FullName AS dataset_fullname, "
+ "pf.ShortName AS dataset_shortname, pxr.Id AS PublishXRefId "
+ "FROM "
+ "Species AS spc "
+ "INNER JOIN InbredSet AS iset "
+ "ON spc.SpeciesId=iset.SpeciesId "
+ "INNER JOIN PublishFreeze AS pf "
+ "ON iset.InbredSetId=pf.InbredSetId "
+ "INNER JOIN PublishXRef AS pxr "
+ "ON pf.InbredSetId=pxr.InbredSetId "
+ "WHERE (spc.SpeciesName, iset.InbredSetName, pf.Name, pxr.Id) "
+ f"IN ({paramstr})",
+ tuple(
+ itm for sublist in (
+ (item["species"], item["group"], item["dataset"], item["name"])
+ for item in params)
+ for itm in sublist))
+ return cursor.fetchall()
+
+@authorised_p(("system:data:link-to-group",),
+ error_description=(
+ "You do not have sufficient privileges to link data to (a) "
+ "group(s)."),
+ oauth2_scope="profile group resource")
+def link_phenotype_data(
+ authconn:authdb.DbConnection, gn3conn: gn3db.Connection, group: Group,
+ traits: tuple[dict, ...]) -> dict:
+ """Link phenotype traits to a user group."""
+ with authdb.cursor(authconn) as cursor:
+ params = tuple({
+ "data_link_id": str(uuid.uuid4()),
+ "group_id": str(group.group_id),
+ **item
+ } for item in __traits__(gn3conn, traits))
+ cursor.executemany(
+ "INSERT INTO linked_phenotype_data "
+ "VALUES ("
+ ":data_link_id, :group_id, :SpeciesId, :InbredSetId, "
+ ":PublishFreezeId, :dataset_name, :dataset_fullname, "
+ ":dataset_shortname, :PublishXRefId"
+ ")",
+ params)
+ return {
+ "description": (
+ f"Successfully linked {len(traits)} traits to group."),
+ "group": dictify(group),
+ "traits": params
+ }
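+
+# Illustrative usage sketch (editorial addition, not part of the original
+# module): `__traits__` looks traits up by their "species", "group", "dataset"
+# and "name" keys, so with hypothetical connections `authconn`/`gn3conn` and a
+# `Group` instance `grp`, a call could look like:
+#
+#     link_phenotype_data(authconn, gn3conn, grp, (
+#         {"species": "Mouse", "group": "BXD", "dataset": "BXDPublish",
+#          "name": "10001"},))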
diff --git a/gn_auth/auth/authorisation/data/views.py b/gn_auth/auth/authorisation/data/views.py
new file mode 100644
index 0000000..8adf862
--- /dev/null
+++ b/gn_auth/auth/authorisation/data/views.py
@@ -0,0 +1,310 @@
+"""Handle data endpoints."""
+import sys
+import uuid
+import json
+from typing import Any
+from functools import partial
+
+import redis
+from MySQLdb.cursors import DictCursor
+from authlib.integrations.flask_oauth2.errors import _HTTPException
+from flask import request, jsonify, Response, Blueprint, current_app as app
+
+import gn3.db_utils as gn3db
+from gn3 import jobs
+from gn3.commands import run_async_cmd
+from gn3.db.traits import build_trait_name
+
+from gn3.auth import db
+from gn3.auth.db_utils import with_db_connection
+
+from gn3.auth.authorisation.checks import require_json
+from gn3.auth.authorisation.errors import InvalidData, NotFoundError
+
+from gn3.auth.authorisation.groups.models import group_by_id
+
+from gn3.auth.authorisation.users.models import user_resource_roles
+
+from gn3.auth.authorisation.resources.checks import authorised_for
+from gn3.auth.authorisation.resources.models import (
+ user_resources, public_resources, attach_resources_data)
+
+from gn3.auth.authentication.users import User
+from gn3.auth.authentication.oauth2.resource_server import require_oauth
+
+from gn3.auth.authorisation.data.phenotypes import link_phenotype_data
+from gn3.auth.authorisation.data.mrna import link_mrna_data, ungrouped_mrna_data
+from gn3.auth.authorisation.data.genotypes import (
+ link_genotype_data, ungrouped_genotype_data)
+
+data = Blueprint("data", __name__)
+
+@data.route("species")
+def list_species() -> Response:
+ """List all available species information."""
+ with (gn3db.database_connection(app.config["SQL_URI"]) as gn3conn,
+ gn3conn.cursor(DictCursor) as cursor):
+ cursor.execute("SELECT * FROM Species")
+ return jsonify(tuple(dict(row) for row in cursor.fetchall()))
+
+@data.route("/authorisation", methods=["POST"])
+@require_json
+def authorisation() -> Response:
+ """Retrive the authorisation level for datasets/traits for the user."""
+ # Access endpoint with something like:
+ # curl -X POST http://127.0.0.1:8080/api/oauth2/data/authorisation \
+ # -H "Content-Type: application/json" \
+ # -d '{"traits": ["HC_M2_0606_P::1442370_at", "BXDGeno::01.001.695",
+ # "BXDPublish::10001"]}'
+ db_uri = app.config["AUTH_DB"]
+ privileges = {}
+ user = User(uuid.uuid4(), "anon@ymous.user", "Anonymous User")
+ with db.connection(db_uri) as auth_conn:
+ try:
+ with require_oauth.acquire("profile group resource") as the_token:
+ user = the_token.user
+ resources = attach_resources_data(
+ auth_conn, user_resources(auth_conn, the_token.user))
+ resources_roles = user_resource_roles(auth_conn, the_token.user)
+ privileges = {
+ resource_id: tuple(
+ privilege.privilege_id
+ for roles in resources_roles[resource_id]
+ for privilege in roles.privileges)#("group:resource:view-resource",)
+ for resource_id, is_authorised
+ in authorised_for(
+ auth_conn, the_token.user,
+ ("group:resource:view-resource",), tuple(
+ resource.resource_id for resource in resources)).items()
+ if is_authorised
+ }
+ except _HTTPException as exc:
+ err_msg = json.loads(exc.body)
+ if err_msg["error"] == "missing_authorization":
+ resources = attach_resources_data(
+ auth_conn, public_resources(auth_conn))
+ else:
+ raise exc from None
+
+ def __gen_key__(resource, data_item):
+ if resource.resource_category.resource_category_key.lower() == "phenotype":
+ return (
+ f"{resource.resource_category.resource_category_key.lower()}::"
+ f"{data_item['dataset_name']}::{data_item['PublishXRefId']}")
+ return (
+ f"{resource.resource_category.resource_category_key.lower()}::"
+ f"{data_item['dataset_name']}")
+
+ data_to_resource_map = {
+ __gen_key__(resource, data_item): resource.resource_id
+ for resource in resources
+ for data_item in resource.resource_data
+ }
+ privileges = {
+ **{
+ resource.resource_id: ("system:resource:public-read",)
+ for resource in resources if resource.public
+ },
+ **privileges}
+
+ args = request.get_json()
+ traits_names = args["traits"] # type: ignore[index]
+ def __translate__(val):
+ return {
+ "Temp": "Temp",
+ "ProbeSet": "mRNA",
+ "Geno": "Genotype",
+ "Publish": "Phenotype"
+ }[val]
+
+ def __trait_key__(trait):
+ dataset_type = __translate__(trait['db']['dataset_type']).lower()
+ dataset_name = trait["db"]["dataset_name"]
+ if dataset_type == "phenotype":
+ return f"{dataset_type}::{dataset_name}::{trait['trait_name']}"
+ return f"{dataset_type}::{dataset_name}"
+
+ return jsonify(tuple(
+ {
+ "user": user._asdict(),
+ **{key:trait[key] for key in ("trait_fullname", "trait_name")},
+ "dataset_name": trait["db"]["dataset_name"],
+ "dataset_type": __translate__(trait["db"]["dataset_type"]),
+ "resource_id": data_to_resource_map.get(__trait_key__(trait)),
+ "privileges": privileges.get(
+ data_to_resource_map.get(
+ __trait_key__(trait),
+ uuid.UUID("4afa415e-94cb-4189-b2c6-f9ce2b6a878d")),
+ tuple()) + (
+ # Temporary traits do not exist in db: Set them
+ # as public-read
+ ("system:resource:public-read",)
+ if trait["db"]["dataset_type"] == "Temp"
+ else tuple())
+ } for trait in
+ (build_trait_name(trait_fullname)
+ for trait_fullname in traits_names)))
+
+def __search_mrna__():
+ query = __request_key__("query", "")
+ limit = int(__request_key__("limit", 10000))
+ offset = int(__request_key__("offset", 0))
+ with gn3db.database_connection(app.config["SQL_URI"]) as gn3conn:
+ __ungrouped__ = partial(
+ ungrouped_mrna_data, gn3conn=gn3conn, search_query=query,
+ selected=__request_key_list__("selected"),
+ limit=limit, offset=offset)
+ return jsonify(with_db_connection(__ungrouped__))
+
+def __request_key__(key: str, default: Any = ""):
+ if bool(request.json):
+ return request.json.get(#type: ignore[union-attr]
+ key, request.args.get(key, request.form.get(key, default)))
+ return request.args.get(key, request.form.get(key, default))
+
+def __request_key_list__(key: str, default: tuple[Any, ...] = tuple()):
+ if bool(request.json):
+ return (request.json.get(key,[])#type: ignore[union-attr]
+ or request.args.getlist(key) or request.form.getlist(key)
+ or list(default))
+ return (request.args.getlist(key)
+ or request.form.getlist(key) or list(default))
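+
+# Editorial note: both helpers read a key from the JSON body first, then the
+# query string, then the form data, before falling back to `default`. Values
+# from the query string or form arrive as strings, hence the int() casts at
+# the call sites, e.g. __request_key__("limit", 10000) returns "50" when the
+# request carries "?limit=50" and no JSON body.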
+
+def __search_genotypes__():
+ query = __request_key__("query", "")
+ limit = int(__request_key__("limit", 10000))
+ offset = int(__request_key__("offset", 0))
+ with gn3db.database_connection(app.config["SQL_URI"]) as gn3conn:
+ __ungrouped__ = partial(
+ ungrouped_genotype_data, gn3conn=gn3conn, search_query=query,
+ selected=__request_key_list__("selected"),
+ limit=limit, offset=offset)
+ return jsonify(with_db_connection(__ungrouped__))
+
+def __search_phenotypes__():
+ # launch the external process to search for phenotypes
+ redisuri = app.config["REDIS_URI"]
+ with redis.Redis.from_url(redisuri, decode_responses=True) as redisconn:
+ job_id = uuid.uuid4()
+ selected = __request_key__("selected_traits", [])
+ command =[
+ sys.executable, "-m", "scripts.search_phenotypes",
+ __request_key__("species_name"),
+ __request_key__("query"),
+ str(job_id),
+ f"--host={__request_key__('gn3_server_uri')}",
+ f"--auth-db-uri={app.config['AUTH_DB']}",
+ f"--gn3-db-uri={app.config['SQL_URI']}",
+ f"--redis-uri={redisuri}",
+ f"--per-page={__request_key__('per_page')}"] +(
+ [f"--selected={json.dumps(selected)}"]
+ if len(selected) > 0 else [])
+ jobs.create_job(redisconn, {
+ "job_id": job_id, "command": command, "status": "queued",
+ "search_results": tuple()})
+ return jsonify({
+ "job_id": job_id,
+ "command_id": run_async_cmd(
+ redisconn, app.config.get("REDIS_JOB_QUEUE"), command),
+ "command": command
+ })
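+
+# Editorial note: the phenotype search runs out of process; clients are
+# expected to poll the /search/phenotype/<job_id> endpoint below with the
+# returned job_id until the external script has written its results to Redis.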
+
+@data.route("/search", methods=["GET"])
+@require_oauth("profile group resource")
+def search_unlinked_data():
+ """Search for various unlinked data."""
+ dataset_type = request.json["dataset_type"]
+ search_fns = {
+ "mrna": __search_mrna__,
+ "genotype": __search_genotypes__,
+ "phenotype": __search_phenotypes__
+ }
+ return search_fns[dataset_type]()
+
+@data.route("/search/phenotype/<uuid:job_id>", methods=["GET"])
+def pheno_search_results(job_id: uuid.UUID) -> Response:
+ """Get the search results from the external script"""
+ def __search_error__(err):
+ raise NotFoundError(err["error_description"])
+ redisuri = app.config["REDIS_URI"]
+ with redis.Redis.from_url(redisuri, decode_responses=True) as redisconn:
+ return jobs.job(redisconn, job_id).either(
+ __search_error__, jsonify)
+
+@data.route("/link/genotype", methods=["POST"])
+def link_genotypes() -> Response:
+ """Link genotype data to group."""
+ def __values__(form) -> dict[str, Any]:
+ if not bool(form.get("species_name", "").strip()):
+ raise InvalidData("Expected 'species_name' not provided.")
+ if not bool(form.get("group_id")):
+ raise InvalidData("Expected 'group_id' not provided.",)
+ try:
+ _group_id = uuid.UUID(form.get("group_id"))
+ except TypeError as terr:
+ raise InvalidData("Expected a UUID for 'group_id' value.") from terr
+ if not bool(form.get("selected")):
+ raise InvalidData("Expected at least one dataset to be provided.")
+ return {
+ "group_id": uuid.UUID(form.get("group_id")),
+ "datasets": form.get("selected")
+ }
+
+ def __link__(conn: db.DbConnection, group_id: uuid.UUID, datasets: dict):
+ return link_genotype_data(conn, group_by_id(conn, group_id), datasets)
+
+ return jsonify(with_db_connection(
+ partial(__link__, **__values__(request.json))))
+
+@data.route("/link/mrna", methods=["POST"])
+def link_mrna() -> Response:
+ """Link mrna data to group."""
+ def __values__(form) -> dict[str, Any]:
+ if not bool(form.get("species_name", "").strip()):
+ raise InvalidData("Expected 'species_name' not provided.")
+ if not bool(form.get("group_id")):
+ raise InvalidData("Expected 'group_id' not provided.",)
+ try:
+ _group_id = uuid.UUID(form.get("group_id"))
+ except TypeError as terr:
+ raise InvalidData("Expected a UUID for 'group_id' value.") from terr
+ if not bool(form.get("selected")):
+ raise InvalidData("Expected at least one dataset to be provided.")
+ return {
+ "group_id": uuid.UUID(form.get("group_id")),
+ "datasets": form.get("selected")
+ }
+
+ def __link__(conn: db.DbConnection, group_id: uuid.UUID, datasets: dict):
+ return link_mrna_data(conn, group_by_id(conn, group_id), datasets)
+
+ return jsonify(with_db_connection(
+ partial(__link__, **__values__(request.json))))
+
+@data.route("/link/phenotype", methods=["POST"])
+def link_phenotype() -> Response:
+ """Link phenotype data to group."""
+ def __values__(form):
+ if not bool(form.get("species_name", "").strip()):
+ raise InvalidData("Expected 'species_name' not provided.")
+ if not bool(form.get("group_id")):
+ raise InvalidData("Expected 'group_id' not provided.",)
+ try:
+ _group_id = uuid.UUID(form.get("group_id"))
+ except TypeError as terr:
+ raise InvalidData("Expected a UUID for 'group_id' value.") from terr
+ if not bool(form.get("selected")):
+ raise InvalidData("Expected at least one dataset to be provided.")
+ return {
+ "group_id": uuid.UUID(form["group_id"]),
+ "traits": form["selected"]
+ }
+
+ with gn3db.database_connection(app.config["SQL_URI"]) as gn3conn:
+ def __link__(conn: db.DbConnection, group_id: uuid.UUID,
+ traits: tuple[dict, ...]) -> dict:
+ return link_phenotype_data(
+ conn, gn3conn, group_by_id(conn, group_id), traits)
+
+ return jsonify(with_db_connection(
+ partial(__link__, **__values__(request.json))))
diff --git a/gn_auth/auth/authorisation/errors.py b/gn_auth/auth/authorisation/errors.py
new file mode 100644
index 0000000..3bc7a04
--- /dev/null
+++ b/gn_auth/auth/authorisation/errors.py
@@ -0,0 +1,42 @@
+"""Authorisation exceptions"""
+
+class AuthorisationError(Exception):
+ """
+ Top-level exception for the `gn3.auth.authorisation` package.
+
+ All exceptions in this package should inherit from this class.
+ """
+ error_code: int = 400
+
+class ForbiddenAccess(AuthorisationError):
+ """Raised for forbidden access."""
+ error_code: int = 403
+
+class UserRegistrationError(AuthorisationError):
+ """Raised whenever a user registration fails"""
+
+class NotFoundError(AuthorisationError):
+ """Raised whenever we try fetching (a/an) object(s) that do(es) not exist."""
+ error_code: int = 404
+
+class InvalidData(AuthorisationError):
+ """
+ Exception if user requests invalid data
+ """
+ error_code: int = 400
+
+class InconsistencyError(AuthorisationError):
+ """
+ Exception raised due to data inconsistencies
+ """
+ error_code: int = 500
+
+class PasswordError(AuthorisationError):
+ """
+ Raise in case of an error with passwords.
+ """
+
+class UsernameError(AuthorisationError):
+ """
+ Raise in case of an error with a user's name.
+ """
diff --git a/gn_auth/auth/authorisation/groups/__init__.py b/gn_auth/auth/authorisation/groups/__init__.py
new file mode 100644
index 0000000..1cb0bba
--- /dev/null
+++ b/gn_auth/auth/authorisation/groups/__init__.py
@@ -0,0 +1,3 @@
+"""Initialise the `gn3.auth.authorisation.groups` package"""
+
+from .models import Group, GroupRole
diff --git a/gn_auth/auth/authorisation/groups/data.py b/gn_auth/auth/authorisation/groups/data.py
new file mode 100644
index 0000000..ee6f70e
--- /dev/null
+++ b/gn_auth/auth/authorisation/groups/data.py
@@ -0,0 +1,106 @@
+"""Handles the resource objects' data."""
+from MySQLdb.cursors import DictCursor
+
+from gn3 import db_utils as gn3db
+from gn3.auth import db as authdb
+from gn3.auth.authorisation.groups import Group
+from gn3.auth.authorisation.checks import authorised_p
+from gn3.auth.authorisation.errors import NotFoundError
+
+def __fetch_mrna_data_by_ids__(
+ conn: gn3db.Connection, dataset_ids: tuple[str, ...]) -> tuple[
+ dict, ...]:
+ """Fetch mRNA Assay data by ID."""
+ with conn.cursor(DictCursor) as cursor:
+ paramstr = ", ".join(["%s"] * len(dataset_ids))
+ cursor.execute(
+ "SELECT psf.Id, psf.Name AS dataset_name, "
+ "psf.FullName AS dataset_fullname, "
+ "ifiles.GN_AccesionId AS accession_id FROM ProbeSetFreeze AS psf "
+ "INNER JOIN InfoFiles AS ifiles ON psf.Name=ifiles.InfoPageName "
+ f"WHERE psf.Id IN ({paramstr})",
+ dataset_ids)
+ res = cursor.fetchall()
+ if res:
+ return tuple(dict(row) for row in res)
+ raise NotFoundError("Could not find mRNA Assay data with the given ID.")
+
+def __fetch_geno_data_by_ids__(
+ conn: gn3db.Connection, dataset_ids: tuple[str, ...]) -> tuple[
+ dict, ...]:
+ """Fetch genotype data by ID."""
+ with conn.cursor(DictCursor) as cursor:
+ paramstr = ", ".join(["%s"] * len(dataset_ids))
+ cursor.execute(
+ "SELECT gf.Id, gf.Name AS dataset_name, "
+ "gf.FullName AS dataset_fullname, "
+ "ifiles.GN_AccesionId AS accession_id FROM GenoFreeze AS gf "
+ "INNER JOIN InfoFiles AS ifiles ON gf.Name=ifiles.InfoPageName "
+ f"WHERE gf.Id IN ({paramstr})",
+ dataset_ids)
+ res = cursor.fetchall()
+ if res:
+ return tuple(dict(row) for row in res)
+ raise NotFoundError("Could not find Genotype data with the given ID.")
+
+def __fetch_pheno_data_by_ids__(
+ conn: gn3db.Connection, dataset_ids: tuple[str, ...]) -> tuple[
+ dict, ...]:
+ """Fetch phenotype data by ID."""
+ with conn.cursor(DictCursor) as cursor:
+ paramstr = ", ".join(["%s"] * len(dataset_ids))
+ cursor.execute(
+ "SELECT pxf.Id, iset.InbredSetName, pf.Id AS dataset_id, "
+ "pf.Name AS dataset_name, pf.FullName AS dataset_fullname, "
+ "ifiles.GN_AccesionId AS accession_id "
+ "FROM PublishXRef AS pxf "
+ "INNER JOIN InbredSet AS iset ON pxf.InbredSetId=iset.InbredSetId "
+ "INNER JOIN PublishFreeze AS pf ON iset.InbredSetId=pf.InbredSetId "
+ "INNER JOIN InfoFiles AS ifiles ON pf.Name=ifiles.InfoPageName "
+ f"WHERE pxf.Id IN ({paramstr})",
+ dataset_ids)
+ res = cursor.fetchall()
+ if res:
+ return tuple(dict(row) for row in res)
+ raise NotFoundError(
+ "Could not find Phenotype/Publish data with the given IDs.")
+
+def __fetch_data_by_id(
+ conn: gn3db.Connection, dataset_type: str,
+ dataset_ids: tuple[str, ...]) -> tuple[dict, ...]:
+ """Fetch data from MySQL by IDs."""
+ fetch_fns = {
+ "mrna": __fetch_mrna_data_by_ids__,
+ "genotype": __fetch_geno_data_by_ids__,
+ "phenotype": __fetch_pheno_data_by_ids__
+ }
+ return fetch_fns[dataset_type](conn, dataset_ids)
+
+@authorised_p(("system:data:link-to-group",),
+ error_description=(
+ "You do not have sufficient privileges to link data to (a) "
+ "group(s)."),
+ oauth2_scope="profile group resource")
+def link_data_to_group(
+ authconn: authdb.DbConnection, gn3conn: gn3db.Connection,
+ dataset_type: str, dataset_ids: tuple[str, ...], group: Group) -> tuple[
+ dict, ...]:
+ """Link the given data to the specified group."""
+ the_data = __fetch_data_by_id(gn3conn, dataset_type, dataset_ids)
+ with authdb.cursor(authconn) as cursor:
+ params = tuple({
+ "group_id": str(group.group_id), "dataset_type": {
+ "mrna": "mRNA", "genotype": "Genotype",
+ "phenotype": "Phenotype"
+ }[dataset_type],
+ "dataset_or_trait_id": item["Id"],
+ "dataset_name": item["dataset_name"],
+ "dataset_fullname": item["dataset_fullname"],
+ "accession_id": item["accession_id"]
+ } for item in the_data)
+ cursor.executemany(
+ "INSERT INTO linked_group_data VALUES"
+ "(:group_id, :dataset_type, :dataset_or_trait_id, :dataset_name, "
+ ":dataset_fullname, :accession_id)",
+ params)
+ return params
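+
+# Illustrative usage sketch (editorial addition, not part of the original
+# module): `authconn`, `gn3conn` and `grp` are hypothetical names for open
+# auth and GN3 connections and a `Group` instance; a call linking two
+# genotype datasets by their GenoFreeze ids could look like:
+#
+#     linked = link_data_to_group(
+#         authconn, gn3conn, "genotype", ("1", "2"), grp)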
diff --git a/gn_auth/auth/authorisation/groups/models.py b/gn_auth/auth/authorisation/groups/models.py
new file mode 100644
index 0000000..5a3ae50
--- /dev/null
+++ b/gn_auth/auth/authorisation/groups/models.py
@@ -0,0 +1,400 @@
+"""Handle the management of resource/user groups."""
+import json
+from uuid import UUID, uuid4
+from functools import reduce
+from typing import Any, Sequence, Iterable, Optional, NamedTuple
+
+from flask import g
+from pymonad.maybe import Just, Maybe, Nothing
+
+from gn3.auth import db
+from gn3.auth.dictify import dictify
+from gn3.auth.authentication.users import User, user_by_id
+
+from ..checks import authorised_p
+from ..privileges import Privilege
+from ..errors import NotFoundError, AuthorisationError, InconsistencyError
+from ..roles.models import (
+ Role, create_role, check_user_editable, revoke_user_role_by_name,
+ assign_user_role_by_name)
+
+class Group(NamedTuple):
+ """Class representing a group."""
+ group_id: UUID
+ group_name: str
+ group_metadata: dict[str, Any]
+
+ def dictify(self):
+ """Return a dict representation of `Group` objects."""
+ return {
+ "group_id": self.group_id, "group_name": self.group_name,
+ "group_metadata": self.group_metadata
+ }
+
+DUMMY_GROUP = Group(
+ group_id=UUID("77cee65b-fe29-4383-ae41-3cb3b480cc70"),
+ group_name="GN3_DUMMY_GROUP",
+ group_metadata={
+ "group-description": "This is a dummy group to use as a placeholder"
+ })
+
+class GroupRole(NamedTuple):
+ """Class representing a role tied/belonging to a group."""
+ group_role_id: UUID
+ group: Group
+ role: Role
+
+ def dictify(self) -> dict[str, Any]:
+ """Return a dict representation of `GroupRole` objects."""
+ return {
+ "group_role_id": self.group_role_id, "group": dictify(self.group),
+ "role": dictify(self.role)
+ }
+
+class GroupCreationError(AuthorisationError):
+ """Raised whenever a group creation fails"""
+
+class MembershipError(AuthorisationError):
+ """Raised when there is an error with a user's membership to a group."""
+
+ def __init__(self, user: User, groups: Sequence[Group]):
+ """Initialise the `MembershipError` exception object."""
+ groups_str = ", ".join(group.group_name for group in groups)
+ error_description = (
+ f"User '{user.name} ({user.email})' is a member of {len(groups)} "
+ f"groups ({groups_str})")
+ super().__init__(f"{type(self).__name__}: {error_description}.")
+
+def user_membership(conn: db.DbConnection, user: User) -> Sequence[Group]:
+ """Returns all the groups that a member belongs to"""
+ query = (
+ "SELECT groups.group_id, group_name, groups.group_metadata "
+ "FROM group_users INNER JOIN groups "
+ "ON group_users.group_id=groups.group_id "
+ "WHERE group_users.user_id=?")
+ with db.cursor(conn) as cursor:
+ cursor.execute(query, (str(user.user_id),))
+ groups = tuple(Group(row[0], row[1], json.loads(row[2]))
+ for row in cursor.fetchall())
+
+ return groups
+
+@authorised_p(
+ privileges = ("system:group:create-group",),
+ error_description = (
+ "You do not have the appropriate privileges to enable you to "
+ "create a new group."),
+ oauth2_scope = "profile group")
+def create_group(
+ conn: db.DbConnection, group_name: str, group_leader: User,
+ group_description: Optional[str] = None) -> Group:
+ """Create a new group."""
+ user_groups = user_membership(conn, group_leader)
+ if len(user_groups) > 0:
+ raise MembershipError(group_leader, user_groups)
+
+ with db.cursor(conn) as cursor:
+ new_group = save_group(
+ cursor, group_name,(
+ {"group_description": group_description}
+ if group_description else {}))
+ add_user_to_group(cursor, new_group, group_leader)
+ revoke_user_role_by_name(cursor, group_leader, "group-creator")
+ assign_user_role_by_name(cursor, group_leader, "group-leader")
+ return new_group
+
+@authorised_p(("group:role:create-role",),
+ error_description="Could not create the group role")
+def create_group_role(
+ conn: db.DbConnection, group: Group, role_name: str,
+ privileges: Iterable[Privilege]) -> GroupRole:
+ """Create a role attached to a group."""
+ with db.cursor(conn) as cursor:
+ group_role_id = uuid4()
+ role = create_role(cursor, role_name, privileges)
+ cursor.execute(
+ ("INSERT INTO group_roles(group_role_id, group_id, role_id) "
+ "VALUES(?, ?, ?)"),
+ (str(group_role_id), str(group.group_id), str(role.role_id)))
+
+ return GroupRole(group_role_id, group, role)
+
+def authenticated_user_group(conn) -> Maybe:
+ """
+ Returns the currently authenticated user's group.
+
+ Returns a Maybe object: Just(group) when found, Nothing otherwise.
+ """
+ user = g.user
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ ("SELECT groups.* FROM group_users "
+ "INNER JOIN groups ON group_users.group_id=groups.group_id "
+ "WHERE group_users.user_id = ?"),
+ (str(user.user_id),))
+ groups = tuple(Group(UUID(row[0]), row[1], json.loads(row[2] or "{}"))
+ for row in cursor.fetchall())
+
+ if len(groups) > 1:
+ raise MembershipError(user, groups)
+
+ if len(groups) == 1:
+ return Just(groups[0])
+
+ return Nothing
+
+def user_group(conn: db.DbConnection, user: User) -> Maybe[Group]:
+ """Returns the given user's group"""
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ ("SELECT groups.group_id, groups.group_name, groups.group_metadata "
+ "FROM group_users "
+ "INNER JOIN groups ON group_users.group_id=groups.group_id "
+ "WHERE group_users.user_id = ?"),
+ (str(user.user_id),))
+ groups = tuple(
+ Group(UUID(row[0]), row[1], json.loads(row[2] or "{}"))
+ for row in cursor.fetchall())
+
+ if len(groups) > 1:
+ raise MembershipError(user, groups)
+
+ if len(groups) == 1:
+ return Just(groups[0])
+
+ return Nothing
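+
+# Editorial note: callers in this package typically unwrap the Maybe with
+#     user_group(conn, user).maybe(DUMMY_GROUP, lambda grp: grp)
+# which yields DUMMY_GROUP when the user does not belong to any group.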
+
+def is_group_leader(conn: db.DbConnection, user: User, group: Group) -> bool:
+ """Check whether the given `user` is the leader of `group`."""
+
+ ugroup = user_group(conn, user).maybe(
+ False, lambda val: val) # type: ignore[arg-type, misc]
+ if not ugroup:
+ # User cannot be a group leader if not a member of ANY group
+ return False
+
+ if not ugroup == group:
+ # User cannot be a group leader if not a member of THIS group
+ return False
+
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ ("SELECT roles.role_name FROM user_roles LEFT JOIN roles "
+ "ON user_roles.role_id = roles.role_id WHERE user_id = ?"),
+ (str(user.user_id),))
+ role_names = tuple(row[0] for row in cursor.fetchall())
+
+ return "group-leader" in role_names
+
+def all_groups(conn: db.DbConnection) -> Maybe[Sequence[Group]]:
+ """Retrieve all existing groups"""
+ with db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM groups")
+ res = cursor.fetchall()
+ if res:
+ return Just(tuple(
+ Group(row["group_id"], row["group_name"],
+ json.loads(row["group_metadata"])) for row in res))
+
+ return Nothing
+
+def save_group(
+ cursor: db.DbCursor, group_name: str,
+ group_metadata: dict[str, Any]) -> Group:
+ """Save a group to db"""
+ the_group = Group(uuid4(), group_name, group_metadata)
+ cursor.execute(
+ ("INSERT INTO groups "
+ "VALUES(:group_id, :group_name, :group_metadata) "
+ "ON CONFLICT (group_id) DO UPDATE SET "
+ "group_name=:group_name, group_metadata=:group_metadata"),
+ {"group_id": str(the_group.group_id), "group_name": the_group.group_name,
+ "group_metadata": json.dumps(the_group.group_metadata)})
+ return the_group
+
+def add_user_to_group(cursor: db.DbCursor, the_group: Group, user: User):
+ """Add `user` to `the_group` as a member."""
+ cursor.execute(
+ ("INSERT INTO group_users VALUES (:group_id, :user_id) "
+ "ON CONFLICT (group_id, user_id) DO NOTHING"),
+ {"group_id": str(the_group.group_id), "user_id": str(user.user_id)})
+
+@authorised_p(
+ privileges = ("system:group:view-group",),
+ error_description = (
+ "You do not have the appropriate privileges to access the list of users"
+ " in the group."))
+def group_users(conn: db.DbConnection, group_id: UUID) -> Iterable[User]:
+ """Retrieve all users that are members of group with id `group_id`."""
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "SELECT u.* FROM group_users AS gu INNER JOIN users AS u "
+ "ON gu.user_id = u.user_id WHERE gu.group_id=:group_id",
+ {"group_id": str(group_id)})
+ results = cursor.fetchall()
+
+ return (User(UUID(row["user_id"]), row["email"], row["name"])
+ for row in results)
+
+@authorised_p(
+ privileges = ("system:group:view-group",),
+ error_description = (
+ "You do not have the appropriate privileges to access the group."))
+def group_by_id(conn: db.DbConnection, group_id: UUID) -> Group:
+ """Retrieve a group by its ID"""
+ with db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM groups WHERE group_id=:group_id",
+ {"group_id": str(group_id)})
+ row = cursor.fetchone()
+ if row:
+ return Group(
+ UUID(row["group_id"]),
+ row["group_name"],
+ json.loads(row["group_metadata"]))
+
+ raise NotFoundError(f"Could not find group with ID '{group_id}'.")
+
+@authorised_p(("system:group:view-group", "system:group:edit-group"),
+ error_description=("You do not have the appropriate authorisation"
+ " to act upon the join requests."),
+ oauth2_scope="profile group")
+def join_requests(conn: db.DbConnection, user: User):
+ """List all the join requests for the user's group."""
+ with db.cursor(conn) as cursor:
+ group = user_group(conn, user).maybe(DUMMY_GROUP, lambda grp: grp)# type: ignore[misc]
+ if group != DUMMY_GROUP and is_group_leader(conn, user, group):
+ cursor.execute(
+ "SELECT gjr.*, u.email, u.name FROM group_join_requests AS gjr "
+ "INNER JOIN users AS u ON gjr.requester_id=u.user_id "
+ "WHERE gjr.group_id=? AND gjr.status='PENDING'",
+ (str(group.group_id),))
+ return tuple(dict(row) for row in cursor.fetchall())
+
+ raise AuthorisationError(
+ "You do not have the appropriate authorisation to access the "
+ "group's join requests.")
+
+@authorised_p(("system:group:view-group", "system:group:edit-group"),
+ error_description=("You do not have the appropriate authorisation"
+ " to act upon the join requests."),
+ oauth2_scope="profile group")
+def accept_reject_join_request(
+ conn: db.DbConnection, request_id: UUID, user: User, status: str) -> dict:
+ """Accept/Reject a join request."""
+ assert status in ("ACCEPTED", "REJECTED"), f"Invalid status '{status}'."
+ with db.cursor(conn) as cursor:
+ group = user_group(conn, user).maybe(DUMMY_GROUP, lambda grp: grp) # type: ignore[misc]
+ cursor.execute("SELECT * FROM group_join_requests WHERE request_id=?",
+ (str(request_id),))
+ row = cursor.fetchone()
+ if row:
+ if group.group_id == UUID(row["group_id"]):
+ try:
+ the_user = user_by_id(conn, UUID(row["requester_id"]))
+ if status == "ACCEPTED":
+ add_user_to_group(cursor, group, the_user)
+ revoke_user_role_by_name(cursor, the_user, "group-creator")
+ cursor.execute(
+ "UPDATE group_join_requests SET status=? "
+ "WHERE request_id=?",
+ (status, str(request_id)))
+ return {"request_id": request_id, "status": status}
+ except NotFoundError as nfe:
+ raise InconsistencyError(
+ "Could not find user associated with join request."
+ ) from nfe
+ raise AuthorisationError(
+ "You cannot act on other groups join requests")
+ raise NotFoundError(f"Could not find request with ID '{request_id}'")
+
+def __organise_privileges__(acc, row):
+ role_id = UUID(row["role_id"])
+ role = acc.get(role_id, False)
+ if role:
+ return {
+ **acc,
+ role_id: Role(
+ role.role_id, role.role_name,
+ bool(int(row["user_editable"])),
+ role.privileges + (
+ Privilege(row["privilege_id"],
+ row["privilege_description"]),))
+ }
+ return {
+ **acc,
+ role_id: Role(
+ UUID(row["role_id"]), row["role_name"],
+ bool(int(row["user_editable"])),
+ (Privilege(row["privilege_id"], row["privilege_description"]),))
+ }
+
+# @authorised_p(("group:role:view",),
+# "Insufficient privileges to view role",
+# oauth2_scope="profile group role")
+def group_role_by_id(
+ conn: db.DbConnection, group: Group, group_role_id: UUID) -> GroupRole:
+ """Retrieve GroupRole from id by its `group_role_id`."""
+ ## TODO: do privileges check before running actual query
+ ## the check commented out above doesn't work correctly
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "SELECT gr.group_role_id, r.*, p.* "
+ "FROM group_roles AS gr "
+ "INNER JOIN roles AS r ON gr.role_id=r.role_id "
+ "INNER JOIN role_privileges AS rp ON rp.role_id=r.role_id "
+ "INNER JOIN privileges AS p ON p.privilege_id=rp.privilege_id "
+ "WHERE gr.group_role_id=? AND gr.group_id=?",
+ (str(group_role_id), str(group.group_id)))
+ rows = cursor.fetchall()
+ if rows:
+ roles: tuple[Role,...] = tuple(reduce(
+ __organise_privileges__, rows, {}).values())
+ assert len(roles) == 1
+ return GroupRole(group_role_id, group, roles[0])
+ raise NotFoundError(
+ f"Group role with ID '{group_role_id}' does not exist.")
+
+@authorised_p(("group:role:edit-role",),
+ "You do not have the privilege to edit a role.",
+ oauth2_scope="profile group role")
+def add_privilege_to_group_role(conn: db.DbConnection, group_role: GroupRole,
+ privilege: Privilege) -> GroupRole:
+ """Add `privilege` to `group_role`."""
+ ## TODO: do privileges check.
+ check_user_editable(group_role.role)
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "INSERT INTO role_privileges(role_id,privilege_id) "
+ "VALUES (?, ?) ON CONFLICT (role_id, privilege_id) "
+ "DO NOTHING",
+ (str(group_role.role.role_id), str(privilege.privilege_id)))
+ return GroupRole(
+ group_role.group_role_id,
+ group_role.group,
+ Role(group_role.role.role_id,
+ group_role.role.role_name,
+ group_role.role.user_editable,
+ group_role.role.privileges + (privilege,)))
+
+@authorised_p(("group:role:edit-role",),
+ "You do not have the privilege to edit a role.",
+ oauth2_scope="profile group role")
+def delete_privilege_from_group_role(
+ conn: db.DbConnection, group_role: GroupRole,
+ privilege: Privilege) -> GroupRole:
+ """Delete `privilege` to `group_role`."""
+ ## TODO: do privileges check.
+ check_user_editable(group_role.role)
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "DELETE FROM role_privileges WHERE "
+ "role_id=? AND privilege_id=?",
+ (str(group_role.role.role_id), str(privilege.privilege_id)))
+ return GroupRole(
+ group_role.group_role_id,
+ group_role.group,
+ Role(group_role.role.role_id,
+ group_role.role.role_name,
+ group_role.role.user_editable,
+ tuple(priv for priv in group_role.role.privileges
+ if priv != privilege)))
diff --git a/gn_auth/auth/authorisation/groups/views.py b/gn_auth/auth/authorisation/groups/views.py
new file mode 100644
index 0000000..628df36
--- /dev/null
+++ b/gn_auth/auth/authorisation/groups/views.py
@@ -0,0 +1,431 @@
+"""The views/routes for the `gn3.auth.authorisation.groups` package."""
+import uuid
+import datetime
+from typing import Iterable
+from functools import partial
+
+from MySQLdb.cursors import DictCursor
+from flask import request, jsonify, Response, Blueprint, current_app
+
+from gn3.auth import db
+from gn3 import db_utils as gn3db
+
+from gn3.auth.dictify import dictify
+from gn3.auth.db_utils import with_db_connection
+
+from .data import link_data_to_group
+from .models import (
+ Group, user_group, all_groups, DUMMY_GROUP, GroupRole, group_by_id,
+ join_requests, group_role_by_id, GroupCreationError,
+ accept_reject_join_request, group_users as _group_users,
+ create_group as _create_group, add_privilege_to_group_role,
+ delete_privilege_from_group_role, create_group_role as _create_group_role)
+
+from ..roles.models import Role
+from ..roles.models import user_roles
+
+from ..checks import authorised_p
+from ..privileges import Privilege, privileges_by_ids
+from ..errors import InvalidData, NotFoundError, AuthorisationError
+
+from ...authentication.users import User
+from ...authentication.oauth2.resource_server import require_oauth
+
+groups = Blueprint("groups", __name__)
+
+@groups.route("/list", methods=["GET"])
+@require_oauth("profile group")
+def list_groups():
+ """Return the list of groups that exist."""
+ with db.connection(current_app.config["AUTH_DB"]) as conn:
+ the_groups = all_groups(conn)
+
+ return jsonify(the_groups.maybe(
+ [], lambda grps: [dictify(grp) for grp in grps]))
+
+@groups.route("/create", methods=["POST"])
+@require_oauth("profile group")
+def create_group():
+ """Create a new group."""
+ with require_oauth.acquire("profile group") as the_token:
+ group_name=request.form.get("group_name", "").strip()
+ if not bool(group_name):
+ raise GroupCreationError("Could not create the group.")
+
+ db_uri = current_app.config["AUTH_DB"]
+ with db.connection(db_uri) as conn:
+ user = the_token.user
+ new_group = _create_group(
+ conn, group_name, user, request.form.get("group_description"))
+ return jsonify({
+ **dictify(new_group), "group_leader": dictify(user)
+ })
+
+@groups.route("/members/<uuid:group_id>", methods=["GET"])
+@require_oauth("profile group")
+def group_members(group_id: uuid.UUID) -> Response:
+ """Retrieve all the members of a group."""
+ with require_oauth.acquire("profile group") as the_token:# pylint: disable=[unused-variable]
+ db_uri = current_app.config["AUTH_DB"]
+ ## Check that user has appropriate privileges and remove the pylint disable above
+ with db.connection(db_uri) as conn:
+ return jsonify(tuple(
+ dictify(user) for user in _group_users(conn, group_id)))
+
+@groups.route("/requests/join/<uuid:group_id>", methods=["POST"])
+@require_oauth("profile group")
+def request_to_join(group_id: uuid.UUID) -> Response:
+ """Request to join a group."""
+ def __request__(conn: db.DbConnection, user: User, group_id: uuid.UUID,
+ message: str):
+ with db.cursor(conn) as cursor:
+ group = user_group(conn, user).maybe(# type: ignore[misc]
+ False, lambda grp: grp)# type: ignore[arg-type]
+ if group:
+ error = AuthorisationError(
+ "You cannot request to join a new group while being a "
+ "member of an existing group.")
+ error.error_code = 400
+ raise error
+ request_id = uuid.uuid4()
+ cursor.execute(
+ "INSERT INTO group_join_requests VALUES "
+ "(:request_id, :group_id, :user_id, :ts, :status, :msg)",
+ {
+ "request_id": str(request_id),
+ "group_id": str(group_id),
+ "user_id": str(user.user_id),
+ "ts": datetime.datetime.now().timestamp(),
+ "status": "PENDING",
+ "msg": message
+ })
+ return {
+ "request_id": request_id,
+ "message": "Successfully sent the join request."
+ }
+
+ with require_oauth.acquire("profile group") as the_token:
+ form = request.form
+ results = with_db_connection(partial(
+ __request__, user=the_token.user, group_id=group_id, message=form.get(
+ "message", "I hereby request that you add me to your group.")))
+ return jsonify(results)
+
+@groups.route("/requests/join/list", methods=["GET"])
+@require_oauth("profile group")
+def list_join_requests() -> Response:
+ """List the pending join requests."""
+ with require_oauth.acquire("profile group") as the_token:
+ return jsonify(with_db_connection(partial(
+ join_requests, user=the_token.user)))
+
+@groups.route("/requests/join/accept", methods=["POST"])
+@require_oauth("profile group")
+def accept_join_requests() -> Response:
+ """Accept a join request."""
+ with require_oauth.acquire("profile group") as the_token:
+ form = request.form
+ request_id = uuid.UUID(form.get("request_id"))
+ return jsonify(with_db_connection(partial(
+ accept_reject_join_request, request_id=request_id,
+ user=the_token.user, status="ACCEPTED")))
+
+@groups.route("/requests/join/reject", methods=["POST"])
+@require_oauth("profile group")
+def reject_join_requests() -> Response:
+ """Reject a join request."""
+ with require_oauth.acquire("profile group") as the_token:
+ form = request.form
+ request_id = uuid.UUID(form.get("request_id"))
+ return jsonify(with_db_connection(partial(
+ accept_reject_join_request, request_id=request_id,
+ user=the_token.user, status="REJECTED")))
+
+def unlinked_mrna_data(
+ conn: db.DbConnection, group: Group) -> tuple[dict, ...]:
+ """
+ Retrieve all mRNA Assay data linked to a group but not linked to any
+ resource.
+ """
+ query = (
+ "SELECT lmd.* FROM linked_mrna_data lmd "
+ "LEFT JOIN mrna_resources mr ON lmd.data_link_id=mr.data_link_id "
+ "WHERE lmd.group_id=? AND mr.data_link_id IS NULL")
+ with db.cursor(conn) as cursor:
+ cursor.execute(query, (str(group.group_id),))
+ return tuple(dict(row) for row in cursor.fetchall())
+
+def unlinked_genotype_data(
+ conn: db.DbConnection, group: Group) -> tuple[dict, ...]:
+ """
+ Retrieve all genotype data linked to a group but not linked to any resource.
+ """
+ query = (
+ "SELECT lgd.* FROM linked_genotype_data lgd "
+ "LEFT JOIN genotype_resources gr ON lgd.data_link_id=gr.data_link_id "
+ "WHERE lgd.group_id=? AND gr.data_link_id IS NULL")
+ with db.cursor(conn) as cursor:
+ cursor.execute(query, (str(group.group_id),))
+ return tuple(dict(row) for row in cursor.fetchall())
+
+def unlinked_phenotype_data(
+ authconn: db.DbConnection, gn3conn: gn3db.Connection,
+ group: Group) -> tuple[dict, ...]:
+ """
+ Retrieve all phenotype data linked to a group but not linked to any
+ resource.
+ """
+ with db.cursor(authconn) as authcur, gn3conn.cursor(DictCursor) as gn3cur:
+ authcur.execute(
+ "SELECT lpd.* FROM linked_phenotype_data AS lpd "
+ "LEFT JOIN phenotype_resources AS pr "
+ "ON lpd.data_link_id=pr.data_link_id "
+ "WHERE lpd.group_id=? AND pr.data_link_id IS NULL",
+ (str(group.group_id),))
+ results = authcur.fetchall()
+ ids: dict[tuple[str, ...], str] = {
+ (
+ row["SpeciesId"], row["InbredSetId"], row["PublishFreezeId"],
+ row["PublishXRefId"]): row["data_link_id"]
+ for row in results
+ }
+ if len(ids.keys()) < 1:
+ return tuple()
+ paramstr = ", ".join(["(%s, %s, %s, %s)"] * len(ids.keys()))
+ gn3cur.execute(
+ "SELECT spc.SpeciesId, spc.SpeciesName, iset.InbredSetId, "
+ "iset.InbredSetName, pf.Id AS PublishFreezeId, "
+ "pf.Name AS dataset_name, pf.FullName AS dataset_fullname, "
+ "pf.ShortName AS dataset_shortname, pxr.Id AS PublishXRefId, "
+ "pub.PubMed_ID, pub.Title, pub.Year, "
+ "phen.Pre_publication_description, "
+ "phen.Post_publication_description, phen.Original_description "
+ "FROM "
+ "Species AS spc "
+ "INNER JOIN InbredSet AS iset "
+ "ON spc.SpeciesId=iset.SpeciesId "
+ "INNER JOIN PublishFreeze AS pf "
+ "ON iset.InbredSetId=pf.InbredSetId "
+ "INNER JOIN PublishXRef AS pxr "
+ "ON pf.InbredSetId=pxr.InbredSetId "
+ "INNER JOIN Publication AS pub "
+ "ON pxr.PublicationId=pub.Id "
+ "INNER JOIN Phenotype AS phen "
+ "ON pxr.PhenotypeId=phen.Id "
+ "WHERE (spc.SpeciesId, iset.InbredSetId, pf.Id, pxr.Id) "
+ f"IN ({paramstr})",
+ tuple(item for sublist in ids.keys() for item in sublist))
+ return tuple({
+ **{key: value for key, value in row.items() if key not in
+ ("Post_publication_description", "Pre_publication_description",
+ "Original_description")},
+ "description": (
+ row["Post_publication_description"] or
+ row["Pre_publication_description"] or
+ row["Original_description"]),
+ "data_link_id": ids[tuple(str(row[key]) for key in (
+ "SpeciesId", "InbredSetId", "PublishFreezeId",
+ "PublishXRefId"))]
+ } for row in gn3cur.fetchall())
+
+@groups.route("/<string:resource_type>/unlinked-data")
+@require_oauth("profile group resource")
+def unlinked_data(resource_type: str) -> Response:
+ """View data linked to the group but not linked to any resource."""
+ if resource_type not in ("all", "mrna", "genotype", "phenotype"):
+ raise AuthorisationError(f"Invalid resource type {resource_type}")
+
+ with require_oauth.acquire("profile group resource") as the_token:
+ db_uri = current_app.config["AUTH_DB"]
+ gn3db_uri = current_app.config["SQL_URI"]
+ with (db.connection(db_uri) as authconn,
+ gn3db.database_connection(gn3db_uri) as gn3conn):
+ ugroup = user_group(authconn, the_token.user).maybe(# type: ignore[misc]
+ DUMMY_GROUP, lambda grp: grp)
+ if ugroup == DUMMY_GROUP:
+ return jsonify(tuple())
+
+ unlinked_fns = {
+ "mrna": unlinked_mrna_data,
+ "genotype": unlinked_genotype_data,
+ "phenotype": lambda conn, grp: partial(
+ unlinked_phenotype_data, gn3conn=gn3conn)(
+ authconn=conn, group=grp)
+ }
+ return jsonify(tuple(
+ dict(row) for row in unlinked_fns[resource_type](
+ authconn, ugroup)))
+
+ return jsonify(tuple())
+
+@groups.route("/data/link", methods=["POST"])
+@require_oauth("profile group resource")
+def link_data() -> Response:
+ """Link selected data to specified group."""
+ with require_oauth.acquire("profile group resource") as _the_token:
+ form = request.form
+ group_id = uuid.UUID(form["group_id"])
+ dataset_ids = form.getlist("dataset_ids")
+ dataset_type = form.get("dataset_type")
+ if dataset_type not in ("mrna", "genotype", "phenotype"):
+ raise InvalidData("Unexpected dataset type requested!")
+ def __link__(conn: db.DbConnection):
+ group = group_by_id(conn, group_id)
+ with gn3db.database_connection(current_app.config["SQL_URI"]) as gn3conn:
+ return link_data_to_group(
+ conn, gn3conn, dataset_type, dataset_ids, group)
+
+ return jsonify(with_db_connection(__link__))
+
+@groups.route("/roles", methods=["GET"])
+@require_oauth("profile group")
+def group_roles():
+ """Return a list of all available group roles."""
+ with require_oauth.acquire("profile group role") as the_token:
+ def __list_roles__(conn: db.DbConnection):
+ ## TODO: Check that user has appropriate privileges
+ with db.cursor(conn) as cursor:
+ group = user_group(conn, the_token.user).maybe(# type: ignore[misc]
+ DUMMY_GROUP, lambda grp: grp)
+ if group == DUMMY_GROUP:
+ return tuple()
+ cursor.execute(
+ "SELECT gr.group_role_id, r.* "
+ "FROM group_roles AS gr INNER JOIN roles AS r "
+ "ON gr.role_id=r.role_id "
+ "WHERE group_id=?",
+ (str(group.group_id),))
+ return tuple(
+ GroupRole(uuid.UUID(row["group_role_id"]),
+ group,
+ Role(uuid.UUID(row["role_id"]),
+ row["role_name"],
+ bool(int(row["user_editable"])),
+ tuple()))
+ for row in cursor.fetchall())
+ return jsonify(tuple(
+ dictify(role) for role in with_db_connection(__list_roles__)))
+
+@groups.route("/privileges", methods=["GET"])
+@require_oauth("profile group")
+def group_privileges():
+ """Return a list of all available group roles."""
+ with require_oauth.acquire("profile group role") as the_token:
+ def __list_privileges__(conn: db.DbConnection) -> Iterable[Privilege]:
+ ## TODO: Check that user has appropriate privileges
+ this_user_roles = user_roles(conn, the_token.user)
+ with db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM privileges "
+ "WHERE privilege_id LIKE 'group:%'")
+ group_level_roles = tuple(
+ Privilege(row["privilege_id"], row["privilege_description"])
+ for row in cursor.fetchall())
+ return tuple(privilege for arole in this_user_roles
+ for privilege in arole.privileges) + group_level_roles
+ return jsonify(tuple(
+ dictify(priv) for priv in with_db_connection(__list_privileges__)))
+
+
+
+@groups.route("/role/create", methods=["POST"])
+@require_oauth("profile group")
+def create_group_role():
+ """Create a new group role."""
+ with require_oauth.acquire("profile group role") as the_token:
+ ## TODO: Check that user has appropriate privileges
+ @authorised_p(("group:role:create-role",),
+ "You do not have the privilege to create new roles",
+ oauth2_scope="profile group role")
+ def __create__(conn: db.DbConnection) -> GroupRole:
+ ## TODO: Check user cannot assign any privilege they don't have.
+ form = request.form
+ role_name = form.get("role_name", "").strip()
+ privileges_ids = form.getlist("privileges[]")
+ if len(role_name) == 0:
+ raise InvalidData("Role name not provided!")
+ if len(privileges_ids) == 0:
+ raise InvalidData(
+ "At least one privilege needs to be provided.")
+
+ group = user_group(conn, the_token.user).maybe(# type: ignore[misc]
+ DUMMY_GROUP, lambda grp: grp)
+
+ if group == DUMMY_GROUP:
+ raise AuthorisationError(
+ "A user without a group cannot create a new role.")
+ privileges = privileges_by_ids(conn, tuple(privileges_ids))
+ if len(privileges_ids) != len(privileges):
+ raise InvalidData(
+ f"{len(privileges_ids) - len(privileges)} of the selected "
+ "privileges were not found in the database.")
+
+ return _create_group_role(conn, group, role_name, privileges)
+
+ return jsonify(with_db_connection(__create__))
+
+@groups.route("/role/<uuid:group_role_id>", methods=["GET"])
+@require_oauth("profile group")
+def view_group_role(group_role_id: uuid.UUID):
+ """Return the details of the given role."""
+ with require_oauth.acquire("profile group role") as the_token:
+ def __group_role__(conn: db.DbConnection) -> GroupRole:
+ group = user_group(conn, the_token.user).maybe(#type: ignore[misc]
+ DUMMY_GROUP, lambda grp: grp)
+
+ if group == DUMMY_GROUP:
+ raise AuthorisationError(
+ "A user without a group cannot view group roles.")
+ return group_role_by_id(conn, group, group_role_id)
+ return jsonify(dictify(with_db_connection(__group_role__)))
+
+def __add_remove_priv_to_from_role__(conn: db.DbConnection,
+ group_role_id: uuid.UUID,
+ direction: str,
+ user: User) -> GroupRole:
+ assert direction in ("ADD", "DELETE")
+ group = user_group(conn, user).maybe(# type: ignore[misc]
+ DUMMY_GROUP, lambda grp: grp)
+
+ if group == DUMMY_GROUP:
+ raise AuthorisationError(
+ "You need to be a member of a group to edit roles.")
+ try:
+ privilege_id = request.form.get("privilege_id", "")
+ assert bool(privilege_id), "A privilege must be provided."
+ privileges = privileges_by_ids(conn, (privilege_id,))
+ if len(privileges) == 0:
+ raise NotFoundError("Privilege not found.")
+ dir_fns = {
+ "ADD": add_privilege_to_group_role,
+ "DELETE": delete_privilege_from_group_role
+ }
+ return dir_fns[direction](
+ conn,
+ group_role_by_id(conn, group, group_role_id),
+ privileges[0])
+ except AssertionError as aerr:
+ raise InvalidData(aerr.args[0]) from aerr
+
+@groups.route("/role/<uuid:group_role_id>/privilege/add", methods=["POST"])
+@require_oauth("profile group")
+def add_priv_to_role(group_role_id: uuid.UUID) -> Response:
+ """Add privilege to group role."""
+ with require_oauth.acquire("profile group role") as the_token:
+ return jsonify({
+ **dictify(with_db_connection(partial(
+ __add_remove_priv_to_from_role__, group_role_id=group_role_id,
+ direction="ADD", user=the_token.user))),
+ "description": "Privilege added successfully"
+ })
+
+@groups.route("/role/<uuid:group_role_id>/privilege/delete", methods=["POST"])
+@require_oauth("profile group")
+def delete_priv_from_role(group_role_id: uuid.UUID) -> Response:
+ """Delete privilege from group role."""
+ with require_oauth.acquire("profile group role") as the_token:
+ return jsonify({
+ **dictify(with_db_connection(partial(
+ __add_remove_priv_to_from_role__, group_role_id=group_role_id,
+ direction="DELETE", user=the_token.user))),
+ "description": "Privilege deleted successfully"
+ })
diff --git a/gn_auth/auth/authorisation/privileges.py b/gn_auth/auth/authorisation/privileges.py
new file mode 100644
index 0000000..dbb4129
--- /dev/null
+++ b/gn_auth/auth/authorisation/privileges.py
@@ -0,0 +1,47 @@
+"""Handle privileges"""
+from typing import Any, Iterable, NamedTuple
+
+from gn3.auth import db
+from gn3.auth.authentication.users import User
+
+class Privilege(NamedTuple):
+ """Class representing a privilege: creates immutable objects."""
+ privilege_id: str
+ privilege_description: str
+
+ def dictify(self) -> dict[str, Any]:
+ """Return a dict representation of `Privilege` objects."""
+ return {
+ "privilege_id": self.privilege_id,
+ "privilege_description": self.privilege_description
+ }
+
+def user_privileges(conn: db.DbConnection, user: User) -> Iterable[Privilege]:
+ """Fetch the user's privileges from the database."""
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ ("SELECT p.privilege_id, p.privilege_description "
+ "FROM user_roles AS ur "
+ "INNER JOIN role_privileges AS rp ON ur.role_id=rp.role_id "
+ "INNER JOIN privileges AS p ON rp.privilege_id=p.privilege_id "
+ "WHERE ur.user_id=?"),
+ (str(user.user_id),))
+ results = cursor.fetchall()
+
+ return (Privilege(row[0], row[1]) for row in results)
+
+def privileges_by_ids(
+ conn: db.DbConnection, privileges_ids: tuple[str, ...]) -> tuple[
+ Privilege, ...]:
+ """Fetch privileges by their ids."""
+ if len(privileges_ids) == 0:
+ return tuple()
+
+ with db.cursor(conn) as cursor:
+ clause = ", ".join(["?"] * len(privileges_ids))
+ cursor.execute(
+ f"SELECT * FROM privileges WHERE privilege_id IN ({clause})",
+ privileges_ids)
+ return tuple(
+ Privilege(row["privilege_id"], row["privilege_description"])
+ for row in cursor.fetchall())
diff --git a/gn_auth/auth/authorisation/resources/__init__.py b/gn_auth/auth/authorisation/resources/__init__.py
new file mode 100644
index 0000000..869ab60
--- /dev/null
+++ b/gn_auth/auth/authorisation/resources/__init__.py
@@ -0,0 +1,2 @@
+"""Initialise the `gn3.auth.authorisation.resources` package."""
+from .models import Resource, ResourceCategory
diff --git a/gn_auth/auth/authorisation/resources/checks.py b/gn_auth/auth/authorisation/resources/checks.py
new file mode 100644
index 0000000..fafde76
--- /dev/null
+++ b/gn_auth/auth/authorisation/resources/checks.py
@@ -0,0 +1,47 @@
+"""Handle authorisation checks for resources"""
+from uuid import UUID
+from functools import reduce
+from typing import Sequence
+
+from gn3.auth import db
+from gn3.auth.authentication.users import User
+
+def __organise_privileges_by_resource_id__(rows):
+ def __organise__(privs, row):
+ resource_id = UUID(row["resource_id"])
+ return {
+ **privs,
+ resource_id: (row["privilege_id"],) + privs.get(
+ resource_id, tuple())
+ }
+ return reduce(__organise__, rows, {})
+
+def authorised_for(conn: db.DbConnection, user: User,
+ privileges: tuple[str, ...],
+ resource_ids: Sequence[UUID]) -> dict[UUID, bool]:
+ """
+ Check whether `user` is authorised, according to the given `privileges`,
+ to access the resources identified by `resource_ids`.
+ """
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ ("SELECT guror.*, rp.privilege_id FROM "
+ "group_user_roles_on_resources AS guror "
+ "INNER JOIN group_roles AS gr ON "
+ "(guror.group_id=gr.group_id AND guror.role_id=gr.role_id) "
+ "INNER JOIN roles AS r ON gr.role_id=r.role_id "
+ "INNER JOIN role_privileges AS rp ON r.role_id=rp.role_id "
+ "WHERE guror.user_id=? "
+ f"AND guror.resource_id IN ({', '.join(['?']*len(resource_ids))})"
+ f"AND rp.privilege_id IN ({', '.join(['?']*len(privileges))})"),
+ ((str(user.user_id),) + tuple(
+ str(r_id) for r_id in resource_ids) + tuple(privileges)))
+ resource_privileges = __organise_privileges_by_resource_id__(
+ cursor.fetchall())
+ authorised = tuple(resource_id for resource_id, res_privileges
+ in resource_privileges.items()
+ if all(priv in res_privileges
+ for priv in privileges))
+ return {
+ resource_id: resource_id in authorised
+ for resource_id in resource_ids
+ }
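
A usage sketch for `authorised_for` above; the connection URI, user and resource IDs are placeholders:

    from uuid import uuid4

    from gn3.auth import db
    from gn3.auth.authentication.users import User
    from gn3.auth.authorisation.resources.checks import authorised_for

    # Check edit rights on two resources in one call.
    user = User(uuid4(), "jdoe@example.com", "Jane Doe")
    resource_ids = (uuid4(), uuid4())
    with db.connection("/path/to/auth.db") as conn:
        allowed = authorised_for(
            conn, user, ("group:resource:edit-resource",), resource_ids)
        # `allowed` maps each resource id to True/False, e.g.
        # {UUID('…'): True, UUID('…'): False}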
diff --git a/gn_auth/auth/authorisation/resources/models.py b/gn_auth/auth/authorisation/resources/models.py
new file mode 100644
index 0000000..b301a93
--- /dev/null
+++ b/gn_auth/auth/authorisation/resources/models.py
@@ -0,0 +1,579 @@
+"""Handle the management of resources."""
+import json
+import sqlite3
+from uuid import UUID, uuid4
+from functools import reduce, partial
+from typing import Any, Dict, Sequence, Optional, NamedTuple
+
+from gn3.auth import db
+from gn3.auth.dictify import dictify
+from gn3.auth.authentication.users import User
+from gn3.auth.db_utils import with_db_connection
+
+from .checks import authorised_for
+
+from ..checks import authorised_p
+from ..errors import NotFoundError, AuthorisationError
+from ..groups.models import (
+ Group, GroupRole, user_group, group_by_id, is_group_leader)
+
+class MissingGroupError(AuthorisationError):
+ """Raised for any resource operation without a group."""
+
+class ResourceCategory(NamedTuple):
+ """Class representing a resource category."""
+ resource_category_id: UUID
+ resource_category_key: str
+ resource_category_description: str
+
+ def dictify(self) -> dict[str, Any]:
+ """Return a dict representation of `ResourceCategory` objects."""
+ return {
+ "resource_category_id": self.resource_category_id,
+ "resource_category_key": self.resource_category_key,
+ "resource_category_description": self.resource_category_description
+ }
+
+class Resource(NamedTuple):
+ """Class representing a resource."""
+ group: Group
+ resource_id: UUID
+ resource_name: str
+ resource_category: ResourceCategory
+ public: bool
+ resource_data: Sequence[dict[str, Any]] = tuple()
+
+ def dictify(self) -> dict[str, Any]:
+ """Return a dict representation of `Resource` objects."""
+ return {
+ "group": dictify(self.group), "resource_id": self.resource_id,
+ "resource_name": self.resource_name,
+ "resource_category": dictify(self.resource_category),
+ "public": self.public,
+ "resource_data": self.resource_data
+ }
+
+def __assign_resource_owner_role__(cursor, resource, user):
+ """Assign `user` the 'Resource Owner' role for `resource`."""
+ cursor.execute(
+ "SELECT gr.* FROM group_roles AS gr INNER JOIN roles AS r "
+ "ON gr.role_id=r.role_id WHERE r.role_name='resource-owner' "
+ "AND gr.group_id=?",
+ (str(resource.group.group_id),))
+ role = cursor.fetchone()
+ if not role:
+ cursor.execute("SELECT * FROM roles WHERE role_name='resource-owner'")
+ role = cursor.fetchone()
+ cursor.execute(
+ "INSERT INTO group_roles VALUES "
+ "(:group_role_id, :group_id, :role_id)",
+ {"group_role_id": str(uuid4()),
+ "group_id": str(resource.group.group_id),
+ "role_id": role["role_id"]})
+
+ cursor.execute(
+ "INSERT INTO group_user_roles_on_resources "
+ "VALUES ("
+ ":group_id, :user_id, :role_id, :resource_id"
+ ")",
+ {"group_id": str(resource.group.group_id),
+ "user_id": str(user.user_id),
+ "role_id": role["role_id"],
+ "resource_id": str(resource.resource_id)})
+
+@authorised_p(("group:resource:create-resource",),
+ error_description="Insufficient privileges to create a resource",
+ oauth2_scope="profile resource")
+def create_resource(
+ conn: db.DbConnection, resource_name: str,
+ resource_category: ResourceCategory, user: User,
+ public: bool) -> Resource:
+ """Create a resource item."""
+ with db.cursor(conn) as cursor:
+ group = user_group(conn, user).maybe(
+ False, lambda grp: grp)# type: ignore[misc, arg-type]
+ if not group:
+ raise MissingGroupError(
+ "User with no group cannot create a resource.")
+ resource = Resource(
+ group, uuid4(), resource_name, resource_category, public)
+ cursor.execute(
+ "INSERT INTO resources VALUES (?, ?, ?, ?, ?)",
+ (str(resource.group.group_id), str(resource.resource_id),
+ resource_name,
+ str(resource.resource_category.resource_category_id),
+ 1 if resource.public else 0))
+ __assign_resource_owner_role__(cursor, resource, user)
+
+ return resource
+
+def resource_category_by_id(
+ conn: db.DbConnection, category_id: UUID) -> ResourceCategory:
+ """Retrieve a resource category by its ID."""
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "SELECT * FROM resource_categories WHERE "
+ "resource_category_id=?",
+ (str(category_id),))
+ results = cursor.fetchone()
+ if results:
+ return ResourceCategory(
+ UUID(results["resource_category_id"]),
+ results["resource_category_key"],
+ results["resource_category_description"])
+
+ raise NotFoundError(
+ f"Could not find a ResourceCategory with ID '{category_id}'")
+
+def resource_categories(conn: db.DbConnection) -> Sequence[ResourceCategory]:
+ """Retrieve all available resource categories"""
+ with db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM resource_categories")
+ return tuple(
+ ResourceCategory(UUID(row[0]), row[1], row[2])
+ for row in cursor.fetchall())
+ return tuple()
+
+def public_resources(conn: db.DbConnection) -> Sequence[Resource]:
+ """List all resources marked as public"""
+ categories = {
+ str(cat.resource_category_id): cat for cat in resource_categories(conn)
+ }
+ with db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM resources WHERE public=1")
+ results = cursor.fetchall()
+ group_uuids = tuple(row[0] for row in results)
+ query = ("SELECT * FROM groups WHERE group_id IN "
+ f"({', '.join(['?'] * len(group_uuids))})")
+ cursor.execute(query, group_uuids)
+ groups = {
+ row[0]: Group(
+ UUID(row[0]), row[1], json.loads(row[2] or "{}"))
+ for row in cursor.fetchall()
+ }
+ return tuple(
+ Resource(groups[row[0]], UUID(row[1]), row[2], categories[row[3]],
+ bool(row[4]))
+ for row in results)
+
+def group_leader_resources(
+ conn: db.DbConnection, user: User, group: Group,
+ res_categories: Dict[UUID, ResourceCategory]) -> Sequence[Resource]:
+ """Return all the resources available to the group leader"""
+ if is_group_leader(conn, user, group):
+ with db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM resources WHERE group_id=?",
+ (str(group.group_id),))
+ return tuple(
+ Resource(group, UUID(row[1]), row[2],
+ res_categories[UUID(row[3])], bool(row[4]))
+ for row in cursor.fetchall())
+ return tuple()
+
+def user_resources(conn: db.DbConnection, user: User) -> Sequence[Resource]:
+ """List the resources available to the user"""
+ categories = { # Repeated in `public_resources` function
+ cat.resource_category_id: cat for cat in resource_categories(conn)
+ }
+ with db.cursor(conn) as cursor:
+ def __all_resources__(group) -> Sequence[Resource]:
+ gl_resources = group_leader_resources(conn, user, group, categories)
+
+ cursor.execute(
+ ("SELECT resources.* FROM group_user_roles_on_resources "
+ "LEFT JOIN resources "
+ "ON group_user_roles_on_resources.resource_id=resources.resource_id "
+ "WHERE group_user_roles_on_resources.group_id = ? "
+ "AND group_user_roles_on_resources.user_id = ?"),
+ (str(group.group_id), str(user.user_id)))
+ rows = cursor.fetchall()
+ private_res = tuple(
+ Resource(group, UUID(row[1]), row[2], categories[UUID(row[3])],
+ bool(row[4]))
+ for row in rows)
+ return tuple({
+ res.resource_id: res
+ for res in
+ (private_res + gl_resources + public_resources(conn))# type: ignore[operator]
+ }.values())
+
+ # Fix the typing here
+ return user_group(conn, user).map(__all_resources__).maybe(# type: ignore[arg-type,misc]
+ public_resources(conn), lambda res: res)# type: ignore[arg-type,return-value]
+
+def resource_data(conn, resource, offset: int = 0, limit: Optional[int] = None) -> tuple[dict, ...]:
+ """
+ Retrieve the data for `resource`, optionally limiting the number of items.
+ """
+ resource_data_function = {
+ "mrna": mrna_resource_data,
+ "genotype": genotype_resource_data,
+ "phenotype": phenotype_resource_data
+ }
+ with db.cursor(conn) as cursor:
+ return tuple(
+ dict(data_row) for data_row in
+ resource_data_function[
+ resource.resource_category.resource_category_key](
+ cursor, resource.resource_id, offset, limit))
+
+def attach_resource_data(cursor: db.DbCursor, resource: Resource) -> Resource:
+ """Attach the linked data to the resource"""
+ resource_data_function = {
+ "mrna": mrna_resource_data,
+ "genotype": genotype_resource_data,
+ "phenotype": phenotype_resource_data
+ }
+ category = resource.resource_category
+ data_rows = tuple(
+ dict(data_row) for data_row in
+ resource_data_function[category.resource_category_key](
+ cursor, resource.resource_id))
+ return Resource(
+ resource.group, resource.resource_id, resource.resource_name,
+ resource.resource_category, resource.public, data_rows)
+
+def mrna_resource_data(cursor: db.DbCursor,
+ resource_id: UUID,
+ offset: int = 0,
+ limit: Optional[int] = None) -> Sequence[sqlite3.Row]:
+ """Fetch data linked to a mRNA resource"""
+ cursor.execute(
+ (("SELECT * FROM mrna_resources AS mr "
+ "INNER JOIN linked_mrna_data AS lmr "
+ "ON mr.data_link_id=lmr.data_link_id "
+ "WHERE mr.resource_id=?") + (
+ f" LIMIT {limit} OFFSET {offset}" if bool(limit) else "")),
+ (str(resource_id),))
+ return cursor.fetchall()
+
+def genotype_resource_data(
+ cursor: db.DbCursor,
+ resource_id: UUID,
+ offset: int = 0,
+ limit: Optional[int] = None) -> Sequence[sqlite3.Row]:
+ """Fetch data linked to a Genotype resource"""
+ cursor.execute(
+ (("SELECT * FROM genotype_resources AS gr "
+ "INNER JOIN linked_genotype_data AS lgd "
+ "ON gr.data_link_id=lgd.data_link_id "
+ "WHERE gr.resource_id=?") + (
+ f" LIMIT {limit} OFFSET {offset}" if bool(limit) else "")),
+ (str(resource_id),))
+ return cursor.fetchall()
+
+def phenotype_resource_data(
+ cursor: db.DbCursor,
+ resource_id: UUID,
+ offset: int = 0,
+ limit: Optional[int] = None) -> Sequence[sqlite3.Row]:
+ """Fetch data linked to a Phenotype resource"""
+ cursor.execute(
+ ("SELECT * FROM phenotype_resources AS pr "
+ "INNER JOIN linked_phenotype_data AS lpd "
+ "ON pr.data_link_id=lpd.data_link_id "
+ "WHERE pr.resource_id=?") + (
+ f" LIMIT {limit} OFFSET {offset}" if bool(limit) else ""),
+ (str(resource_id),))
+ return cursor.fetchall()
+
+def resource_by_id(
+ conn: db.DbConnection, user: User, resource_id: UUID) -> Resource:
+ """Retrieve a resource by its ID."""
+ if not authorised_for(
+ conn, user, ("group:resource:view-resource",),
+ (resource_id,))[resource_id]:
+ raise AuthorisationError(
+ "You are not authorised to access resource with id "
+ f"'{resource_id}'.")
+
+ with db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM resources WHERE resource_id=:id",
+ {"id": str(resource_id)})
+ row = cursor.fetchone()
+ if row:
+ return Resource(
+ group_by_id(conn, UUID(row["group_id"])),
+ UUID(row["resource_id"]), row["resource_name"],
+ resource_category_by_id(conn, row["resource_category_id"]),
+ bool(int(row["public"])))
+
+ raise NotFoundError(f"Could not find a resource with id '{resource_id}'")
+
+def __link_mrna_data_to_resource__(
+ conn: db.DbConnection, resource: Resource, data_link_id: UUID) -> dict:
+ """Link mRNA Assay data with a resource."""
+ with db.cursor(conn) as cursor:
+ params = {
+ "group_id": str(resource.group.group_id),
+ "resource_id": str(resource.resource_id),
+ "data_link_id": str(data_link_id)
+ }
+ cursor.execute(
+ "INSERT INTO mrna_resources VALUES"
+ "(:group_id, :resource_id, :data_link_id)",
+ params)
+ return params
+
+def __link_geno_data_to_resource__(
+ conn: db.DbConnection, resource: Resource, data_link_id: UUID) -> dict:
+ """Link Genotype data with a resource."""
+ with db.cursor(conn) as cursor:
+ params = {
+ "group_id": str(resource.group.group_id),
+ "resource_id": str(resource.resource_id),
+ "data_link_id": str(data_link_id)
+ }
+ cursor.execute(
+ "INSERT INTO genotype_resources VALUES"
+ "(:group_id, :resource_id, :data_link_id)",
+ params)
+ return params
+
+def __link_pheno_data_to_resource__(
+ conn: db.DbConnection, resource: Resource, data_link_id: UUID) -> dict:
+ """Link Phenotype data with a resource."""
+ with db.cursor(conn) as cursor:
+ params = {
+ "group_id": str(resource.group.group_id),
+ "resource_id": str(resource.resource_id),
+ "data_link_id": str(data_link_id)
+ }
+ cursor.execute(
+ "INSERT INTO phenotype_resources VALUES"
+ "(:group_id, :resource_id, :data_link_id)",
+ params)
+ return params
+
+def link_data_to_resource(
+ conn: db.DbConnection, user: User, resource_id: UUID, dataset_type: str,
+ data_link_id: UUID) -> dict:
+ """Link data to resource."""
+ if not authorised_for(
+ conn, user, ("group:resource:edit-resource",),
+ (resource_id,))[resource_id]:
+ raise AuthorisationError(
+ "You are not authorised to link data to resource with id "
+ f"{resource_id}")
+
+ resource = with_db_connection(partial(
+ resource_by_id, user=user, resource_id=resource_id))
+ return {
+ "mrna": __link_mrna_data_to_resource__,
+ "genotype": __link_geno_data_to_resource__,
+ "phenotype": __link_pheno_data_to_resource__,
+ }[dataset_type.lower()](conn, resource, data_link_id)
+
+def __unlink_mrna_data_to_resource__(
+ conn: db.DbConnection, resource: Resource, data_link_id: UUID) -> dict:
+ """Unlink data from mRNA Assay resources"""
+ with db.cursor(conn) as cursor:
+ cursor.execute("DELETE FROM mrna_resources "
+ "WHERE resource_id=? AND data_link_id=?",
+ (str(resource.resource_id), str(data_link_id)))
+ return {
+ "resource_id": str(resource.resource_id),
+ "dataset_type": resource.resource_category.resource_category_key,
+ "data_link_id": data_link_id
+ }
+
+def __unlink_geno_data_to_resource__(
+ conn: db.DbConnection, resource: Resource, data_link_id: UUID) -> dict:
+ """Unlink data from Genotype resources"""
+ with db.cursor(conn) as cursor:
+ cursor.execute("DELETE FROM genotype_resources "
+ "WHERE resource_id=? AND data_link_id=?",
+ (str(resource.resource_id), str(data_link_id)))
+ return {
+ "resource_id": str(resource.resource_id),
+ "dataset_type": resource.resource_category.resource_category_key,
+ "data_link_id": data_link_id
+ }
+
+def __unlink_pheno_data_to_resource__(
+ conn: db.DbConnection, resource: Resource, data_link_id: UUID) -> dict:
+ """Unlink data from Phenotype resources"""
+ with db.cursor(conn) as cursor:
+ cursor.execute("DELETE FROM phenotype_resources "
+ "WHERE resource_id=? AND data_link_id=?",
+ (str(resource.resource_id), str(data_link_id)))
+ return {
+ "resource_id": str(resource.resource_id),
+ "dataset_type": resource.resource_category.resource_category_key,
+ "data_link_id": str(data_link_id)
+ }
+
+def unlink_data_from_resource(
+ conn: db.DbConnection, user: User, resource_id: UUID, data_link_id: UUID):
+ """Unlink data from resource."""
+ if not authorised_for(
+ conn, user, ("group:resource:edit-resource",),
+ (resource_id,))[resource_id]:
+ raise AuthorisationError(
+ "You are not authorised to link data to resource with id "
+ f"{resource_id}")
+
+ resource = with_db_connection(partial(
+ resource_by_id, user=user, resource_id=resource_id))
+ dataset_type = resource.resource_category.resource_category_key
+ return {
+ "mrna": __unlink_mrna_data_to_resource__,
+ "genotype": __unlink_geno_data_to_resource__,
+ "phenotype": __unlink_pheno_data_to_resource__,
+ }[dataset_type.lower()](conn, resource, data_link_id)
+
+def organise_resources_by_category(resources: Sequence[Resource]) -> dict[
+ ResourceCategory, tuple[Resource]]:
+ """Organise the `resources` by their categories."""
+ def __organise__(accumulator, resource):
+ category = resource.resource_category
+ return {
+ **accumulator,
+ category: accumulator.get(category, tuple()) + (resource,)
+ }
+ return reduce(__organise__, resources, {})
+
+def __attach_data__(
+ data_rows: Sequence[sqlite3.Row],
+ resources: Sequence[Resource]) -> Sequence[Resource]:
+ def __organise__(acc, row):
+ resource_id = UUID(row["resource_id"])
+ return {
+ **acc,
+ resource_id: acc.get(resource_id, tuple()) + (dict(row),)
+ }
+ organised: dict[UUID, tuple[dict, ...]] = reduce(__organise__, data_rows, {})
+ return tuple(
+ Resource(
+ resource.group, resource.resource_id, resource.resource_name,
+ resource.resource_category, resource.public,
+ organised.get(resource.resource_id, tuple()))
+ for resource in resources)
+
+def attach_mrna_resources_data(
+ cursor, resources: Sequence[Resource]) -> Sequence[Resource]:
+ """Attach linked data to mRNA Assay resources"""
+ placeholders = ", ".join(["?"] * len(resources))
+ cursor.execute(
+ "SELECT * FROM mrna_resources AS mr INNER JOIN linked_mrna_data AS lmd"
+ " ON mr.data_link_id=lmd.data_link_id "
+ f"WHERE mr.resource_id IN ({placeholders})",
+ tuple(str(resource.resource_id) for resource in resources))
+ return __attach_data__(cursor.fetchall(), resources)
+
+def attach_genotype_resources_data(
+ cursor, resources: Sequence[Resource]) -> Sequence[Resource]:
+ """Attach linked data to Genotype resources"""
+ placeholders = ", ".join(["?"] * len(resources))
+ cursor.execute(
+ "SELECT * FROM genotype_resources AS gr "
+ "INNER JOIN linked_genotype_data AS lgd "
+ "ON gr.data_link_id=lgd.data_link_id "
+ f"WHERE gr.resource_id IN ({placeholders})",
+ tuple(str(resource.resource_id) for resource in resources))
+ return __attach_data__(cursor.fetchall(), resources)
+
+def attach_phenotype_resources_data(
+ cursor, resources: Sequence[Resource]) -> Sequence[Resource]:
+ """Attach linked data to Phenotype resources"""
+ placeholders = ", ".join(["?"] * len(resources))
+ cursor.execute(
+ "SELECT * FROM phenotype_resources AS pr "
+ "INNER JOIN linked_phenotype_data AS lpd "
+ "ON pr.data_link_id=lpd.data_link_id "
+ f"WHERE pr.resource_id IN ({placeholders})",
+ tuple(str(resource.resource_id) for resource in resources))
+ return __attach_data__(cursor.fetchall(), resources)
+
+def attach_resources_data(
+ conn: db.DbConnection, resources: Sequence[Resource]) -> Sequence[
+ Resource]:
+ """Attach linked data for each resource in `resources`"""
+ resource_data_function = {
+ "mrna": attach_mrna_resources_data,
+ "genotype": attach_genotype_resources_data,
+ "phenotype": attach_phenotype_resources_data
+ }
+ organised = organise_resources_by_category(resources)
+ with db.cursor(conn) as cursor:
+ return tuple(
+ resource for categories in
+ (resource_data_function[category.resource_category_key](
+ cursor, rscs)
+ for category, rscs in organised.items())
+ for resource in categories)
+
+@authorised_p(
+ ("group:user:assign-role",),
+ "You cannot assign roles to users for this group.",
+ oauth2_scope="profile group role resource")
+def assign_resource_user(
+ conn: db.DbConnection, resource: Resource, user: User,
+ role: GroupRole) -> dict:
+ """Assign `role` to `user` for the specific `resource`."""
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "INSERT INTO "
+ "group_user_roles_on_resources(group_id, user_id, role_id, "
+ "resource_id) "
+ "VALUES (?, ?, ?, ?) "
+ "ON CONFLICT (group_id, user_id, role_id, resource_id) "
+ "DO NOTHING",
+ (str(resource.group.group_id), str(user.user_id),
+ str(role.role.role_id), str(resource.resource_id)))
+ return {
+ "resource": dictify(resource),
+ "user": dictify(user),
+ "role": dictify(role),
+ "description": (
+ f"The user '{user.name}'({user.email}) was assigned the "
+ f"'{role.role.role_name}' role on resource with ID "
+ f"'{resource.resource_id}'.")}
+
+@authorised_p(
+ ("group:user:assign-role",),
+ "You cannot assign roles to users for this group.",
+ oauth2_scope="profile group role resource")
+def unassign_resource_user(
+ conn: db.DbConnection, resource: Resource, user: User,
+ role: GroupRole) -> dict:
+ """Assign `role` to `user` for the specific `resource`."""
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "DELETE FROM group_user_roles_on_resources "
+ "WHERE group_id=? AND user_id=? AND role_id=? AND resource_id=?",
+ (str(resource.group.group_id), str(user.user_id),
+ str(role.role.role_id), str(resource.resource_id)))
+ return {
+ "resource": dictify(resource),
+ "user": dictify(user),
+ "role": dictify(role),
+ "description": (
+ f"The user '{user.name}'({user.email}) had the "
+ f"'{role.role.role_name}' role on resource with ID "
+ f"'{resource.resource_id}' taken away.")}
+
+def save_resource(
+ conn: db.DbConnection, user: User, resource: Resource) -> Resource:
+ """Update an existing resource."""
+ resource_id = resource.resource_id
+ authorised = authorised_for(
+ conn, user, ("group:resource:edit-resource",), (resource_id,))
+ if authorised[resource_id]:
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "UPDATE resources SET "
+ "resource_name=:resource_name, "
+ "public=:public "
+ "WHERE group_id=:group_id "
+ "AND resource_id=:resource_id",
+ {
+ "resource_name": resource.resource_name,
+ "public": 1 if resource.public else 0,
+ "group_id": str(resource.group.group_id),
+ "resource_id": str(resource.resource_id)
+ })
+ return resource
+
+ raise AuthorisationError(
+ "You do not have the appropriate privileges to edit this resource.")
diff --git a/gn_auth/auth/authorisation/resources/views.py b/gn_auth/auth/authorisation/resources/views.py
new file mode 100644
index 0000000..3b2bbeb
--- /dev/null
+++ b/gn_auth/auth/authorisation/resources/views.py
@@ -0,0 +1,272 @@
+"""The views/routes for the resources package"""
+import uuid
+import json
+import sqlite3
+from functools import reduce
+
+from flask import request, jsonify, Response, Blueprint, current_app as app
+
+from gn3.auth.db_utils import with_db_connection
+
+from .checks import authorised_for
+from .models import (
+ Resource, save_resource, resource_data, resource_by_id, resource_categories,
+ assign_resource_user, link_data_to_resource, unassign_resource_user,
+ resource_category_by_id, unlink_data_from_resource,
+ create_resource as _create_resource)
+
+from ..roles import Role
+from ..errors import InvalidData, InconsistencyError, AuthorisationError
+from ..groups.models import Group, GroupRole, group_role_by_id
+
+from ... import db
+from ...dictify import dictify
+from ...authentication.oauth2.resource_server import require_oauth
+from ...authentication.users import User, user_by_id, user_by_email
+
+resources = Blueprint("resources", __name__)
+
+@resources.route("/categories", methods=["GET"])
+@require_oauth("profile group resource")
+def list_resource_categories() -> Response:
+ """Retrieve all resource categories"""
+ db_uri = app.config["AUTH_DB"]
+ with db.connection(db_uri) as conn:
+ return jsonify(tuple(
+ dictify(category) for category in resource_categories(conn)))
+
+@resources.route("/create", methods=["POST"])
+@require_oauth("profile group resource")
+def create_resource() -> Response:
+ """Create a new resource"""
+ with require_oauth.acquire("profile group resource") as the_token:
+ form = request.form
+ resource_name = form.get("resource_name")
+ resource_category_id = uuid.UUID(form.get("resource_category"))
+ db_uri = app.config["AUTH_DB"]
+ with db.connection(db_uri) as conn:
+ try:
+ resource = _create_resource(
+ conn,
+ resource_name,
+ resource_category_by_id(conn, resource_category_id),
+ the_token.user,
+ (form.get("public") == "on"))
+ return jsonify(dictify(resource))
+ except sqlite3.IntegrityError as sql3ie:
+ if sql3ie.args[0] == ("UNIQUE constraint failed: "
+ "resources.resource_name"):
+ raise InconsistencyError(
+ "You cannot have duplicate resource names.") from sql3ie
+ app.logger.debug(
+ f"{type(sql3ie)=}: {sql3ie=}")
+ raise
+
+@resources.route("/view/<uuid:resource_id>")
+@require_oauth("profile group resource")
+def view_resource(resource_id: uuid.UUID) -> Response:
+ """View a particular resource's details."""
+ with require_oauth.acquire("profile group resource") as the_token:
+ db_uri = app.config["AUTH_DB"]
+ with db.connection(db_uri) as conn:
+ return jsonify(dictify(resource_by_id(
+ conn, the_token.user, resource_id)))
+
+def __safe_get_requests_page__(key: str = "page") -> int:
+ """Get the results page if it exists or default to the first page."""
+ try:
+ return abs(int(request.args.get(key, "1"), base=10))
+ except ValueError as _valerr:
+ return 1
+
+def __safe_get_requests_count__(key: str = "count_per_page") -> int:
+ """Get the results page if it exists or default to the first page."""
+ try:
+ count = request.args.get(key, "0")
+ if count != 0:
+ return abs(int(count, base=10))
+ return 0
+ except ValueError as _valerr:
+ return 0
+
+@resources.route("/view/<uuid:resource_id>/data")
+@require_oauth("profile group resource")
+def view_resource_data(resource_id: uuid.UUID) -> Response:
+ """Retrieve a particular resource's data."""
+ with require_oauth.acquire("profile group resource") as the_token:
+ db_uri = app.config["AUTH_DB"]
+ count_per_page = __safe_get_requests_count__("count_per_page")
+ offset = (__safe_get_requests_page__("page") - 1)
+ with db.connection(db_uri) as conn:
+ resource = resource_by_id(conn, the_token.user, resource_id)
+ return jsonify(resource_data(
+ conn,
+ resource,
+ ((offset * count_per_page) if bool(count_per_page) else offset),
+ count_per_page))
+
+@resources.route("/data/link", methods=["POST"])
+@require_oauth("profile group resource")
+def link_data():
+ """Link group data to a specific resource."""
+ try:
+ form = request.form
+ assert "resource_id" in form, "Resource ID not provided."
+ assert "data_link_id" in form, "Data Link ID not provided."
+ assert "dataset_type" in form, "Dataset type not specified"
+ assert form["dataset_type"].lower() in (
+ "mrna", "genotype", "phenotype"), "Invalid dataset type provided."
+
+ with require_oauth.acquire("profile group resource") as the_token:
+ def __link__(conn: db.DbConnection):
+ return link_data_to_resource(
+ conn, the_token.user, uuid.UUID(form["resource_id"]),
+ form["dataset_type"], uuid.UUID(form["data_link_id"]))
+
+ return jsonify(with_db_connection(__link__))
+ except AssertionError as aserr:
+ raise InvalidData(aserr.args[0]) from aserr
+
+@resources.route("/data/unlink", methods=["POST"])
+@require_oauth("profile group resource")
+def unlink_data():
+ """Unlink data bound to a specific resource."""
+ try:
+ form = request.form
+ assert "resource_id" in form, "Resource ID not provided."
+ assert "data_link_id" in form, "Data Link ID not provided."
+
+ with require_oauth.acquire("profile group resource") as the_token:
+ def __unlink__(conn: db.DbConnection):
+ return unlink_data_from_resource(
+ conn, the_token.user, uuid.UUID(form["resource_id"]),
+ uuid.UUID(form["data_link_id"]))
+ return jsonify(with_db_connection(__unlink__))
+ except AssertionError as aserr:
+ raise InvalidData(aserr.args[0]) from aserr
+
+@resources.route("<uuid:resource_id>/user/list", methods=["GET"])
+@require_oauth("profile group resource")
+def resource_users(resource_id: uuid.UUID):
+ """Retrieve all users with access to the given resource."""
+ with require_oauth.acquire("profile group resource") as the_token:
+ def __the_users__(conn: db.DbConnection):
+ resource = resource_by_id(conn, the_token.user, resource_id)
+ authorised = authorised_for(
+ conn, the_token.user, ("group:resource:edit-resource",),
+ (resource_id,))
+ if authorised.get(resource_id, False):
+ with db.cursor(conn) as cursor:
+ def __organise_users_n_roles__(users_n_roles, row):
+ user_id = uuid.UUID(row["user_id"])
+ user = users_n_roles.get(user_id, {}).get(
+ "user", User(user_id, row["email"], row["name"]))
+ role = GroupRole(
+ uuid.UUID(row["group_role_id"]),
+ resource.group,
+ Role(uuid.UUID(row["role_id"]), row["role_name"],
+ bool(int(row["user_editable"])), tuple()))
+ return {
+ **users_n_roles,
+ user_id: {
+ "user": user,
+ "user_group": Group(
+ uuid.UUID(row["group_id"]), row["group_name"],
+ json.loads(row["group_metadata"])),
+ "roles": users_n_roles.get(
+ user_id, {}).get("roles", tuple()) + (role,)
+ }
+ }
+ cursor.execute(
+ "SELECT g.*, u.*, r.*, gr.group_role_id "
+ "FROM groups AS g INNER JOIN "
+ "group_users AS gu ON g.group_id=gu.group_id "
+ "INNER JOIN users AS u ON gu.user_id=u.user_id "
+ "INNER JOIN group_user_roles_on_resources AS guror "
+ "ON u.user_id=guror.user_id INNER JOIN roles AS r "
+ "ON guror.role_id=r.role_id "
+ "INNER JOIN group_roles AS gr ON r.role_id=gr.role_id "
+ "WHERE guror.resource_id=?",
+ (str(resource_id),))
+ return reduce(__organise_users_n_roles__, cursor.fetchall(), {})
+ raise AuthorisationError(
+ "You do not have sufficient privileges to view the resource "
+ "users.")
+ results = (
+ {
+ "user": dictify(row["user"]),
+ "user_group": dictify(row["user_group"]),
+ "roles": tuple(dictify(role) for role in row["roles"])
+ } for row in (
+ user_row for user_id, user_row
+ in with_db_connection(__the_users__).items()))
+ return jsonify(tuple(results))
+
+@resources.route("<uuid:resource_id>/user/assign", methods=["POST"])
+@require_oauth("profile group resource role")
+def assign_role_to_user(resource_id: uuid.UUID) -> Response:
+ """Assign a role on the specified resource to a user."""
+ with require_oauth.acquire("profile group resource role") as the_token:
+ try:
+ form = request.form
+ group_role_id = form.get("group_role_id", "")
+ user_email = form.get("user_email", "")
+ assert bool(group_role_id), "The role must be provided."
+ assert bool(user_email), "The user email must be provided."
+
+ def __assign__(conn: db.DbConnection) -> dict:
+ resource = resource_by_id(conn, the_token.user, resource_id)
+ user = user_by_email(conn, user_email)
+ return assign_resource_user(
+ conn, resource, user,
+ group_role_by_id(conn, resource.group,
+ uuid.UUID(group_role_id)))
+ except AssertionError as aserr:
+ raise AuthorisationError(aserr.args[0]) from aserr
+
+ return jsonify(with_db_connection(__assign__))
+
+@resources.route("<uuid:resource_id>/user/unassign", methods=["POST"])
+@require_oauth("profile group resource role")
+def unassign_role_to_user(resource_id: uuid.UUID) -> Response:
+ """Unassign a role on the specified resource from a user."""
+ with require_oauth.acquire("profile group resource role") as the_token:
+ try:
+ form = request.form
+ group_role_id = form.get("group_role_id", "")
+ user_id = form.get("user_id", "")
+ assert bool(group_role_id), "The role must be provided."
+ assert bool(user_id), "The user id must be provided."
+
+ def __assign__(conn: db.DbConnection) -> dict:
+ resource = resource_by_id(conn, the_token.user, resource_id)
+ return unassign_resource_user(
+ conn, resource, user_by_id(conn, uuid.UUID(user_id)),
+ group_role_by_id(conn, resource.group,
+ uuid.UUID(group_role_id)))
+ except AssertionError as aserr:
+ raise AuthorisationError(aserr.args[0]) from aserr
+
+ return jsonify(with_db_connection(__assign__))
+
+@resources.route("<uuid:resource_id>/toggle-public", methods=["POST"])
+@require_oauth("profile group resource role")
+def toggle_public(resource_id: uuid.UUID) -> Response:
+ """Make a resource public if it is private, or private if public."""
+ with require_oauth.acquire("profile group resource") as the_token:
+ def __toggle__(conn: db.DbConnection) -> Resource:
+ old_rsc = resource_by_id(conn, the_token.user, resource_id)
+ return save_resource(
+ conn, the_token.user, Resource(
+ old_rsc.group, old_rsc.resource_id, old_rsc.resource_name,
+ old_rsc.resource_category, not old_rsc.public,
+ old_rsc.resource_data))
+
+ resource = with_db_connection(__toggle__)
+ return jsonify({
+ "resource": dictify(resource),
+ "description": (
+ "Made resource public" if resource.public
+ else "Made resource private")})
diff --git a/gn_auth/auth/authorisation/roles/__init__.py b/gn_auth/auth/authorisation/roles/__init__.py
new file mode 100644
index 0000000..293a12f
--- /dev/null
+++ b/gn_auth/auth/authorisation/roles/__init__.py
@@ -0,0 +1,3 @@
+"""Initialise the `gn3.auth.authorisation.roles` package"""
+
+from .models import Role
diff --git a/gn_auth/auth/authorisation/roles/models.py b/gn_auth/auth/authorisation/roles/models.py
new file mode 100644
index 0000000..97e11af
--- /dev/null
+++ b/gn_auth/auth/authorisation/roles/models.py
@@ -0,0 +1,161 @@
+"""Handle management of roles"""
+from uuid import UUID, uuid4
+from functools import reduce
+from typing import Any, Sequence, Iterable, NamedTuple
+
+from pymonad.either import Left, Right, Either
+
+from gn3.auth import db
+from gn3.auth.dictify import dictify
+from gn3.auth.authentication.users import User
+from gn3.auth.authorisation.errors import AuthorisationError
+
+from ..checks import authorised_p
+from ..privileges import Privilege
+from ..errors import NotFoundError
+
+class Role(NamedTuple):
+ """Class representing a role: creates immutable objects."""
+ role_id: UUID
+ role_name: str
+ user_editable: bool
+ privileges: tuple[Privilege, ...]
+
+ def dictify(self) -> dict[str, Any]:
+ """Return a dict representation of `Role` objects."""
+ return {
+ "role_id": self.role_id, "role_name": self.role_name,
+ "user_editable": self.user_editable,
+ "privileges": tuple(dictify(priv) for priv in self.privileges)
+ }
+
+def check_user_editable(role: Role):
+ """Raise an exception if `role` is not user editable."""
+ if not role.user_editable:
+ raise AuthorisationError(
+ f"The role `{role.role_name}` is not user editable.")
+
+@authorised_p(
+ privileges = ("group:role:create-role",),
+ error_description="Could not create role")
+def create_role(
+ cursor: db.DbCursor, role_name: str,
+ privileges: Iterable[Privilege]) -> Role:
+ """
+ Create a new generic role.
+
+ PARAMS:
+ * cursor: A database cursor object - This function could be used as part of
+ a transaction, hence the use of a cursor rather than a connection
+ object.
+ * role_name: The name of the role
+ * privileges: A 'list' of privileges to assign the new role
+
+ RETURNS: An immutable `gn3.auth.authorisation.roles.Role` object
+ """
+ role = Role(uuid4(), role_name, True, tuple(privileges))
+
+ cursor.execute(
+ "INSERT INTO roles(role_id, role_name, user_editable) VALUES (?, ?, ?)",
+ (str(role.role_id), role.role_name, (1 if role.user_editable else 0)))
+ cursor.executemany(
+ "INSERT INTO role_privileges(role_id, privilege_id) VALUES (?, ?)",
+ tuple((str(role.role_id), str(priv.privilege_id))
+ for priv in privileges))
+
+ return role
+
+def __organise_privileges__(roles_dict, privilege_row):
+ """Organise the privileges into their roles."""
+ role_id_str = privilege_row["role_id"]
+ if role_id_str in roles_dict:
+ return {
+ **roles_dict,
+ role_id_str: Role(
+ UUID(role_id_str),
+ privilege_row["role_name"],
+ bool(int(privilege_row["user_editable"])),
+ roles_dict[role_id_str].privileges + (
+ Privilege(privilege_row["privilege_id"],
+ privilege_row["privilege_description"]),))
+ }
+
+ return {
+ **roles_dict,
+ role_id_str: Role(
+ UUID(role_id_str),
+ privilege_row["role_name"],
+ bool(int(privilege_row["user_editable"])),
+ (Privilege(privilege_row["privilege_id"],
+ privilege_row["privilege_description"]),))
+ }
+
+def user_roles(conn: db.DbConnection, user: User) -> Sequence[Role]:
+ """Retrieve non-resource roles assigned to the user."""
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "SELECT r.*, p.* FROM user_roles AS ur INNER JOIN roles AS r "
+ "ON ur.role_id=r.role_id INNER JOIN role_privileges AS rp "
+ "ON r.role_id=rp.role_id INNER JOIN privileges AS p "
+ "ON rp.privilege_id=p.privilege_id WHERE ur.user_id=?",
+ (str(user.user_id),))
+
+ return tuple(
+ reduce(__organise_privileges__, cursor.fetchall(), {}).values())
+ return tuple()
+
+def user_role(conn: db.DbConnection, user: User, role_id: UUID) -> Either:
+ """Retrieve a specific non-resource role assigned to the user."""
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "SELECT r.*, p.* FROM user_roles AS ur INNER JOIN roles AS r "
+ "ON ur.role_id=r.role_id INNER JOIN role_privileges AS rp "
+ "ON r.role_id=rp.role_id INNER JOIN privileges AS p "
+ "ON rp.privilege_id=p.privilege_id "
+ "WHERE ur.user_id=? AND ur.role_id=?",
+ (str(user.user_id), str(role_id)))
+
+ results = cursor.fetchall()
+ if results:
+ return Right(tuple(
+ reduce(__organise_privileges__, results, {}).values())[0])
+ return Left(NotFoundError(
+ f"Could not find role with id '{role_id}'",))
+
+def assign_default_roles(cursor: db.DbCursor, user: User):
+ """Assign `user` some default roles."""
+ cursor.execute(
+ 'SELECT role_id FROM roles WHERE role_name IN '
+ '("group-creator")')
+ role_ids = cursor.fetchall()
+ str_user_id = str(user.user_id)
+ params = tuple(
+ {"user_id": str_user_id, "role_id": row["role_id"]} for row in role_ids)
+ cursor.executemany(
+ ("INSERT INTO user_roles VALUES (:user_id, :role_id)"),
+ params)
+
+def revoke_user_role_by_name(cursor: db.DbCursor, user: User, role_name: str):
+ """Revoke a role from `user` by the role's name"""
+ cursor.execute(
+ "SELECT role_id FROM roles WHERE role_name=:role_name",
+ {"role_name": role_name})
+ role = cursor.fetchone()
+ if role:
+ cursor.execute(
+ ("DELETE FROM user_roles "
+ "WHERE user_id=:user_id AND role_id=:role_id"),
+ {"user_id": str(user.user_id), "role_id": role["role_id"]})
+
+def assign_user_role_by_name(cursor: db.DbCursor, user: User, role_name: str):
+ """Revoke a role from `user` by the role's name"""
+ cursor.execute(
+ "SELECT role_id FROM roles WHERE role_name=:role_name",
+ {"role_name": role_name})
+ role = cursor.fetchone()
+
+ if role:
+ cursor.execute(
+ ("INSERT INTO user_roles VALUES(:user_id, :role_id) "
+ "ON CONFLICT DO NOTHING"),
+ {"user_id": str(user.user_id), "role_id": role["role_id"]})
diff --git a/gn_auth/auth/authorisation/roles/views.py b/gn_auth/auth/authorisation/roles/views.py
new file mode 100644
index 0000000..3670aab
--- /dev/null
+++ b/gn_auth/auth/authorisation/roles/views.py
@@ -0,0 +1,26 @@
+"""The views/routes for the `gn3.auth.authorisation.roles` package."""
+import uuid
+
+from flask import jsonify, Response, Blueprint, current_app
+
+from gn3.auth import db
+from gn3.auth.dictify import dictify
+
+from .models import user_role
+
+from ...authentication.oauth2.resource_server import require_oauth
+
+roles = Blueprint("roles", __name__)
+
+@roles.route("/view/<uuid:role_id>", methods=["GET"])
+@require_oauth("profile role")
+def view_role(role_id: uuid.UUID) -> Response:
+ """Retrieve a user role with id `role_id`"""
+ def __error__(exc: Exception):
+ raise exc
+ with require_oauth.acquire("profile role") as the_token:
+ db_uri = current_app.config["AUTH_DB"]
+ with db.connection(db_uri) as conn:
+ the_role = user_role(conn, the_token.user, role_id)
+ return the_role.either(
+ __error__, lambda a_role: jsonify(dictify(a_role)))
diff --git a/gn_auth/auth/authorisation/users/__init__.py b/gn_auth/auth/authorisation/users/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/gn_auth/auth/authorisation/users/__init__.py
diff --git a/gn_auth/auth/authorisation/users/admin/__init__.py b/gn_auth/auth/authorisation/users/admin/__init__.py
new file mode 100644
index 0000000..8aa0743
--- /dev/null
+++ b/gn_auth/auth/authorisation/users/admin/__init__.py
@@ -0,0 +1,2 @@
+"""The admin module"""
+from .views import admin
diff --git a/gn_auth/auth/authorisation/users/admin/ui.py b/gn_auth/auth/authorisation/users/admin/ui.py
new file mode 100644
index 0000000..242c7a6
--- /dev/null
+++ b/gn_auth/auth/authorisation/users/admin/ui.py
@@ -0,0 +1,27 @@
+"""UI utilities for the auth system."""
+from functools import wraps
+from flask import flash, url_for, redirect
+
+from gn3.auth.authentication.users import User
+from gn3.auth.db_utils import with_db_connection
+from gn3.auth.authorisation.roles.models import user_roles
+
+from gn3.session import logged_in, session_user, clear_session_info
+
+def is_admin(func):
+ """Verify user is a system admin."""
+ @wraps(func)
+ @logged_in
+ def __admin__(*args, **kwargs):
+ admin_roles = [
+ role for role in with_db_connection(
+ lambda conn: user_roles(
+ conn, User(**session_user())))
+ if role.role_name == "system-administrator"]
+ if len(admin_roles) > 0:
+ return func(*args, **kwargs)
+ flash("Expected a system administrator.", "alert-danger")
+ flash("You have been logged out of the system.", "alert-info")
+ clear_session_info()
+ return redirect(url_for("oauth2.admin.login"))
+ return __admin__
diff --git a/gn_auth/auth/authorisation/users/admin/views.py b/gn_auth/auth/authorisation/users/admin/views.py
new file mode 100644
index 0000000..c9f1887
--- /dev/null
+++ b/gn_auth/auth/authorisation/users/admin/views.py
@@ -0,0 +1,230 @@
+"""UI for admin stuff"""
+import uuid
+import json
+import random
+import string
+from functools import partial
+from datetime import datetime, timezone, timedelta
+
+from email_validator import validate_email, EmailNotValidError
+from flask import (
+ flash,
+ request,
+ url_for,
+ redirect,
+ Blueprint,
+ current_app,
+ render_template)
+
+
+from gn3 import session
+from gn3.auth import db
+from gn3.auth.db_utils import with_db_connection
+
+from gn3.auth.authentication.oauth2.models.oauth2client import (
+ save_client,
+ OAuth2Client,
+ oauth2_clients,
+ client as oauth2_client,
+ delete_client as _delete_client)
+from gn3.auth.authentication.users import (
+ User,
+ user_by_id,
+ valid_login,
+ user_by_email,
+ hash_password)
+
+from .ui import is_admin
+
+admin = Blueprint("admin", __name__)
+
+@admin.before_request
+def update_expires():
+ """Update session expiration."""
+ if session.session_info() and not session.update_expiry():
+ flash("Session has expired. Logging out...", "alert-warning")
+ session.clear_session_info()
+ return redirect(url_for("oauth2.admin.login"))
+ return None
+
+@admin.route("/dashboard", methods=["GET"])
+@is_admin
+def dashboard():
+ """Admin dashboard."""
+ return render_template("admin/dashboard.html")
+
+@admin.route("/login", methods=["GET", "POST"])
+def login():
+ """Log in to GN3 directly without OAuth2 client."""
+ if request.method == "GET":
+ return render_template(
+ "admin/login.html",
+ next_uri=request.args.get("next", "oauth2.admin.dashboard"))
+
+ form = request.form
+ next_uri = form.get("next_uri", "oauth2.admin.dashboard")
+ error_message = "Invalid email or password provided."
+ login_page = redirect(url_for("oauth2.admin.login", next=next_uri))
+ try:
+ email = validate_email(form.get("email", "").strip(),
+ check_deliverability=False)
+ password = form.get("password")
+ with db.connection(current_app.config["AUTH_DB"]) as conn:
+ user = user_by_email(conn, email["email"])
+ if valid_login(conn, user, password):
+ session.update_session_info(
+ user=user._asdict(),
+ expires=(
+ datetime.now(tz=timezone.utc) + timedelta(minutes=10)))
+ return redirect(url_for(next_uri))
+ flash(error_message, "alert-danger")
+ return login_page
+ except EmailNotValidError as _enve:
+ flash(error_message, "alert-danger")
+ return login_page
+
+@admin.route("/logout", methods=["GET"])
+def logout():
+ """Log out the admin."""
+ if not session.session_info():
+ flash("Not logged in.", "alert-info")
+ return redirect(url_for("oauth2.admin.login"))
+ session.clear_session_info()
+ flash("Logged out", "alert-success")
+ return redirect(url_for("oauth2.admin.login"))
+
+def random_string(length: int = 64) -> str:
+ """Generate a random string."""
+ return "".join(
+ random.choice(string.ascii_letters + string.digits + string.punctuation)
+ for _idx in range(0, length))
+
+def __response_types__(grant_types: tuple[str, ...]) -> tuple[str, ...]:
+ """Compute response types from grant types."""
+ resps = {
+ "password": ("token",),
+ "authorization_code": ("token", "code"),
+ "refresh_token": ("token",)
+ }
+ return tuple(set(
+ resp_typ for types_list
+ in (types for grant, types in resps.items() if grant in grant_types)
+ for resp_typ in types_list))
+
+@admin.route("/register-client", methods=["GET", "POST"])
+@is_admin
+def register_client():
+ """Register an OAuth2 client."""
+ def __list_users__(conn):
+ with db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM users")
+ return tuple(
+ User(uuid.UUID(row["user_id"]), row["email"], row["name"])
+ for row in cursor.fetchall())
+ if request.method == "GET":
+ return render_template(
+ "admin/register-client.html",
+ scope=current_app.config["OAUTH2_SCOPE"],
+ users=with_db_connection(__list_users__),
+ current_user=session.session_user())
+
+ form = request.form
+ raw_client_secret = random_string()
+ default_redirect_uri = form["redirect_uri"]
+ grant_types = form.getlist("grants[]")
+ client = OAuth2Client(
+ client_id = uuid.uuid4(),
+ client_secret = hash_password(raw_client_secret),
+ client_id_issued_at = datetime.now(tz=timezone.utc),
+ client_secret_expires_at = datetime.fromtimestamp(0),
+ client_metadata = {
+ "client_name": "GN2 Dev Server",
+ "token_endpoint_auth_method": [
+ "client_secret_post", "client_secret_basic"],
+ "client_type": "confidential",
+ "grant_types": ["password", "authorization_code", "refresh_token"],
+ "default_redirect_uri": default_redirect_uri,
+ "redirect_uris": [default_redirect_uri] + form.get("other_redirect_uri", "").split(),
+ "response_type": __response_types__(tuple(grant_types)),
+ "scope": form.getlist("scope[]")
+ },
+ user = with_db_connection(partial(
+ user_by_id, user_id=uuid.UUID(form["user"])))
+ )
+ client = with_db_connection(partial(save_client, the_client=client))
+ return render_template(
+ "admin/registered-client.html",
+ client=client,
+ client_secret = raw_client_secret)
+
+def __parse_client__(sqlite3_row) -> dict:
+ """Parse the client details into python datatypes."""
+ return {
+ **dict(sqlite3_row),
+ "client_metadata": json.loads(sqlite3_row["client_metadata"])
+ }
+
+@admin.route("/list-client", methods=["GET"])
+@is_admin
+def list_clients():
+ """List all registered OAuth2 clients."""
+ return render_template(
+ "admin/list-oauth2-clients.html",
+ clients=with_db_connection(oauth2_clients))
+
+@admin.route("/view-client/<uuid:client_id>", methods=["GET"])
+@is_admin
+def view_client(client_id: uuid.UUID):
+ """View details of OAuth2 client with given `client_id`."""
+ return render_template(
+ "admin/view-oauth2-client.html",
+ client=with_db_connection(partial(oauth2_client, client_id=client_id)),
+ scope=current_app.config["OAUTH2_SCOPE"])
+
+@admin.route("/edit-client", methods=["POST"])
+@is_admin
+def edit_client():
+ """Edit the details of the given client."""
+ form = request.form
+ the_client = with_db_connection(partial(
+ oauth2_client, client_id=uuid.UUID(form["client_id"])))
+ if the_client.is_nothing():
+ flash("No such client.", "alert-danger")
+ return redirect(url_for("oauth2.admin.list_clients"))
+ the_client = the_client.value
+ client_metadata = {
+ **the_client.client_metadata,
+ "default_redirect_uri": form["default_redirect_uri"],
+ "redirect_uris": list(set(
+ [form["default_redirect_uri"]] +
+ form["other_redirect_uris"].split("\r\n"))),
+ "grants": form.getlist("grants[]"),
+ "scope": form.getlist("scope[]")
+ }
+ with_db_connection(partial(save_client, the_client=OAuth2Client(
+ the_client.client_id,
+ the_client.client_secret,
+ the_client.client_id_issued_at,
+ the_client.client_secret_expires_at,
+ client_metadata,
+ the_client.user)))
+ flash("Client updated.", "alert-success")
+ return redirect(url_for("oauth2.admin.view_client",
+ client_id=the_client.client_id))
+
+@admin.route("/delete-client", methods=["POST"])
+@is_admin
+def delete_client():
+ """Delete the details of the client."""
+ form = request.form
+ the_client = with_db_connection(partial(
+ oauth2_client, client_id=uuid.UUID(form["client_id"])))
+ if the_client.is_nothing():
+ flash("No such client.", "alert-danger")
+ return redirect(url_for("oauth2.admin.list_clients"))
+ the_client = the_client.value
+ with_db_connection(partial(_delete_client, client=the_client))
+ flash((f"Client '{the_client.client_metadata.client_name}' was deleted "
+ "successfully."),
+ "alert-success")
+ return redirect(url_for("oauth2.admin.list_clients"))
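
For reference, a worked example of the grant-type to response-type mapping implemented by `__response_types__` above (tuple ordering may vary because the helper deduplicates through a set):

    # ("password",)                       -> ("token",)
    # ("authorization_code",)             -> ("token", "code")
    # ("refresh_token", "password")       -> ("token",)
    # ("password", "authorization_code")  -> ("token", "code")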
diff --git a/gn_auth/auth/authorisation/users/collections/__init__.py b/gn_auth/auth/authorisation/users/collections/__init__.py
new file mode 100644
index 0000000..88ab040
--- /dev/null
+++ b/gn_auth/auth/authorisation/users/collections/__init__.py
@@ -0,0 +1 @@
+"""Package dealing with user collections."""
diff --git a/gn_auth/auth/authorisation/users/collections/models.py b/gn_auth/auth/authorisation/users/collections/models.py
new file mode 100644
index 0000000..7577fa8
--- /dev/null
+++ b/gn_auth/auth/authorisation/users/collections/models.py
@@ -0,0 +1,269 @@
+"""Handle user collections."""
+import json
+from uuid import UUID, uuid4
+from datetime import datetime
+
+from redis import Redis
+from email_validator import validate_email, EmailNotValidError
+
+from gn3.auth.authorisation.errors import InvalidData, NotFoundError
+
+from ..models import User
+
+__OLD_REDIS_COLLECTIONS_KEY__ = "collections"
+__REDIS_COLLECTIONS_KEY__ = "collections2"
+
+class CollectionJSONEncoder(json.JSONEncoder):
+ """Serialise collection objects into JSON."""
+ def default(self, obj):# pylint: disable=[arguments-renamed]
+ if isinstance(obj, UUID):
+ return str(obj)
+ if isinstance(obj, datetime):
+ return obj.strftime("%b %d %Y %I:%M%p")
+ return json.JSONEncoder.default(self, obj)
+
+def __valid_email__(email:str) -> bool:
+ """Check for email validity."""
+ try:
+ validate_email(email, check_deliverability=True)
+ except EmailNotValidError as _enve:
+ return False
+ return True
+
+def __toggle_boolean_field__(
+ rconn: Redis, email: str, field: str):
+ """Toggle the valuen of a boolean field"""
+ mig_dict = json.loads(rconn.hget("migratable-accounts", email) or "{}")
+ if bool(mig_dict):
+ rconn.hset("migratable-accounts", email,
+ json.dumps({**mig_dict, field: not mig_dict.get(field, True)}))
+
+def __build_email_uuid_bridge__(rconn: Redis):
+ """
+ Build a connection between new accounts and old user accounts.
+
+ The only thing that is common between the two is the email address,
+ therefore, we use that to link the two items.
+ """
+ old_accounts = {
+ account["email_address"]: {
+ "user_id": account["user_id"],
+ "collections-migrated": False,
+ "resources_migrated": False
+ } for account in (
+ acct for acct in
+ (json.loads(usr) for usr in rconn.hgetall("users").values())
+ if (bool(acct.get("email_address", False)) and
+ __valid_email__(acct["email_address"])))
+ }
+ if bool(old_accounts):
+ rconn.hset("migratable-accounts", mapping={
+ key: json.dumps(value) for key,value in old_accounts.items()
+ })
+ return old_accounts
+
+def __retrieve_old_accounts__(rconn: Redis) -> dict:
+ accounts = rconn.hgetall("migratable-accounts")
+ if accounts:
+ return {
+ key: json.loads(value) for key, value in accounts.items()
+ }
+ return __build_email_uuid_bridge__(rconn)
+
+def parse_collection(coll: dict) -> dict:
+ """Parse the collection as persisted in redis to a usable python object."""
+ created = coll.get("created", coll.get("created_timestamp"))
+ changed = coll.get("changed", coll.get("changed_timestamp"))
+ return {
+ "id": UUID(coll["id"]),
+ "name": coll["name"],
+ "created": datetime.strptime(created, "%b %d %Y %I:%M%p"),
+ "changed": datetime.strptime(changed, "%b %d %Y %I:%M%p"),
+ "num_members": int(coll["num_members"]),
+ "members": coll["members"]
+ }
+
+def dump_collection(pythoncollection: dict) -> str:
+ """Convert the collection from a python object to a json string."""
+ return json.dumps(pythoncollection, cls=CollectionJSONEncoder)
+
+def __retrieve_old_user_collections__(rconn: Redis, old_user_id: UUID) -> tuple:
+ """Retrieve any old collections relating to the user."""
+ return tuple(parse_collection(coll) for coll in
+ json.loads(rconn.hget(
+ __OLD_REDIS_COLLECTIONS_KEY__, str(old_user_id)) or "[]"))
+
+def user_collections(rconn: Redis, user: User) -> tuple[dict, ...]:
+ """Retrieve current user collections."""
+ collections = tuple(parse_collection(coll) for coll in json.loads(
+ rconn.hget(__REDIS_COLLECTIONS_KEY__, str(user.user_id)) or
+ "[]"))
+ old_accounts = __retrieve_old_accounts__(rconn)
+ if (user.email in old_accounts and
+ not old_accounts[user.email]["collections-migrated"]):
+ old_user_id = old_accounts[user.email]["user_id"]
+ collections = tuple({
+ coll["id"]: coll for coll in (
+ collections + __retrieve_old_user_collections__(
+ rconn, UUID(old_user_id)))
+ }.values())
+ __toggle_boolean_field__(rconn, user.email, "collections-migrated")
+ rconn.hset(
+ __REDIS_COLLECTIONS_KEY__,
+ key=str(user.user_id),
+ value=json.dumps(collections, cls=CollectionJSONEncoder))
+ return collections
+
+def save_collections(rconn: Redis, user: User, collections: tuple[dict, ...]) -> tuple[dict, ...]:
+ """Save the `collections` to redis."""
+ rconn.hset(
+ __REDIS_COLLECTIONS_KEY__,
+ str(user.user_id),
+ json.dumps(collections, cls=CollectionJSONEncoder))
+ return collections
+
+def add_to_user_collections(rconn: Redis, user: User, collection: dict) -> dict:
+ """Add `collection` to list of user collections."""
+ ucolls = user_collections(rconn, user)
+ save_collections(rconn, user, ucolls + (collection,))
+ return collection
+
+def create_collection(rconn: Redis, user: User, name: str, traits: tuple) -> dict:
+ """Create a new collection."""
+ now = datetime.utcnow()
+ return add_to_user_collections(rconn, user, {
+ "id": uuid4(),
+ "name": name,
+ "created": now,
+ "changed": now,
+ "num_members": len(traits),
+ "members": traits
+ })
+
+def get_collection(rconn: Redis, user: User, collection_id: UUID) -> dict:
+ """Retrieve the collection with ID `collection_id`."""
+ colls = tuple(coll for coll in user_collections(rconn, user)
+ if coll["id"] == collection_id)
+ if len(colls) == 0:
+ raise NotFoundError(
+ f"Could not find a collection with ID `{collection_id}` for user "
+ f"with ID `{user.user_id}`")
+ if len(colls) > 1:
+ err = InvalidData(
+ "More than one collection was found having the ID "
+ f"`{collection_id}` for user with ID `{user.user_id}`.")
+ err.error_code = 513
+ raise err
+ return colls[0]
+
+def __raise_if_collections_empty__(user: User, collections: tuple[dict, ...]):
+ """Raise an exception if no collections are found for `user`."""
+ if len(collections) < 1:
+ raise NotFoundError(f"No collections found for user `{user.user_id}`")
+
+def __raise_if_not_single_collection__(
+ user: User, collection_id: UUID, collections: tuple[dict, ...]):
+ """
+ Raise an exception if there is zero, or more than one, collection for `user`.
+ """
+ if len(collections) == 0:
+ raise NotFoundError(f"No collections found for user `{user.user_id}` "
+ f"with ID `{collection_id}`.")
+ if len(collections) > 1:
+ err = InvalidData(
+ "More than one collection was found having the ID "
+ f"`{collection_id}` for user with ID `{user.user_id}`.")
+ err.error_code = 513
+ raise err
+
+def delete_collections(rconn: Redis,
+ user: User,
+ collection_ids: tuple[UUID, ...]) -> tuple[dict, ...]:
+ """
+ Delete collections with the given `collection_ids` returning the deleted
+ collections.
+ """
+ ucolls = user_collections(rconn, user)
+ save_collections(
+ rconn,
+ user,
+ tuple(coll for coll in ucolls if coll["id"] not in collection_ids))
+ return tuple(coll for coll in ucolls if coll["id"] in collection_ids)
+
+def add_traits(rconn: Redis,
+ user: User,
+ collection_id: UUID,
+ traits: tuple[str, ...]) -> dict:
+ """
+ Add `traits` to the `user` collection identified by `collection_id`.
+
+ Returns: The collection with the new traits added.
+ """
+ ucolls = user_collections(rconn, user)
+ __raise_if_collections_empty__(user, ucolls)
+
+ mod_col = tuple(coll for coll in ucolls if coll["id"] == collection_id)
+ __raise_if_not_single_collection__(user, collection_id, mod_col)
+ new_members = tuple(set(tuple(mod_col[0]["members"]) + traits))
+ new_coll = {
+ **mod_col[0],
+ "members": new_members,
+ "num_members": len(new_members)
+ }
+ save_collections(
+ rconn,
+ user,
+ (tuple(coll for coll in ucolls if coll["id"] != collection_id) +
+ (new_coll,)))
+ return new_coll
+
+def remove_traits(rconn: Redis,
+ user: User,
+ collection_id: UUID,
+ traits: tuple[str, ...]) -> dict:
+ """
+ Remove `traits` from the `user` collection identified by `collection_id`.
+
+ Returns: The collection with the specified `traits` removed.
+ """
+ ucolls = user_collections(rconn, user)
+ __raise_if_collections_empty__(user, ucolls)
+
+ mod_col = tuple(coll for coll in ucolls if coll["id"] == collection_id)
+ __raise_if_not_single_collection__(user, collection_id, mod_col)
+ new_members = tuple(
+ trait for trait in mod_col[0]["members"] if trait not in traits)
+ new_coll = {
+ **mod_col[0],
+ "members": new_members,
+ "num_members": len(new_members)
+ }
+ save_collections(
+ rconn,
+ user,
+ (tuple(coll for coll in ucolls if coll["id"] != collection_id) +
+ (new_coll,)))
+ return new_coll
+
+def change_name(rconn: Redis,
+ user: User,
+ collection_id: UUID,
+ new_name: str) -> dict:
+ """
+ Change the collection's name.
+
+ Returns: The collection with the new name.
+ """
+ ucolls = user_collections(rconn, user)
+ __raise_if_collections_empty__(user, ucolls)
+
+ mod_col = tuple(coll for coll in ucolls if coll["id"] == collection_id)
+ __raise_if_not_single_collection__(user, collection_id, mod_col)
+
+ new_coll = {**mod_col[0], "name": new_name}
+ save_collections(
+ rconn,
+ user,
+ (tuple(coll for coll in ucolls if coll["id"] != collection_id) +
+ (new_coll,)))
+ return new_coll
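The functions above cover the whole lifecycle of a collection. A minimal usage sketch follows, assuming a local Redis instance and mirroring the gn3 import paths still used by the copied code; the Redis URL, user details and trait names are illustrative only.

# Illustrative sketch; Redis URL, user details and trait names are assumptions.
from uuid import uuid4
from redis import Redis

from gn3.auth.authentication.users import User
from gn3.auth.authorisation.users.collections.models import (
    create_collection, add_traits, remove_traits, get_collection)

with Redis.from_url("redis://localhost:6379/0", decode_responses=True) as rconn:
    user = User(uuid4(), "jane@example.com", "Jane Doe")
    coll = create_collection(rconn, user, "hippocampus traits", ("trait1", "trait2"))
    coll = add_traits(rconn, user, coll["id"], ("trait3",))
    coll = remove_traits(rconn, user, coll["id"], ("trait1",))
    print(get_collection(rconn, user, coll["id"])["num_members"])  # expect 2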
diff --git a/gn_auth/auth/authorisation/users/collections/views.py b/gn_auth/auth/authorisation/users/collections/views.py
new file mode 100644
index 0000000..1fa25a3
--- /dev/null
+++ b/gn_auth/auth/authorisation/users/collections/views.py
@@ -0,0 +1,239 @@
+"""Views regarding user collections."""
+from uuid import UUID
+
+from redis import Redis
+from flask import jsonify, request, Response, Blueprint, current_app
+
+from gn3.auth import db
+from gn3.auth.db_utils import with_db_connection
+from gn3.auth.authorisation.checks import require_json
+from gn3.auth.authorisation.errors import NotFoundError
+
+from gn3.auth.authentication.users import User, user_by_id
+from gn3.auth.authentication.oauth2.resource_server import require_oauth
+
+from .models import (
+ add_traits,
+ change_name,
+ remove_traits,
+ get_collection,
+ user_collections,
+ save_collections,
+ create_collection,
+ delete_collections as _delete_collections)
+
+collections = Blueprint("collections", __name__)
+
+@collections.route("/list")
+@require_oauth("profile user")
+def list_user_collections() -> Response:
+    """List the authenticated user's collections."""
+ with (require_oauth.acquire("profile user") as the_token,
+ Redis.from_url(current_app.config["REDIS_URI"],
+ decode_responses=True) as redisconn):
+ return jsonify(user_collections(redisconn, the_token.user))
+
+@collections.route("/<uuid:anon_id>/list")
+def list_anonymous_collections(anon_id: UUID) -> Response:
+ """Fetch anonymous collections"""
+ with Redis.from_url(
+ current_app.config["REDIS_URI"], decode_responses=True) as redisconn:
+ def __list__(conn: db.DbConnection) -> tuple:
+ try:
+ _user = user_by_id(conn, anon_id)
+ current_app.logger.warning(
+ "Fetch collections for authenticated user using the "
+ "`list_user_collections()` endpoint.")
+ return tuple()
+ except NotFoundError as _nfe:
+ return user_collections(
+ redisconn, User(anon_id, "anon@ymous.user", "Anonymous User"))
+
+ return jsonify(with_db_connection(__list__))
+
+@require_oauth("profile user")
+def __new_collection_as_authenticated_user__(redisconn, name, traits):
+ """Create a new collection as an authenticated user."""
+ with require_oauth.acquire("profile user") as token:
+ return create_collection(redisconn, token.user, name, traits)
+
+def __new_collection_as_anonymous_user__(redisconn, name, traits):
+ """Create a new collection as an anonymous user."""
+ return create_collection(redisconn,
+ User(UUID(request.json.get("anon_id")),
+ "anon@ymous.user",
+ "Anonymous User"),
+ name,
+ traits)
+
+@collections.route("/new", methods=["POST"])
+@require_json
+def new_user_collection() -> Response:
+ """Create a new collection."""
+ with (Redis.from_url(current_app.config["REDIS_URI"],
+ decode_responses=True) as redisconn):
+ traits = tuple(request.json.get("traits", tuple()))# type: ignore[union-attr]
+ name = request.json.get("name")# type: ignore[union-attr]
+ if bool(request.headers.get("Authorization")):
+ return jsonify(__new_collection_as_authenticated_user__(
+ redisconn, name, traits))
+ return jsonify(__new_collection_as_anonymous_user__(
+ redisconn, name, traits))
+
+@collections.route("/<uuid:collection_id>/view", methods=["POST"])
+@require_json
+def view_collection(collection_id: UUID) -> Response:
+ """View a particular collection"""
+ with (Redis.from_url(current_app.config["REDIS_URI"],
+ decode_responses=True) as redisconn):
+ if bool(request.headers.get("Authorization")):
+ with require_oauth.acquire("profile user") as token:
+ return jsonify(get_collection(redisconn,
+ token.user,
+ collection_id))
+ return jsonify(get_collection(
+ redisconn,
+ User(
+ UUID(request.json.get("anon_id")),#type: ignore[union-attr]
+ "anon@ymous.user",
+ "Anonymous User"),
+ collection_id))
+
+@collections.route("/anonymous/import", methods=["POST"])
+@require_json
+@require_oauth("profile user")
+def import_anonymous() -> Response:
+ """Import anonymous collections."""
+ with (require_oauth.acquire("profile user") as token,
+ Redis.from_url(current_app.config["REDIS_URI"],
+ decode_responses=True) as redisconn):
+ anon_id = UUID(request.json.get("anon_id"))#type: ignore[union-attr]
+ anon_colls = user_collections(redisconn, User(
+ anon_id, "anon@ymous.user", "Anonymous User"))
+ save_collections(
+ redisconn,
+ token.user,
+ (user_collections(redisconn, token.user) +
+ anon_colls))
+ redisconn.hdel("collections", str(anon_id))
+ return jsonify({
+            "message": f"Import of {len(anon_colls)} collections was successful."
+ })
+
+@collections.route("/anonymous/delete", methods=["POST"])
+@require_json
+@require_oauth("profile user")
+def delete_anonymous() -> Response:
+ """Delete anonymous collections."""
+ with (require_oauth.acquire("profile user") as _token,
+ Redis.from_url(current_app.config["REDIS_URI"],
+ decode_responses=True) as redisconn):
+ anon_id = UUID(request.json.get("anon_id"))#type: ignore[union-attr]
+ anon_colls = user_collections(redisconn, User(
+ anon_id, "anon@ymous.user", "Anonymous User"))
+ redisconn.hdel("collections", str(anon_id))
+ return jsonify({
+            "message": f"Deletion of {len(anon_colls)} collections was successful."
+ })
+
+@collections.route("/delete", methods=["POST"])
+@require_json
+def delete_collections():
+ """Delete specified collections."""
+ with (Redis.from_url(current_app.config["REDIS_URI"],
+ decode_responses=True) as redisconn):
+ coll_ids = tuple(UUID(cid) for cid in request.json["collection_ids"])
+ deleted = _delete_collections(
+ redisconn,
+            User(UUID(request.json["anon_id"]),
+                 "anon@ymous.user",
+                 "Anonymous User"),
+ coll_ids)
+ if bool(request.headers.get("Authorization")):
+ with require_oauth.acquire("profile user") as token:
+ deleted = deleted + _delete_collections(
+ redisconn, token.user, coll_ids)
+
+ return jsonify({
+ "message": f"Deleted {len(deleted)} collections."})
+
+@collections.route("/<uuid:collection_id>/traits/remove", methods=["POST"])
+@require_json
+def remove_traits_from_collection(collection_id: UUID) -> Response:
+ """Remove specified traits from collection with ID `collection_id`."""
+ if len(request.json["traits"]) < 1:#type: ignore[index]
+ return jsonify({"message": "No trait to remove from collection."})
+
+ the_traits = tuple(request.json["traits"])#type: ignore[index]
+ with (Redis.from_url(current_app.config["REDIS_URI"],
+ decode_responses=True) as redisconn):
+ if not bool(request.headers.get("Authorization")):
+ coll = remove_traits(
+ redisconn,
+                User(UUID(request.json["anon_id"]),#type: ignore[index]
+ "anon@ymous.user",
+ "Anonymous User"),
+ collection_id,
+ the_traits)
+ else:
+ with require_oauth.acquire("profile user") as token:
+ coll = remove_traits(
+ redisconn, token.user, collection_id, the_traits)
+
+ return jsonify({
+ "message": f"Deleted {len(the_traits)} traits from collection.",
+ "collection": coll
+ })
+
+@collections.route("/<uuid:collection_id>/traits/add", methods=["POST"])
+@require_json
+def add_traits_to_collection(collection_id: UUID) -> Response:
+ """Add specified traits to collection with ID `collection_id`."""
+ if len(request.json["traits"]) < 1:#type: ignore[index]
+ return jsonify({"message": "No trait to add to collection."})
+
+ the_traits = tuple(request.json["traits"])#type: ignore[index]
+ with (Redis.from_url(current_app.config["REDIS_URI"],
+ decode_responses=True) as redisconn):
+ if not bool(request.headers.get("Authorization")):
+ coll = add_traits(
+ redisconn,
+                User(UUID(request.json["anon_id"]),#type: ignore[index]
+ "anon@ymous.user",
+ "Anonymous User"),
+ collection_id,
+ the_traits)
+ else:
+ with require_oauth.acquire("profile user") as token:
+ coll = add_traits(
+ redisconn, token.user, collection_id, the_traits)
+
+ return jsonify({
+ "message": f"Added {len(the_traits)} traits to collection.",
+ "collection": coll
+ })
+
+@collections.route("/<uuid:collection_id>/rename", methods=["POST"])
+@require_json
+def rename_collection(collection_id: UUID) -> Response:
+ """Rename the given collection"""
+ if not bool(request.json["new_name"]):#type: ignore[index]
+ return jsonify({"message": "No new name to change to."})
+
+ new_name = request.json["new_name"]#type: ignore[index]
+ with (Redis.from_url(current_app.config["REDIS_URI"],
+ decode_responses=True) as redisconn):
+ if not bool(request.headers.get("Authorization")):
+ coll = change_name(redisconn,
+ User(UUID(request.json["anon_id"]),#type: ignore[index]
+ "anon@ymous.user",
+ "Anonymous User"),
+ collection_id,
+ new_name)
+ else:
+ with require_oauth.acquire("profile user") as token:
+ coll = change_name(
+ redisconn, token.user, collection_id, new_name)
+
+ return jsonify({
+ "message": "Collection rename successful.",
+ "collection": coll
+ })
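As a rough client-side sketch of the anonymous flow above, assuming the users blueprint (with this collections blueprint under it) ends up mounted at /user/collections; the host, port and anon_id are placeholders.

# Illustrative client calls; the base URL and mount prefix are assumptions.
import uuid
import requests

BASE = "http://localhost:8080/user/collections"
anon_id = str(uuid.uuid4())

created = requests.post(f"{BASE}/new", json={
    "anon_id": anon_id, "name": "my-traits", "traits": ["trait1"]}).json()
viewed = requests.post(f"{BASE}/{created['id']}/view",
                       json={"anon_id": anon_id}).json()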
diff --git a/gn_auth/auth/authorisation/users/masquerade/__init__.py b/gn_auth/auth/authorisation/users/masquerade/__init__.py
new file mode 100644
index 0000000..69d64f0
--- /dev/null
+++ b/gn_auth/auth/authorisation/users/masquerade/__init__.py
@@ -0,0 +1 @@
+"""Package to deal with masquerading."""
diff --git a/gn_auth/auth/authorisation/users/masquerade/models.py b/gn_auth/auth/authorisation/users/masquerade/models.py
new file mode 100644
index 0000000..9f24b6b
--- /dev/null
+++ b/gn_auth/auth/authorisation/users/masquerade/models.py
@@ -0,0 +1,67 @@
+"""Functions for handling masquerade."""
+from uuid import uuid4
+from functools import wraps
+from datetime import datetime
+
+from flask import current_app as app
+
+from gn3.auth import db
+
+from gn3.auth.authorisation.errors import ForbiddenAccess
+from gn3.auth.authorisation.roles.models import user_roles
+
+from gn3.auth.authentication.users import User
+from gn3.auth.authentication.oauth2.models.oauth2token import (
+ OAuth2Token, save_token)
+
+__FIVE_HOURS__ = (60 * 60 * 5)
+
+def can_masquerade(func):
+ """Security decorator."""
+ @wraps(func)
+ def __checker__(*args, **kwargs):
+ if len(args) == 3:
+ conn, token, _masq_user = args
+ elif len(args) == 2:
+ conn, token = args
+ elif len(args) == 1:
+ conn = args[0]
+ token = kwargs["original_token"]
+ else:
+ conn = kwargs["conn"]
+ token = kwargs["original_token"]
+
+ masq_privs = [priv for role in user_roles(conn, token.user)
+ for priv in role.privileges
+ if priv.privilege_id == "system:user:masquerade"]
+ if len(masq_privs) == 0:
+ raise ForbiddenAccess(
+ "You do not have the ability to masquerade as another user.")
+ return func(*args, **kwargs)
+ return __checker__
+
+@can_masquerade
+def masquerade_as(
+ conn: db.DbConnection,
+ original_token: OAuth2Token,
+ masqueradee: User) -> OAuth2Token:
+    """Get a token that lets the bearer of `original_token` act as `masqueradee`."""
+ token_details = app.config["OAUTH2_SERVER"].generate_token(
+ client=original_token.client,
+ grant_type="authorization_code",
+ user=masqueradee,
+ expires_in=__FIVE_HOURS__,
+ include_refresh_token=True)
+ new_token = OAuth2Token(
+ token_id=uuid4(),
+ client=original_token.client,
+ token_type=token_details["token_type"],
+ access_token=token_details["access_token"],
+ refresh_token=token_details.get("refresh_token"),
+ scope=original_token.scope,
+ revoked=False,
+ issued_at=datetime.now(),
+ expires_in=token_details["expires_in"],
+ user=masqueradee)
+ save_token(conn, new_token)
+ return new_token
diff --git a/gn_auth/auth/authorisation/users/masquerade/views.py b/gn_auth/auth/authorisation/users/masquerade/views.py
new file mode 100644
index 0000000..43286a1
--- /dev/null
+++ b/gn_auth/auth/authorisation/users/masquerade/views.py
@@ -0,0 +1,48 @@
+"""Endpoints for user masquerade"""
+from uuid import UUID
+from functools import partial
+
+from flask import request, jsonify, Response, Blueprint
+
+from gn3.auth.db_utils import with_db_connection
+from gn3.auth.authorisation.errors import InvalidData
+from gn3.auth.authorisation.checks import require_json
+
+from gn3.auth.authentication.users import user_by_id
+from gn3.auth.authentication.oauth2.resource_server import require_oauth
+
+from .models import masquerade_as
+
+masq = Blueprint("masquerade", __name__)
+
+@masq.route("/", methods=["POST"])
+@require_oauth("profile user masquerade")
+@require_json
+def masquerade() -> Response:
+ """Masquerade as a particular user."""
+ with require_oauth.acquire("profile user masquerade") as token:
+ masqueradee_id = UUID(request.json["masquerade_as"])#type: ignore[index]
+ if masqueradee_id == token.user.user_id:
+ raise InvalidData("You are not allowed to masquerade as yourself.")
+
+ masq_user = with_db_connection(partial(
+ user_by_id, user_id=masqueradee_id))
+ def __masq__(conn):
+ new_token = masquerade_as(conn, original_token=token, masqueradee=masq_user)
+ return new_token
+ def __dump_token__(tok):
+ return {
+ key: value for key, value in (tok._asdict().items())
+ if key in ("access_token", "refresh_token", "expires_in",
+ "token_type")
+ }
+ return jsonify({
+ "original": {
+ "user": token.user._asdict(),
+ "token": __dump_token__(token)
+ },
+ "masquerade_as": {
+ "user": masq_user._asdict(),
+ "token": __dump_token__(with_db_connection(__masq__))
+ }
+ })
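An illustrative request against the endpoint above, assuming the masq blueprint is mounted at /user/masquerade and the caller holds a token with the `profile user masquerade` scopes; the URL, token and target user ID are placeholders.

# Illustrative only; URL, token and target user ID are placeholders.
import requests

resp = requests.post(
    "http://localhost:8080/user/masquerade/",
    headers={"Authorization": "Bearer <access-token>"},
    json={"masquerade_as": "00000000-0000-0000-0000-000000000000"})
masq_token = resp.json()["masquerade_as"]["token"]["access_token"]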
diff --git a/gn_auth/auth/authorisation/users/models.py b/gn_auth/auth/authorisation/users/models.py
new file mode 100644
index 0000000..89c1d22
--- /dev/null
+++ b/gn_auth/auth/authorisation/users/models.py
@@ -0,0 +1,66 @@
+"""Functions for acting on users."""
+import uuid
+from functools import reduce
+
+from gn3.auth import db
+from gn3.auth.authorisation.roles.models import Role
+from gn3.auth.authorisation.checks import authorised_p
+from gn3.auth.authorisation.privileges import Privilege
+
+from gn3.auth.authentication.users import User
+
+@authorised_p(
+ ("system:user:list",),
+ "You do not have the appropriate privileges to list users.",
+ oauth2_scope="profile user")
+def list_users(conn: db.DbConnection) -> tuple[User, ...]:
+ """List out all users."""
+ with db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM users")
+ return tuple(
+ User(uuid.UUID(row["user_id"]), row["email"], row["name"])
+ for row in cursor.fetchall())
+
+def __build_resource_roles__(rows):
+ def __build_roles__(roles, row):
+ role_id = uuid.UUID(row["role_id"])
+ priv = Privilege(row["privilege_id"], row["privilege_description"])
+ role = roles.get(role_id, Role(
+ role_id, row["role_name"], bool(row["user_editable"]), tuple()))
+ return {
+ **roles,
+ role_id: Role(role_id, role.role_name, role.user_editable, role.privileges + (priv,))
+ }
+ def __build__(acc, row):
+ resource_id = uuid.UUID(row["resource_id"])
+ return {
+ **acc,
+ resource_id: __build_roles__(acc.get(resource_id, {}), row)
+ }
+ return {
+ resource_id: tuple(roles.values())
+ for resource_id, roles in reduce(__build__, rows, {}).items()
+ }
+
+# @authorised_p(
+# ("",),
+# ("You do not have the appropriate privileges to view a user's roles on "
+# "resources."))
+def user_resource_roles(conn: db.DbConnection, user: User) -> dict[uuid.UUID, tuple[Role, ...]]:
+ """Fetch all the user's roles on resources."""
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "SELECT res.*, rls.*, p.*"
+ "FROM resources AS res INNER JOIN "
+ "group_user_roles_on_resources AS guror "
+ "ON res.resource_id=guror.resource_id "
+ "LEFT JOIN roles AS rls "
+ "ON guror.role_id=rls.role_id "
+ "LEFT JOIN role_privileges AS rp "
+ "ON rls.role_id=rp.role_id "
+ "LEFT JOIN privileges AS p "
+ "ON rp.privilege_id=p.privilege_id "
+ "WHERE guror.user_id = ?",
+ (str(user.user_id),))
+ return __build_resource_roles__(
+ (dict(row) for row in cursor.fetchall()))
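To make the reduction in `__build_resource_roles__` concrete, a single hypothetical row shaped like the query result above collapses to a mapping of resource UUIDs to tuples of `Role` objects carrying their privileges; the role and privilege values are taken from the migrations in this patch, while the resource ID is made up.

# Hypothetical input row; only the resource_id is invented.
rows = [{
    "resource_id": "11111111-2222-3333-4444-555555555555",
    "role_id": "a0e67630-d502-4b9f-b23f-6805d0f30e30",
    "role_name": "group-leader",
    "user_editable": 0,
    "privilege_id": "group:resource:view-resource",
    "privilege_description": "view a resource and use it in computations"}]
# __build_resource_roles__(rows) would produce, roughly:
# {UUID("11111111-2222-3333-4444-555555555555"): (
#      Role(UUID("a0e67630-d502-4b9f-b23f-6805d0f30e30"), "group-leader", False,
#           (Privilege("group:resource:view-resource",
#                      "view a resource and use it in computations"),)),)}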
diff --git a/gn_auth/auth/authorisation/users/views.py b/gn_auth/auth/authorisation/users/views.py
new file mode 100644
index 0000000..826e222
--- /dev/null
+++ b/gn_auth/auth/authorisation/users/views.py
@@ -0,0 +1,176 @@
+"""User authorisation endpoints."""
+import traceback
+from typing import Any
+from functools import partial
+
+import sqlite3
+from email_validator import validate_email, EmailNotValidError
+from flask import request, jsonify, Response, Blueprint, current_app
+
+from gn3.auth import db
+from gn3.auth.dictify import dictify
+from gn3.auth.db_utils import with_db_connection
+
+from .models import list_users
+from .masquerade.views import masq
+from .collections.views import collections
+
+from ..groups.models import user_group as _user_group
+from ..resources.models import user_resources as _user_resources
+from ..roles.models import assign_default_roles, user_roles as _user_roles
+from ..errors import (
+ NotFoundError, UsernameError, PasswordError, UserRegistrationError)
+
+from ...authentication.oauth2.resource_server import require_oauth
+from ...authentication.users import User, save_user, set_user_password
+from ...authentication.oauth2.models.oauth2token import token_by_access_token
+
+users = Blueprint("users", __name__)
+users.register_blueprint(masq, url_prefix="/masquerade")
+users.register_blueprint(collections, url_prefix="/collections")
+
+@users.route("/", methods=["GET"])
+@require_oauth("profile")
+def user_details() -> Response:
+ """Return user's details."""
+ with require_oauth.acquire("profile") as the_token:
+ user = the_token.user
+ user_dets = {
+ "user_id": user.user_id, "email": user.email, "name": user.name,
+ "group": False
+ }
+ with db.connection(current_app.config["AUTH_DB"]) as conn:
+ the_group = _user_group(conn, user).maybe(# type: ignore[misc]
+ False, lambda grp: grp)# type: ignore[arg-type]
+ return jsonify({
+ **user_dets,
+ "group": dictify(the_group) if the_group else False
+ })
+
+@users.route("/roles", methods=["GET"])
+@require_oauth("role")
+def user_roles() -> Response:
+ """Return the non-resource roles assigned to the user."""
+ with require_oauth.acquire("role") as token:
+ with db.connection(current_app.config["AUTH_DB"]) as conn:
+ return jsonify(tuple(
+ dictify(role) for role in _user_roles(conn, token.user)))
+
+def validate_password(password, confirm_password) -> str:
+ """Validate the provided password."""
+ if len(password) < 8:
+ raise PasswordError("The password must be at least 8 characters long.")
+
+ if password != confirm_password:
+ raise PasswordError("Mismatched password values")
+
+ return password
+
+def validate_username(name: str) -> str:
+    """Validate the provided name."""
+ if name == "":
+ raise UsernameError("User's name not provided.")
+
+ return name
+
+def __assert_not_logged_in__(conn: db.DbConnection):
+ bearer = request.headers.get('Authorization')
+ if bearer:
+ token = token_by_access_token(conn, bearer.split(None)[1]).maybe(# type: ignore[misc]
+ False, lambda tok: tok)
+ if token:
+ raise UserRegistrationError(
+ "Cannot register user while authenticated")
+
+@users.route("/register", methods=["POST"])
+def register_user() -> Response:
+ """Register a user."""
+ with db.connection(current_app.config["AUTH_DB"]) as conn:
+ __assert_not_logged_in__(conn)
+
+ try:
+ form = request.form
+ email = validate_email(form.get("email", "").strip(),
+ check_deliverability=True)
+ password = validate_password(
+ form.get("password", "").strip(),
+ form.get("confirm_password", "").strip())
+ user_name = validate_username(form.get("user_name", "").strip())
+ with db.cursor(conn) as cursor:
+ user, _hashed_password = set_user_password(
+ cursor, save_user(
+ cursor, email["email"], user_name), password)
+ assign_default_roles(cursor, user)
+ return jsonify(
+ {
+ "user_id": user.user_id,
+ "email": user.email,
+ "name": user.name
+ })
+ except sqlite3.IntegrityError as sq3ie:
+ current_app.logger.debug(traceback.format_exc())
+ raise UserRegistrationError(
+ "A user with that email already exists") from sq3ie
+ except EmailNotValidError as enve:
+ current_app.logger.debug(traceback.format_exc())
+            raise UserRegistrationError(f"Email Error: {str(enve)}") from enve
+
+ raise Exception(
+ "unknown_error", "The system experienced an unexpected error.")
+
+@users.route("/group", methods=["GET"])
+@require_oauth("profile group")
+def user_group() -> Response:
+ """Retrieve the group in which the user is a member."""
+ with require_oauth.acquire("profile group") as the_token:
+ db_uri = current_app.config["AUTH_DB"]
+ with db.connection(db_uri) as conn:
+ group = _user_group(conn, the_token.user).maybe(# type: ignore[misc]
+ False, lambda grp: grp)# type: ignore[arg-type]
+
+ if group:
+ return jsonify(dictify(group))
+ raise NotFoundError("User is not a member of any group.")
+
+@users.route("/resources", methods=["GET"])
+@require_oauth("profile resource")
+def user_resources() -> Response:
+ """Retrieve the resources a user has access to."""
+ with require_oauth.acquire("profile resource") as the_token:
+ db_uri = current_app.config["AUTH_DB"]
+ with db.connection(db_uri) as conn:
+ return jsonify([
+ dictify(resource) for resource in
+ _user_resources(conn, the_token.user)])
+
+@users.route("/group/join-request", methods=["GET"])
+@require_oauth("profile group")
+def user_join_request_exists():
+ """Check whether a user has an active group join request."""
+ def __request_exists__(conn: db.DbConnection, user: User) -> dict[str, Any]:
+ with db.cursor(conn) as cursor:
+ cursor.execute(
+ "SELECT * FROM group_join_requests WHERE requester_id=? AND "
+ "status = 'PENDING'",
+ (str(user.user_id),))
+ res = cursor.fetchone()
+ if res:
+ return {
+ "request_id": res["request_id"],
+ "exists": True
+ }
+            return {
+ "status": "Not found",
+ "exists": False
+ }
+ with require_oauth.acquire("profile group") as the_token:
+ return jsonify(with_db_connection(partial(
+ __request_exists__, user=the_token.user)))
+
+@users.route("/list", methods=["GET"])
+@require_oauth("profile user")
+def list_all_users() -> Response:
+ """List all the users."""
+    with require_oauth.acquire("profile user") as _the_token:
+ return jsonify(tuple(
+ dictify(user) for user in with_db_connection(list_users)))
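For illustration, registering a user against the endpoint above could look like the sketch below, assuming the users blueprint is mounted at /user; the host and credentials are placeholders. Note that the endpoint expects form data, not JSON.

# Illustrative only; host and credentials are placeholders.
import requests

resp = requests.post("http://localhost:8080/user/register", data={
    "email": "jane@example.com",
    "password": "a-strong-passw0rd",
    "confirm_password": "a-strong-passw0rd",
    "user_name": "Jane Doe"})
print(resp.json())  # {"user_id": ..., "email": ..., "name": ...}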
diff --git a/gn_auth/auth/db.py b/gn_auth/auth/db.py
new file mode 100644
index 0000000..2ba6619
--- /dev/null
+++ b/gn_auth/auth/db.py
@@ -0,0 +1,78 @@
+"""Handle connection to auth database."""
+import sqlite3
+import logging
+import contextlib
+from typing import Any, Callable, Iterator, Protocol
+
+import traceback
+
+class DbConnection(Protocol):
+ """Type annotation for a generic database connection object."""
+ def cursor(self) -> Any:
+ """A cursor object"""
+ ...
+
+ def commit(self) -> Any:
+ """Commit the transaction."""
+ ...
+
+ def rollback(self) -> Any:
+ """Rollback the transaction."""
+ ...
+
+class DbCursor(Protocol):
+ """Type annotation for a generic database cursor object."""
+ def execute(self, *args, **kwargs) -> Any:
+ """Execute a single query"""
+ ...
+
+ def executemany(self, *args, **kwargs) -> Any:
+ """
+ Execute parameterized SQL statement sql against all parameter sequences
+ or mappings found in the sequence parameters.
+ """
+ ...
+
+ def fetchone(self, *args, **kwargs):
+ """Fetch single result if present, or `None`."""
+ ...
+
+ def fetchmany(self, *args, **kwargs):
+ """Fetch many results if present or `None`."""
+ ...
+
+ def fetchall(self, *args, **kwargs):
+ """Fetch all results if present or `None`."""
+ ...
+
+@contextlib.contextmanager
+def connection(db_path: str, row_factory: Callable = sqlite3.Row) -> Iterator[DbConnection]:
+ """Create the connection to the auth database."""
+ logging.debug("SQLite3 DB Path: '%s'.", db_path)
+ conn = sqlite3.connect(db_path)
+ conn.row_factory = row_factory
+ conn.set_trace_callback(logging.debug)
+ conn.execute("PRAGMA foreign_keys = ON")
+ try:
+ yield conn
+ except sqlite3.Error as exc:
+ conn.rollback()
+ logging.debug(traceback.format_exc())
+ raise exc
+ finally:
+ conn.commit()
+ conn.close()
+
+@contextlib.contextmanager
+def cursor(conn: DbConnection) -> Iterator[DbCursor]:
+ """Get a cursor from the given connection to the auth database."""
+ cur = conn.cursor()
+ try:
+ yield cur
+ except sqlite3.Error as exc:
+ conn.rollback()
+ logging.debug(traceback.format_exc())
+ raise exc
+ finally:
+ conn.commit()
+ cur.close()
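A minimal sketch of the two context managers above used together; the database path is an assumption and the module path mirrors this patch's layout.

# Illustrative usage; the database path is an assumption.
from gn_auth.auth import db

with db.connection("/tmp/auth.db") as conn:
    with db.cursor(conn) as cur:
        cur.execute("SELECT user_id, email, name FROM users LIMIT 5")
        for row in cur.fetchall():  # rows come back as sqlite3.Row by default
            print(dict(row))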
diff --git a/gn_auth/auth/db_utils.py b/gn_auth/auth/db_utils.py
new file mode 100644
index 0000000..c06b026
--- /dev/null
+++ b/gn_auth/auth/db_utils.py
@@ -0,0 +1,14 @@
+"""Some common auth db utilities"""
+from typing import Any, Callable
+from flask import current_app
+
+from . import db
+
+def with_db_connection(func: Callable[[db.DbConnection], Any]) -> Any:
+ """
+    Call `func`, a function that takes a single database connection argument,
+    with a connection to the auth database, and return its result.
+ """
+ db_uri = current_app.config["AUTH_DB"]
+ with db.connection(db_uri) as conn:
+ return func(conn)
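As the views in this patch do, `with_db_connection` pairs with `functools.partial` to bind extra arguments. The sketch below assumes an active Flask application context with `AUTH_DB` configured; `count_users` is a hypothetical helper.

# Sketch only; requires a Flask app context with AUTH_DB set.
from functools import partial

from gn_auth.auth import db
from gn_auth.auth.db_utils import with_db_connection

def count_users(conn: db.DbConnection, like: str = "%") -> int:
    """Hypothetical helper counting users whose email matches `like`."""
    with db.cursor(conn) as cur:
        cur.execute("SELECT COUNT(*) AS cnt FROM users WHERE email LIKE ?",
                    (like,))
        return int(cur.fetchone()["cnt"])

total = with_db_connection(partial(count_users, like="%@example.com"))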
diff --git a/gn_auth/auth/dictify.py b/gn_auth/auth/dictify.py
new file mode 100644
index 0000000..f9337f6
--- /dev/null
+++ b/gn_auth/auth/dictify.py
@@ -0,0 +1,12 @@
+"""Module for dictifying objects"""
+
+from typing import Any, Protocol
+
+class Dictifiable(Protocol):# pylint: disable=[too-few-public-methods]
+ """Type annotation for generic object with a `dictify` method."""
+ def dictify(self):
+ """Convert the object to a dict"""
+
+def dictify(obj: Dictifiable) -> dict[str, Any]:
+ """Turn `obj` to a dict representation."""
+ return obj.dictify()
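A tiny, hypothetical example of a type satisfying the `Dictifiable` protocol above.

# Hypothetical Dictifiable implementation.
from typing import NamedTuple

from gn_auth.auth.dictify import dictify

class Colour(NamedTuple):
    name: str
    hex_code: str

    def dictify(self) -> dict:
        return {"name": self.name, "hex_code": self.hex_code}

assert dictify(Colour("teal", "#008080")) == {
    "name": "teal", "hex_code": "#008080"}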
diff --git a/gn_auth/auth/views.py b/gn_auth/auth/views.py
new file mode 100644
index 0000000..56eace7
--- /dev/null
+++ b/gn_auth/auth/views.py
@@ -0,0 +1,21 @@
+"""The Auth(oris|entic)ation routes/views"""
+from flask import Blueprint
+
+from .authentication.oauth2.views import auth
+
+from .authorisation.data.views import data
+from .authorisation.users.views import users
+from .authorisation.users.admin import admin
+from .authorisation.roles.views import roles
+from .authorisation.groups.views import groups
+from .authorisation.resources.views import resources
+
+oauth2 = Blueprint("oauth2", __name__)
+
+oauth2.register_blueprint(auth, url_prefix="/")
+oauth2.register_blueprint(data, url_prefix="/data")
+oauth2.register_blueprint(users, url_prefix="/user")
+oauth2.register_blueprint(roles, url_prefix="/role")
+oauth2.register_blueprint(admin, url_prefix="/admin")
+oauth2.register_blueprint(groups, url_prefix="/group")
+oauth2.register_blueprint(resources, url_prefix="/resource")
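For context, a sketch of how the aggregated `oauth2` blueprint might be attached to a Flask application; the URL prefix and configuration values are assumptions, not fixed by this patch.

# Sketch; the prefix and configuration values are assumptions.
from flask import Flask

from gn_auth.auth.views import oauth2

app = Flask(__name__)
app.config["AUTH_DB"] = "/tmp/auth.db"
app.config["REDIS_URI"] = "redis://localhost:6379/0"
app.register_blueprint(oauth2, url_prefix="/api/oauth2")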
diff --git a/migrations/auth/20221103_01_js9ub-initialise-the-auth-entic-oris-ation-database.py b/migrations/auth/20221103_01_js9ub-initialise-the-auth-entic-oris-ation-database.py
new file mode 100644
index 0000000..d511f5d
--- /dev/null
+++ b/migrations/auth/20221103_01_js9ub-initialise-the-auth-entic-oris-ation-database.py
@@ -0,0 +1,19 @@
+"""
+Initialise the auth(entic|oris)ation database.
+"""
+
+from yoyo import step
+
+__depends__ = {} # type: ignore[var-annotated]
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS users(
+ user_id TEXT PRIMARY KEY NOT NULL,
+ email TEXT UNIQUE NOT NULL,
+ name TEXT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS users")
+]
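This and the migration modules that follow use the standard yoyo-migrations layout; as a sketch, they could be applied with yoyo's Python API roughly as below (the database URI is an assumption).

# Sketch of applying the migrations programmatically; the URI is an assumption.
from yoyo import get_backend, read_migrations

backend = get_backend("sqlite:///auth.db")
migrations = read_migrations("migrations/auth")
with backend.lock():
    backend.apply_migrations(backend.to_apply(migrations))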
diff --git a/migrations/auth/20221103_02_sGrIs-create-user-credentials-table.py b/migrations/auth/20221103_02_sGrIs-create-user-credentials-table.py
new file mode 100644
index 0000000..48bd663
--- /dev/null
+++ b/migrations/auth/20221103_02_sGrIs-create-user-credentials-table.py
@@ -0,0 +1,20 @@
+"""
+create user_credentials table
+"""
+
+from yoyo import step
+
+__depends__ = {'20221103_01_js9ub-initialise-the-auth-entic-oris-ation-database'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS user_credentials(
+ user_id TEXT PRIMARY KEY,
+ password TEXT NOT NULL,
+ FOREIGN KEY(user_id) REFERENCES users(user_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS user_credentials")
+]
diff --git a/migrations/auth/20221108_01_CoxYh-create-the-groups-table.py b/migrations/auth/20221108_01_CoxYh-create-the-groups-table.py
new file mode 100644
index 0000000..29f92d4
--- /dev/null
+++ b/migrations/auth/20221108_01_CoxYh-create-the-groups-table.py
@@ -0,0 +1,19 @@
+"""
+Create the groups table
+"""
+
+from yoyo import step
+
+__depends__ = {'20221103_02_sGrIs-create-user-credentials-table'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS groups(
+ group_id TEXT PRIMARY KEY NOT NULL,
+ group_name TEXT NOT NULL,
+ group_metadata TEXT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS groups")
+]
diff --git a/migrations/auth/20221108_02_wxTr9-create-privileges-table.py b/migrations/auth/20221108_02_wxTr9-create-privileges-table.py
new file mode 100644
index 0000000..67720b2
--- /dev/null
+++ b/migrations/auth/20221108_02_wxTr9-create-privileges-table.py
@@ -0,0 +1,18 @@
+"""
+Create privileges table
+"""
+
+from yoyo import step
+
+__depends__ = {'20221108_01_CoxYh-create-the-groups-table'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE privileges(
+ privilege_id TEXT PRIMARY KEY,
+ privilege_name TEXT NOT NULL
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS privileges")
+]
diff --git a/migrations/auth/20221108_03_Pbhb1-create-resource-categories-table.py b/migrations/auth/20221108_03_Pbhb1-create-resource-categories-table.py
new file mode 100644
index 0000000..ce752ef
--- /dev/null
+++ b/migrations/auth/20221108_03_Pbhb1-create-resource-categories-table.py
@@ -0,0 +1,19 @@
+"""
+Create resource_categories table
+"""
+
+from yoyo import step
+
+__depends__ = {'20221108_02_wxTr9-create-privileges-table'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE resource_categories(
+ resource_category_id TEXT PRIMARY KEY,
+ resource_category_key TEXT NOT NULL,
+ resource_category_description TEXT NOT NULL
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS resource_categories")
+]
diff --git a/migrations/auth/20221108_04_CKcSL-init-data-in-resource-categories-table.py b/migrations/auth/20221108_04_CKcSL-init-data-in-resource-categories-table.py
new file mode 100644
index 0000000..76ffbef
--- /dev/null
+++ b/migrations/auth/20221108_04_CKcSL-init-data-in-resource-categories-table.py
@@ -0,0 +1,25 @@
+"""
+Init data in resource_categories table
+"""
+
+from yoyo import step
+
+__depends__ = {'20221108_03_Pbhb1-create-resource-categories-table'}
+
+steps = [
+ step(
+ """
+ INSERT INTO resource_categories VALUES
+ ('fad071a3-2fc8-40b8-992b-cdefe7dcac79', 'mrna', 'mRNA Dataset'),
+ ('548d684b-d4d1-46fb-a6d3-51a56b7da1b3', 'phenotype', 'Phenotype (Publish) Dataset'),
+ ('48056f84-a2a6-41ac-8319-0e1e212cba2a', 'genotype', 'Genotype Dataset')
+ """,
+ """
+ DELETE FROM resource_categories WHERE resource_category_id IN
+ (
+ 'fad071a3-2fc8-40b8-992b-cdefe7dcac79',
+ '548d684b-d4d1-46fb-a6d3-51a56b7da1b3',
+ '48056f84-a2a6-41ac-8319-0e1e212cba2a'
+ )
+ """)
+]
diff --git a/migrations/auth/20221109_01_HbD5F-add-resource-meta-field-to-resource-categories-field.py b/migrations/auth/20221109_01_HbD5F-add-resource-meta-field-to-resource-categories-field.py
new file mode 100644
index 0000000..6c829b1
--- /dev/null
+++ b/migrations/auth/20221109_01_HbD5F-add-resource-meta-field-to-resource-categories-field.py
@@ -0,0 +1,17 @@
+"""
+Add 'resource_meta' field to 'resource_categories' field.
+"""
+
+from yoyo import step
+
+__depends__ = {'20221108_04_CKcSL-init-data-in-resource-categories-table'}
+
+steps = [
+ step(
+ """
+ ALTER TABLE resource_categories
+ ADD COLUMN
+ resource_meta TEXT NOT NULL DEFAULT '[]'
+ """,
+ "ALTER TABLE resource_categories DROP COLUMN resource_meta")
+]
diff --git a/migrations/auth/20221110_01_WtZ1I-create-resources-table.py b/migrations/auth/20221110_01_WtZ1I-create-resources-table.py
new file mode 100644
index 0000000..abc8895
--- /dev/null
+++ b/migrations/auth/20221110_01_WtZ1I-create-resources-table.py
@@ -0,0 +1,26 @@
+"""
+Create 'resources' table
+"""
+
+from yoyo import step
+
+__depends__ = {'20221109_01_HbD5F-add-resource-meta-field-to-resource-categories-field'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS resources(
+ group_id TEXT NOT NULL,
+ resource_id TEXT NOT NULL,
+ resource_name TEXT NOT NULL UNIQUE,
+ resource_category_id TEXT NOT NULL,
+ PRIMARY KEY(group_id, resource_id),
+ FOREIGN KEY(group_id) REFERENCES groups(group_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT,
+ FOREIGN KEY(resource_category_id)
+ REFERENCES resource_categories(resource_category_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS resources")
+]
diff --git a/migrations/auth/20221110_05_BaNtL-create-roles-table.py b/migrations/auth/20221110_05_BaNtL-create-roles-table.py
new file mode 100644
index 0000000..51e19e8
--- /dev/null
+++ b/migrations/auth/20221110_05_BaNtL-create-roles-table.py
@@ -0,0 +1,19 @@
+"""
+Create 'roles' table
+"""
+
+from yoyo import step
+
+__depends__ = {'20221110_01_WtZ1I-create-resources-table'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS roles(
+ role_id TEXT NOT NULL PRIMARY KEY,
+ role_name TEXT NOT NULL,
+ user_editable INTEGER NOT NULL DEFAULT 1 CHECK (user_editable=0 or user_editable=1)
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS roles")
+]
diff --git a/migrations/auth/20221110_06_Pq2kT-create-generic-roles-table.py b/migrations/auth/20221110_06_Pq2kT-create-generic-roles-table.py
new file mode 100644
index 0000000..2b55c2b
--- /dev/null
+++ b/migrations/auth/20221110_06_Pq2kT-create-generic-roles-table.py
@@ -0,0 +1,24 @@
+"""
+Create 'generic_roles' table
+
+The roles in this table will be template roles, defining some common roles that
+can be used within the groups.
+
+They could also be used to define system-level roles, though those will not be
+provided to the "common" users.
+"""
+
+from yoyo import step
+
+__depends__ = {'20221110_05_BaNtL-create-roles-table'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS generic_roles(
+ role_id TEXT PRIMARY KEY,
+ role_name TEXT NOT NULL
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS generic_roles")
+]
diff --git a/migrations/auth/20221110_07_7WGa1-create-role-privileges-table.py b/migrations/auth/20221110_07_7WGa1-create-role-privileges-table.py
new file mode 100644
index 0000000..0d0eeb9
--- /dev/null
+++ b/migrations/auth/20221110_07_7WGa1-create-role-privileges-table.py
@@ -0,0 +1,29 @@
+"""
+Create 'role_privileges' table
+"""
+
+from yoyo import step
+
+__depends__ = {'20221110_06_Pq2kT-create-generic-roles-table'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS role_privileges(
+ role_id TEXT NOT NULL,
+ privilege_id TEXT NOT NULL,
+ PRIMARY KEY(role_id, privilege_id),
+ FOREIGN KEY(role_id) REFERENCES roles(role_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT,
+ FOREIGN KEY(privilege_id) REFERENCES privileges(privilege_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS role_privileges"),
+ step(
+ """
+ CREATE INDEX IF NOT EXISTS idx_tbl_role_privileges_cols_role_id
+ ON role_privileges(role_id)
+ """,
+ "DROP INDEX IF EXISTS idx_tbl_role_privileges_cols_role_id")
+]
diff --git a/migrations/auth/20221110_08_23psB-add-privilege-category-and-privilege-description-columns-to-privileges-table.py b/migrations/auth/20221110_08_23psB-add-privilege-category-and-privilege-description-columns-to-privileges-table.py
new file mode 100644
index 0000000..077182b
--- /dev/null
+++ b/migrations/auth/20221110_08_23psB-add-privilege-category-and-privilege-description-columns-to-privileges-table.py
@@ -0,0 +1,22 @@
+"""
+Add 'privilege_category' and 'privilege_description' columns to 'privileges' table
+"""
+
+from yoyo import step
+
+__depends__ = {'20221110_07_7WGa1-create-role-privileges-table'}
+
+steps = [
+ step(
+ """
+ ALTER TABLE privileges ADD COLUMN
+ privilege_category TEXT NOT NULL DEFAULT 'common'
+ """,
+ "ALTER TABLE privileges DROP COLUMN privilege_category"),
+ step(
+ """
+ ALTER TABLE privileges ADD COLUMN
+ privilege_description TEXT
+ """,
+ "ALTER TABLE privileges DROP COLUMN privilege_description")
+]
diff --git a/migrations/auth/20221113_01_7M0hv-enumerate-initial-privileges.py b/migrations/auth/20221113_01_7M0hv-enumerate-initial-privileges.py
new file mode 100644
index 0000000..072f226
--- /dev/null
+++ b/migrations/auth/20221113_01_7M0hv-enumerate-initial-privileges.py
@@ -0,0 +1,66 @@
+"""
+Enumerate initial privileges
+"""
+
+from yoyo import step
+
+__depends__ = {'20221110_08_23psB-add-privilege-category-and-privilege-description-columns-to-privileges-table'}
+
+steps = [
+ step(
+ """
+ INSERT INTO
+ privileges(privilege_id, privilege_name, privilege_category,
+ privilege_description)
+ VALUES
+ -- group-management privileges
+ ('4842e2aa-38b9-4349-805e-0a99a9cf8bff', 'create-group',
+ 'group-management', 'Create a group'),
+ ('3ebfe79c-d159-4629-8b38-772cf4bc2261', 'view-group',
+ 'group-management', 'View the details of a group'),
+ ('52576370-b3c7-4e6a-9f7e-90e9dbe24d8f', 'edit-group',
+ 'group-management', 'Edit the details of a group'),
+ ('13ec2a94-4f1a-442d-aad2-936ad6dd5c57', 'delete-group',
+ 'group-management', 'Delete a group'),
+ ('ae4add8c-789a-4d11-a6e9-a306470d83d9', 'add-group-member',
+ 'group-management', 'Add a user to a group'),
+ ('f1bd3f42-567e-4965-9643-6d1a52ddee64', 'remove-group-member',
+ 'group-management', 'Remove a user from a group'),
+ ('80f11285-5079-4ec0-907c-06509f88a364', 'assign-group-leader',
+ 'group-management', 'Assign user group-leader privileges'),
+ ('d4afe2b3-4ca0-4edd-b37d-966535b5e5bd',
+ 'transfer-group-leadership', 'group-management',
+ 'Transfer leadership of the group to some other member'),
+
+ -- resource-management privileges
+ ('aa25b32a-bff2-418d-b0a2-e26b4a8f089b', 'create-resource',
+ 'resource-management', 'Create a resource object'),
+ ('7f261757-3211-4f28-a43f-a09b800b164d', 'view-resource',
+ 'resource-management', 'view a resource and use it in computations'),
+ ('2f980855-959b-4339-b80e-25d1ec286e21', 'edit-resource',
+ 'resource-management', 'edit/update a resource'),
+ ('d2a070fd-e031-42fb-ba41-d60cf19e5d6d', 'delete-resource',
+ 'resource-management', 'Delete a resource'),
+
+ -- role-management privileges
+ ('221660b1-df05-4be1-b639-f010269dbda9', 'create-role',
+ 'role-management', 'Create a new role'),
+ ('7bcca363-cba9-4169-9e31-26bdc6179b28', 'edit-role',
+ 'role-management', 'edit/update an existing role'),
+ ('5103cc68-96f8-4ebb-83a4-a31692402c9b', 'assign-role',
+ 'role-management', 'Assign a role to an existing user'),
+ ('1c59eff5-9336-4ed2-a166-8f70d4cb012e', 'delete-role',
+ 'role-management', 'Delete an existing role'),
+
+ -- user-management privileges
+ ('e7252301-6ee0-43ba-93ef-73b607cf06f6', 'reset-any-password',
+ 'user-management', 'Reset the password for any user'),
+ ('1fe61370-cae9-4983-bd6c-ce61050c510f', 'delete-any-user',
+ 'user-management', 'Delete any user from the system'),
+
+    -- system-admin privileges
+ ('519db546-d44e-4fdc-9e4e-25aa67548ab3', 'masquerade',
+ 'system-admin', 'Masquerade as some other user')
+ """,
+ "DELETE FROM privileges")
+]
diff --git a/migrations/auth/20221114_01_n8gsF-create-generic-role-privileges-table.py b/migrations/auth/20221114_01_n8gsF-create-generic-role-privileges-table.py
new file mode 100644
index 0000000..2048f4a
--- /dev/null
+++ b/migrations/auth/20221114_01_n8gsF-create-generic-role-privileges-table.py
@@ -0,0 +1,35 @@
+"""
+Create 'generic_role_privileges' table
+
+This table links the generic_roles to the privileges they provide
+"""
+
+from yoyo import step
+
+__depends__ = {'20221113_01_7M0hv-enumerate-initial-privileges'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS generic_role_privileges(
+ generic_role_id TEXT NOT NULL,
+ privilege_id TEXT NOT NULL,
+ PRIMARY KEY(generic_role_id, privilege_id),
+ FOREIGN KEY(generic_role_id) REFERENCES generic_roles(role_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT,
+ FOREIGN KEY(privilege_id) REFERENCES privileges(privilege_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS generic_role_privileges"),
+ step(
+ """
+ CREATE INDEX IF NOT EXISTS
+ idx_tbl_generic_role_privileges_cols_generic_role_id
+ ON generic_role_privileges(generic_role_id)
+ """,
+ """
+ DROP INDEX IF EXISTS
+ idx_tbl_generic_role_privileges_cols_generic_role_id
+ """)
+]
diff --git a/migrations/auth/20221114_02_DKKjn-drop-generic-role-tables.py b/migrations/auth/20221114_02_DKKjn-drop-generic-role-tables.py
new file mode 100644
index 0000000..6bd101b
--- /dev/null
+++ b/migrations/auth/20221114_02_DKKjn-drop-generic-role-tables.py
@@ -0,0 +1,41 @@
+"""
+Drop 'generic_role*' tables
+"""
+
+from yoyo import step
+
+__depends__ = {'20221114_01_n8gsF-create-generic-role-privileges-table'}
+
+steps = [
+ step(
+ """
+ DROP INDEX IF EXISTS
+ idx_tbl_generic_role_privileges_cols_generic_role_id
+ """,
+ """
+ CREATE INDEX IF NOT EXISTS
+ idx_tbl_generic_role_privileges_cols_generic_role_id
+ ON generic_role_privileges(generic_role_id)
+ """),
+ step(
+ "DROP TABLE IF EXISTS generic_role_privileges",
+ """
+ CREATE TABLE IF NOT EXISTS generic_role_privileges(
+ generic_role_id TEXT NOT NULL,
+ privilege_id TEXT NOT NULL,
+ PRIMARY KEY(generic_role_id, privilege_id),
+ FOREIGN KEY(generic_role_id) REFERENCES generic_roles(role_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT,
+ FOREIGN KEY(privilege_id) REFERENCES privileges(privilege_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """),
+ step(
+ "DROP TABLE IF EXISTS generic_roles",
+ """
+ CREATE TABLE IF NOT EXISTS generic_roles(
+ role_id TEXT PRIMARY KEY,
+ role_name TEXT NOT NULL
+ ) WITHOUT ROWID
+ """)
+]
diff --git a/migrations/auth/20221114_03_PtWjc-create-group-roles-table.py b/migrations/auth/20221114_03_PtWjc-create-group-roles-table.py
new file mode 100644
index 0000000..a7e7b45
--- /dev/null
+++ b/migrations/auth/20221114_03_PtWjc-create-group-roles-table.py
@@ -0,0 +1,29 @@
+"""
+Create 'group_roles' table
+"""
+
+from yoyo import step
+
+__depends__ = {'20221114_02_DKKjn-drop-generic-role-tables'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS group_roles(
+ group_id TEXT NOT NULL,
+ role_id TEXT NOT NULL,
+ PRIMARY KEY(group_id, role_id),
+ FOREIGN KEY(group_id) REFERENCES groups(group_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT,
+ FOREIGN KEY(role_id) REFERENCES roles(role_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS group_roles"),
+ step(
+ """
+ CREATE INDEX IF NOT EXISTS idx_tbl_group_roles_cols_group_id
+ ON group_roles(group_id)
+ """,
+ "DROP INDEX IF EXISTS idx_tbl_group_roles_cols_group_id")
+]
diff --git a/migrations/auth/20221114_04_tLUzB-initialise-basic-roles.py b/migrations/auth/20221114_04_tLUzB-initialise-basic-roles.py
new file mode 100644
index 0000000..386f481
--- /dev/null
+++ b/migrations/auth/20221114_04_tLUzB-initialise-basic-roles.py
@@ -0,0 +1,56 @@
+"""
+Initialise basic roles
+"""
+
+from yoyo import step
+
+__depends__ = {'20221114_03_PtWjc-create-group-roles-table'}
+
+steps = [
+ step(
+ """
+ INSERT INTO roles(role_id, role_name, user_editable) VALUES
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30', 'group-leader', '0'),
+ ('522e4d40-aefc-4a64-b7e0-768b8be517ee', 'resource-owner', '0')
+ """,
+ "DELETE FROM roles"),
+ step(
+ """
+ INSERT INTO role_privileges(role_id, privilege_id)
+ VALUES
+ -- group-management
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ '4842e2aa-38b9-4349-805e-0a99a9cf8bff'),
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ '3ebfe79c-d159-4629-8b38-772cf4bc2261'),
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ '52576370-b3c7-4e6a-9f7e-90e9dbe24d8f'),
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ '13ec2a94-4f1a-442d-aad2-936ad6dd5c57'),
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ 'ae4add8c-789a-4d11-a6e9-a306470d83d9'),
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ 'f1bd3f42-567e-4965-9643-6d1a52ddee64'),
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ 'd4afe2b3-4ca0-4edd-b37d-966535b5e5bd'),
+
+ -- resource-management
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ 'aa25b32a-bff2-418d-b0a2-e26b4a8f089b'),
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ '7f261757-3211-4f28-a43f-a09b800b164d'),
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ '2f980855-959b-4339-b80e-25d1ec286e21'),
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ 'd2a070fd-e031-42fb-ba41-d60cf19e5d6d'),
+ ('522e4d40-aefc-4a64-b7e0-768b8be517ee',
+ 'aa25b32a-bff2-418d-b0a2-e26b4a8f089b'),
+ ('522e4d40-aefc-4a64-b7e0-768b8be517ee',
+ '7f261757-3211-4f28-a43f-a09b800b164d'),
+ ('522e4d40-aefc-4a64-b7e0-768b8be517ee',
+ '2f980855-959b-4339-b80e-25d1ec286e21'),
+ ('522e4d40-aefc-4a64-b7e0-768b8be517ee',
+ 'd2a070fd-e031-42fb-ba41-d60cf19e5d6d')
+ """,
+ "DELETE FROM role_privileges")
+]
diff --git a/migrations/auth/20221114_05_hQun6-create-user-roles-table.py b/migrations/auth/20221114_05_hQun6-create-user-roles-table.py
new file mode 100644
index 0000000..e0de751
--- /dev/null
+++ b/migrations/auth/20221114_05_hQun6-create-user-roles-table.py
@@ -0,0 +1,29 @@
+"""
+Create 'user_roles' table.
+"""
+
+from yoyo import step
+
+__depends__ = {'20221114_04_tLUzB-initialise-basic-roles'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS user_roles(
+ user_id TEXT NOT NULL,
+ role_id TEXT NOT NULL,
+ PRIMARY KEY(user_id, role_id),
+ FOREIGN KEY(user_id) REFERENCES users(user_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT,
+ FOREIGN KEY(role_id) REFERENCES roles(role_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS user_roles"),
+ step(
+ """
+ CREATE INDEX IF NOT EXISTS idx_tbl_user_roles_cols_user_id
+ ON user_roles(user_id)
+ """,
+ "DROP INDEX IF EXISTS idx_tbl_user_roles_cols_user_id")
+]
diff --git a/migrations/auth/20221116_01_nKUmX-add-privileges-to-group-leader-role.py b/migrations/auth/20221116_01_nKUmX-add-privileges-to-group-leader-role.py
new file mode 100644
index 0000000..2e4ae28
--- /dev/null
+++ b/migrations/auth/20221116_01_nKUmX-add-privileges-to-group-leader-role.py
@@ -0,0 +1,35 @@
+"""
+Add privileges to 'group-leader' role.
+"""
+
+from yoyo import step
+
+__depends__ = {'20221114_05_hQun6-create-user-roles-table'}
+
+steps = [
+ step(
+ """
+ INSERT INTO role_privileges(role_id, privilege_id)
+ VALUES
+ -- role management
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ '221660b1-df05-4be1-b639-f010269dbda9'),
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ '7bcca363-cba9-4169-9e31-26bdc6179b28'),
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ '5103cc68-96f8-4ebb-83a4-a31692402c9b'),
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ '1c59eff5-9336-4ed2-a166-8f70d4cb012e')
+ """,
+ """
+ DELETE FROM role_privileges
+ WHERE
+ role_id='a0e67630-d502-4b9f-b23f-6805d0f30e30'
+ AND privilege_id IN (
+ '221660b1-df05-4be1-b639-f010269dbda9',
+ '7bcca363-cba9-4169-9e31-26bdc6179b28',
+ '5103cc68-96f8-4ebb-83a4-a31692402c9b',
+ '1c59eff5-9336-4ed2-a166-8f70d4cb012e'
+ )
+ """)
+]
diff --git a/migrations/auth/20221117_01_RDlfx-modify-group-roles-add-group-role-id.py b/migrations/auth/20221117_01_RDlfx-modify-group-roles-add-group-role-id.py
new file mode 100644
index 0000000..a4d7806
--- /dev/null
+++ b/migrations/auth/20221117_01_RDlfx-modify-group-roles-add-group-role-id.py
@@ -0,0 +1,52 @@
+"""
+Modify 'group_roles': add 'group_role_id'
+
+At this point, there is no data in the `group_roles` table and therefore, it
+should be safe to simply recreate it.
+"""
+
+from yoyo import step
+
+__depends__ = {'20221116_01_nKUmX-add-privileges-to-group-leader-role'}
+
+steps = [
+ step(
+ "DROP INDEX IF EXISTS idx_tbl_group_roles_cols_group_id",
+ """
+ CREATE INDEX IF NOT EXISTS idx_tbl_group_roles_cols_group_id
+ ON group_roles(group_id)
+ """),
+ step(
+ "DROP TABLE IF EXISTS group_roles",
+ """
+ CREATE TABLE IF NOT EXISTS group_roles(
+ group_id TEXT NOT NULL,
+ role_id TEXT NOT NULL,
+ PRIMARY KEY(group_id, role_id),
+ FOREIGN KEY(group_id) REFERENCES groups(group_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT,
+ FOREIGN KEY(role_id) REFERENCES roles(role_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """),
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS group_roles(
+ group_role_id TEXT PRIMARY KEY,
+ group_id TEXT NOT NULL,
+ role_id TEXT NOT NULL,
+ UNIQUE (group_id, role_id),
+ FOREIGN KEY(group_id) REFERENCES groups(group_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT,
+ FOREIGN KEY(role_id) REFERENCES roles(role_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS group_roles"),
+ step(
+ """
+ CREATE INDEX IF NOT EXISTS idx_tbl_group_roles_cols_group_id
+ ON group_roles(group_id)
+ """,
+ "DROP INDEX IF EXISTS idx_tbl_group_roles_cols_group_id")
+]
diff --git a/migrations/auth/20221117_02_fmuZh-create-group-users-table.py b/migrations/auth/20221117_02_fmuZh-create-group-users-table.py
new file mode 100644
index 0000000..92885ef
--- /dev/null
+++ b/migrations/auth/20221117_02_fmuZh-create-group-users-table.py
@@ -0,0 +1,25 @@
+"""
+Create 'group_users' table.
+"""
+
+from yoyo import step
+
+__depends__ = {'20221117_01_RDlfx-modify-group-roles-add-group-role-id'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS group_users(
+ group_id TEXT NOT NULL,
+ user_id TEXT NOT NULL UNIQUE, -- user can only be in one group
+ PRIMARY KEY(group_id, user_id)
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS group_users"),
+ step(
+ """
+ CREATE INDEX IF NOT EXISTS tbl_group_users_cols_group_id
+ ON group_users(group_id)
+ """,
+ "DROP INDEX IF EXISTS tbl_group_users_cols_group_id")
+]
diff --git a/migrations/auth/20221206_01_BbeF9-create-group-user-roles-on-resources-table.py b/migrations/auth/20221206_01_BbeF9-create-group-user-roles-on-resources-table.py
new file mode 100644
index 0000000..9aa3667
--- /dev/null
+++ b/migrations/auth/20221206_01_BbeF9-create-group-user-roles-on-resources-table.py
@@ -0,0 +1,39 @@
+"""
+Create 'group_user_roles_on_resources' table
+"""
+
+from yoyo import step
+
+__depends__ = {'20221117_02_fmuZh-create-group-users-table'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE group_user_roles_on_resources (
+ group_id TEXT NOT NULL,
+ user_id TEXT NOT NULL,
+ role_id TEXT NOT NULL,
+ resource_id TEXT NOT NULL,
+ PRIMARY KEY (group_id, user_id, role_id, resource_id),
+ FOREIGN KEY (user_id)
+ REFERENCES users(user_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT,
+ FOREIGN KEY (group_id, role_id)
+ REFERENCES group_roles(group_id, role_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT,
+ FOREIGN KEY (group_id, resource_id)
+ REFERENCES resources(group_id, resource_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS group_user_roles_on_resources"),
+ step(
+ """
+ CREATE INDEX IF NOT EXISTS
+ idx_tbl_group_user_roles_on_resources_group_user_resource
+ ON group_user_roles_on_resources(group_id, user_id, resource_id)
+ """,
+ """
+ DROP INDEX IF EXISTS
+ idx_tbl_group_user_roles_on_resources_group_user_resource""")
+]
diff --git a/migrations/auth/20221208_01_sSdHz-add-public-column-to-resources-table.py b/migrations/auth/20221208_01_sSdHz-add-public-column-to-resources-table.py
new file mode 100644
index 0000000..2238069
--- /dev/null
+++ b/migrations/auth/20221208_01_sSdHz-add-public-column-to-resources-table.py
@@ -0,0 +1,16 @@
+"""
+Add 'public' column to 'resources' table
+"""
+
+from yoyo import step
+
+__depends__ = {'20221206_01_BbeF9-create-group-user-roles-on-resources-table'}
+
+steps = [
+ step(
+ """
+ ALTER TABLE resources ADD COLUMN
+ public INTEGER NOT NULL DEFAULT 0 CHECK (public=0 or public=1)
+ """,
+ "ALTER TABLE resources DROP COLUMN public")
+]
diff --git a/migrations/auth/20221219_01_CI3tN-create-oauth2-clients-table.py b/migrations/auth/20221219_01_CI3tN-create-oauth2-clients-table.py
new file mode 100644
index 0000000..475be01
--- /dev/null
+++ b/migrations/auth/20221219_01_CI3tN-create-oauth2-clients-table.py
@@ -0,0 +1,25 @@
+"""
+create oauth2_clients table
+"""
+
+from yoyo import step
+
+__depends__ = {'20221208_01_sSdHz-add-public-column-to-resources-table'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS oauth2_clients(
+ client_id TEXT NOT NULL,
+ client_secret TEXT NOT NULL,
+ client_id_issued_at INTEGER NOT NULL,
+ client_secret_expires_at INTEGER NOT NULL,
+ client_metadata TEXT,
+ user_id TEXT NOT NULL,
+ PRIMARY KEY(client_id),
+ FOREIGN KEY(user_id) REFERENCES users(user_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS oauth2_clients")
+]
diff --git a/migrations/auth/20221219_02_buSEU-create-oauth2-tokens-table.py b/migrations/auth/20221219_02_buSEU-create-oauth2-tokens-table.py
new file mode 100644
index 0000000..778282b
--- /dev/null
+++ b/migrations/auth/20221219_02_buSEU-create-oauth2-tokens-table.py
@@ -0,0 +1,31 @@
+"""
+create oauth2_tokens table
+"""
+
+from yoyo import step
+
+__depends__ = {'20221219_01_CI3tN-create-oauth2-clients-table'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE oauth2_tokens(
+ token_id TEXT NOT NULL,
+ client_id TEXT NOT NULL,
+ token_type TEXT NOT NULL,
+ access_token TEXT UNIQUE NOT NULL,
+ refresh_token TEXT,
+ scope TEXT,
+ revoked INTEGER CHECK (revoked = 0 or revoked = 1),
+ issued_at INTEGER NOT NULL,
+ expires_in INTEGER NOT NULL,
+ user_id TEXT NOT NULL,
+ PRIMARY KEY(token_id),
+ FOREIGN KEY (client_id) REFERENCES oauth2_clients(client_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT,
+ FOREIGN KEY (user_id) REFERENCES users(user_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS oauth2_tokens")
+]
diff --git a/migrations/auth/20221219_03_PcTrb-create-authorisation-code-table.py b/migrations/auth/20221219_03_PcTrb-create-authorisation-code-table.py
new file mode 100644
index 0000000..1683f87
--- /dev/null
+++ b/migrations/auth/20221219_03_PcTrb-create-authorisation-code-table.py
@@ -0,0 +1,31 @@
+"""
+create authorisation_code table
+"""
+
+from yoyo import step
+
+__depends__ = {'20221219_02_buSEU-create-oauth2-tokens-table'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE authorisation_code (
+ code_id TEXT NOT NULL,
+ code TEXT UNIQUE NOT NULL,
+            client_id TEXT NOT NULL,
+ redirect_uri TEXT,
+ scope TEXT,
+ nonce TEXT,
+ auth_time INTEGER NOT NULL,
+ code_challenge TEXT,
+ code_challenge_method TEXT,
+ user_id TEXT NOT NULL,
+ PRIMARY KEY (code_id),
+ FOREIGN KEY (client_id) REFERENCES oauth2_clients(client_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT,
+ FOREIGN KEY (user_id) REFERENCES users(user_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS authorisation_code")
+]
diff --git a/migrations/auth/20230111_01_Wd6IZ-remove-create-group-privilege-from-group-leader.py b/migrations/auth/20230111_01_Wd6IZ-remove-create-group-privilege-from-group-leader.py
new file mode 100644
index 0000000..7e7fda2
--- /dev/null
+++ b/migrations/auth/20230111_01_Wd6IZ-remove-create-group-privilege-from-group-leader.py
@@ -0,0 +1,40 @@
+"""
+remove 'create-group' privilege from group-leader.
+"""
+
+from yoyo import step
+
+__depends__ = {'20221219_03_PcTrb-create-authorisation-code-table'}
+
+steps = [
+ step(
+ """
+ DELETE FROM role_privileges
+ WHERE role_id='a0e67630-d502-4b9f-b23f-6805d0f30e30'
+ AND privilege_id='4842e2aa-38b9-4349-805e-0a99a9cf8bff'
+ """,
+ """
+ INSERT INTO role_privileges VALUES
+ ('a0e67630-d502-4b9f-b23f-6805d0f30e30',
+ '4842e2aa-38b9-4349-805e-0a99a9cf8bff')
+ """),
+ step(
+ """
+ INSERT INTO roles(role_id, role_name, user_editable) VALUES
+ ('ade7e6b0-ba9c-4b51-87d0-2af7fe39a347', 'group-creator', '0')
+ """,
+ """
+ DELETE FROM roles WHERE role_id='ade7e6b0-ba9c-4b51-87d0-2af7fe39a347'
+ """),
+ step(
+ """
+ INSERT INTO role_privileges VALUES
+ ('ade7e6b0-ba9c-4b51-87d0-2af7fe39a347',
+ '4842e2aa-38b9-4349-805e-0a99a9cf8bff')
+ """,
+ """
+ DELETE FROM role_privileges
+ WHERE role_id='ade7e6b0-ba9c-4b51-87d0-2af7fe39a347'
+ AND privilege_id='4842e2aa-38b9-4349-805e-0a99a9cf8bff'
+ """)
+]
diff --git a/migrations/auth/20230116_01_KwuJ3-rework-privileges-schema.py b/migrations/auth/20230116_01_KwuJ3-rework-privileges-schema.py
new file mode 100644
index 0000000..1ef5ab0
--- /dev/null
+++ b/migrations/auth/20230116_01_KwuJ3-rework-privileges-schema.py
@@ -0,0 +1,111 @@
+"""
+rework privileges schema
+"""
+import contextlib
+
+from yoyo import step
+
+__depends__ = {'20230111_01_Wd6IZ-remove-create-group-privilege-from-group-leader'}
+
+privileges = ( # format: (original_id, original_name, new_id, category)
+ ("13ec2a94-4f1a-442d-aad2-936ad6dd5c57", "delete-group",
+ "system:group:delete-group", "group-management"),
+ ("1c59eff5-9336-4ed2-a166-8f70d4cb012e", "delete-role",
+ "group:role:delete-role", "role-management"),
+ ("1fe61370-cae9-4983-bd6c-ce61050c510f", "delete-any-user",
+ "system:user:delete-user", "user-management"),
+ ("221660b1-df05-4be1-b639-f010269dbda9", "create-role",
+ "group:role:create-role", "role-management"),
+ ("2f980855-959b-4339-b80e-25d1ec286e21", "edit-resource",
+ "group:resource:edit-resource", "resource-management"),
+ ("3ebfe79c-d159-4629-8b38-772cf4bc2261", "view-group",
+ "system:group:view-group", "group-management"),
+ ("4842e2aa-38b9-4349-805e-0a99a9cf8bff", "create-group",
+ "system:group:create-group", "group-management"),
+ ("5103cc68-96f8-4ebb-83a4-a31692402c9b", "assign-role",
+ "group:user:assign-role", "role-management"),
+ ("519db546-d44e-4fdc-9e4e-25aa67548ab3", "masquerade",
+ "system:user:masquerade", "system-admin"),
+ ("52576370-b3c7-4e6a-9f7e-90e9dbe24d8f", "edit-group",
+ "system:group:edit-group", "group-management"),
+ ("7bcca363-cba9-4169-9e31-26bdc6179b28", "edit-role",
+ "group:role:edit-role", "role-management"),
+ ("7f261757-3211-4f28-a43f-a09b800b164d", "view-resource",
+ "group:resource:view-resource", "resource-management"),
+ ("80f11285-5079-4ec0-907c-06509f88a364", "assign-group-leader",
+ "system:user:assign-group-leader", "group-management"),
+ ("aa25b32a-bff2-418d-b0a2-e26b4a8f089b", "create-resource",
+ "group:resource:create-resource", "resource-management"),
+ ("ae4add8c-789a-4d11-a6e9-a306470d83d9", "add-group-member",
+ "group:user:add-group-member", "group-management"),
+ ("d2a070fd-e031-42fb-ba41-d60cf19e5d6d", "delete-resource",
+ "group:resource:delete-resource", "resource-management"),
+ ("d4afe2b3-4ca0-4edd-b37d-966535b5e5bd", "transfer-group-leadership",
+ "system:group:transfer-group-leader", "group-management"),
+ ("e7252301-6ee0-43ba-93ef-73b607cf06f6", "reset-any-password",
+ "system:user:reset-password", "user-management"),
+ ("f1bd3f42-567e-4965-9643-6d1a52ddee64", "remove-group-member",
+ "group:user:remove-group-member", "group-management"))
+
+def rework_privileges_table(cursor):
+ "rework the schema"
+ cursor.executemany(
+ ("UPDATE privileges SET privilege_id=:id "
+ "WHERE privilege_id=:old_id"),
+ ({"id": row[2], "old_id": row[0]} for row in privileges))
+ cursor.execute("ALTER TABLE privileges DROP COLUMN privilege_category")
+ cursor.execute("ALTER TABLE privileges DROP COLUMN privilege_name")
+
+def restore_privileges_table(cursor):
+ "restore the schema"
+ cursor.execute((
+ "CREATE TABLE privileges_restore ("
+ " privilege_id TEXT PRIMARY KEY,"
+ " privilege_name TEXT NOT NULL,"
+ " privilege_category TEXT NOT NULL DEFAULT 'common',"
+ " privilege_description TEXT"
+ ")"))
+ id_dict = {row[2]: {"id": row[0], "name": row[1], "cat": row[3]}
+ for row in privileges}
+ cursor.execute(
+ "SELECT privilege_id, privilege_description FROM privileges")
+ params = ({**id_dict[row[0]], "desc": row[1]} for row in cursor.fetchall())
+ cursor.executemany(
+ "INSERT INTO privileges_restore VALUES (:id, :name, :cat, :desc)",
+ params)
+ cursor.execute("DROP TABLE privileges")
+ cursor.execute("ALTER TABLE privileges_restore RENAME TO privileges")
+
+def update_privilege_ids_in_role_privileges(cursor):
+ """Update the ids to new form."""
+ cursor.executemany(
+ ("UPDATE role_privileges SET privilege_id=:new_id "
+ "WHERE privilege_id=:old_id"),
+ ({"new_id": row[2], "old_id": row[0]} for row in privileges))
+
+def restore_privilege_ids_in_role_privileges(cursor):
+ """Restore original ids"""
+ cursor.executemany(
+ ("UPDATE role_privileges SET privilege_id=:old_id "
+ "WHERE privilege_id=:new_id"),
+ ({"new_id": row[2], "old_id": row[0]} for row in privileges))
+
+def change_schema(conn):
+ """Change the privileges schema and IDs"""
+ with contextlib.closing(conn.cursor()) as cursor:
+ cursor.execute("PRAGMA foreign_keys=OFF")
+ rework_privileges_table(cursor)
+ update_privilege_ids_in_role_privileges(cursor)
+ cursor.execute("PRAGMA foreign_keys=ON")
+
+def restore_schema(conn):
+ """Change the privileges schema and IDs"""
+ with contextlib.closing(conn.cursor()) as cursor:
+ cursor.execute("PRAGMA foreign_keys=OFF")
+ restore_privilege_ids_in_role_privileges(cursor)
+ restore_privileges_table(cursor)
+ cursor.execute("PRAGMA foreign_keys=ON")
+
+steps = [
+ step(change_schema, restore_schema)
+]
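
Note on the rework above: the separate privilege_name and privilege_category columns are dropped because the new hierarchical IDs (e.g. "group:resource:edit-resource") carry the scope, target and action in the identifier itself; the old names and categories are retained only in this script's `privileges` tuple so the rollback can restore them. A minimal sketch of decomposing such an ID (an illustration only, not part of this commit; the helper name is hypothetical):

    def split_privilege_id(privilege_id: str) -> dict:
        """Split a hierarchical privilege ID into scope, target and action."""
        # e.g. "system:group:delete-group" -> system / group / delete-group
        scope, target, action = privilege_id.split(":", maxsplit=2)
        return {"scope": scope, "target": target, "action": action}

    assert split_privilege_id("system:group:delete-group") == {
        "scope": "system", "target": "group", "action": "delete-group"}
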
diff --git a/migrations/auth/20230207_01_r0bkZ-create-group-join-requests-table.py b/migrations/auth/20230207_01_r0bkZ-create-group-join-requests-table.py
new file mode 100644
index 0000000..ceae5ea
--- /dev/null
+++ b/migrations/auth/20230207_01_r0bkZ-create-group-join-requests-table.py
@@ -0,0 +1,29 @@
+"""
+Create group_requests table
+"""
+
+from yoyo import step
+
+__depends__ = {'20230116_01_KwuJ3-rework-privileges-schema'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS group_join_requests(
+ request_id TEXT NOT NULL,
+ group_id TEXT NOT NULL,
+ requester_id TEXT NOT NULL,
+ timestamp REAL NOT NULL,
+ status TEXT NOT NULL DEFAULT 'PENDING',
+ message TEXT,
+ PRIMARY KEY(request_id, group_id),
+ FOREIGN KEY(group_id) REFERENCES groups(group_id)
+ ON UPDATE CASCADE ON DELETE CASCADE,
+ FOREIGN KEY (requester_id) REFERENCES users(user_id)
+ ON UPDATE CASCADE ON DELETE CASCADE,
+ UNIQUE(group_id, requester_id),
+ CHECK (status IN ('PENDING', 'ACCEPTED', 'REJECTED'))
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS group_join_requests")
+]
diff --git a/migrations/auth/20230210_01_8xMa1-system-admin-privileges-for-data-distribution.py b/migrations/auth/20230210_01_8xMa1-system-admin-privileges-for-data-distribution.py
new file mode 100644
index 0000000..8b406a6
--- /dev/null
+++ b/migrations/auth/20230210_01_8xMa1-system-admin-privileges-for-data-distribution.py
@@ -0,0 +1,22 @@
+"""
+System admin privileges for data distribution
+
+These privileges are focussed on allowing the system administrator to link the
+datasets and traits in the main database to specific groups in the auth system.
+"""
+
+from yoyo import step
+
+__depends__ = {'20230207_01_r0bkZ-create-group-join-requests-table'}
+
+steps = [
+ step(
+ """
+ INSERT INTO privileges VALUES
+ ('system:data:link-to-group', 'Link a dataset or trait to a group.')
+ """,
+ """
+ DELETE FROM privileges WHERE privilege_id IN
+ ('system:data:link-to-group')
+ """)
+]
diff --git a/migrations/auth/20230210_02_lDK14-create-system-admin-role.py b/migrations/auth/20230210_02_lDK14-create-system-admin-role.py
new file mode 100644
index 0000000..9b3fc2b
--- /dev/null
+++ b/migrations/auth/20230210_02_lDK14-create-system-admin-role.py
@@ -0,0 +1,38 @@
+"""
+Create system-admin role
+"""
+import uuid
+from contextlib import closing
+
+from yoyo import step
+
+__depends__ = {'20230210_01_8xMa1-system-admin-privileges-for-data-distribution'}
+
+def create_sys_admin_role(conn):
+ with closing(conn.cursor()) as cursor:
+ role_id = uuid.uuid4()
+ cursor.execute(
+ "INSERT INTO roles VALUES (?, 'system-administrator', '0')",
+ (str(role_id),))
+
+ cursor.executemany(
+ "INSERT INTO role_privileges VALUES (:role_id, :privilege_id)",
+ ({"role_id": f"{role_id}", "privilege_id": priv}
+ for priv in (
+ "system:data:link-to-group",
+ "system:group:create-group",
+ "system:group:delete-group",
+ "system:group:edit-group",
+ "system:group:transfer-group-leader",
+ "system:group:view-group",
+ "system:user:assign-group-leader",
+ "system:user:delete-user",
+ "system:user:masquerade",
+ "system:user:reset-password")))
+
+def drop_sys_admin_role(conn):
+ pass  # Rollback is a no-op; the 'system-administrator' role is left in place.
+
+steps = [
+ step(create_sys_admin_role, drop_sys_admin_role)
+]
diff --git a/migrations/auth/20230306_01_pRfxl-add-system-user-list-privilege.py b/migrations/auth/20230306_01_pRfxl-add-system-user-list-privilege.py
new file mode 100644
index 0000000..84bbd49
--- /dev/null
+++ b/migrations/auth/20230306_01_pRfxl-add-system-user-list-privilege.py
@@ -0,0 +1,26 @@
+"""
+Add system:user:list privilege
+"""
+import contextlib
+
+from yoyo import step
+
+__depends__ = {'20230210_02_lDK14-create-system-admin-role'}
+
+def insert_users_list_priv(conn):
+ """Create a new 'system:user:list' privilege."""
+ with contextlib.closing(conn.cursor()) as cursor:
+ cursor.execute(
+ "INSERT INTO privileges(privilege_id, privilege_description) "
+ "VALUES('system:user:list', 'List users in the system') "
+ "ON CONFLICT (privilege_id) DO NOTHING")
+
+def delete_users_list_priv(conn):
+ """Delete the new 'system:user:list' privilege."""
+ with contextlib.closing(conn.cursor()) as cursor:
+ cursor.execute(
+ "DELETE FROM privileges WHERE privilege_id='system:user:list'")
+
+steps = [
+ step(insert_users_list_priv, delete_users_list_priv)
+]
diff --git a/migrations/auth/20230306_02_7GnRY-add-system-user-list-privilege-to-system-administrator-and-group-leader-roles.py b/migrations/auth/20230306_02_7GnRY-add-system-user-list-privilege-to-system-administrator-and-group-leader-roles.py
new file mode 100644
index 0000000..3caad55
--- /dev/null
+++ b/migrations/auth/20230306_02_7GnRY-add-system-user-list-privilege-to-system-administrator-and-group-leader-roles.py
@@ -0,0 +1,42 @@
+"""
+Add system:user:list privilege to system-administrator and group-leader roles.
+"""
+import uuid
+import contextlib
+
+from yoyo import step
+
+__depends__ = {'20230306_01_pRfxl-add-system-user-list-privilege'}
+
+def role_ids(cursor):
+ """Get role ids from names"""
+ cursor.execute(
+ "SELECT * FROM roles WHERE role_name IN "
+ "('system-administrator', 'group-leader')")
+ return (uuid.UUID(row[0]) for row in cursor.fetchall())
+
+def add_privilege_to_roles(conn):
+ """
+ Add 'system:user:list' privilege to 'system-administrator' and
+ 'group-leader' roles."""
+ with contextlib.closing(conn.cursor()) as cursor:
+ cursor.executemany(
+ "INSERT INTO role_privileges(role_id,privilege_id) "
+ "VALUES(?, ?)",
+ tuple((str(role_id), "system:user:list")
+ for role_id in role_ids(cursor)))
+
+def del_privilege_from_roles(conn):
+ """
+ Delete the 'system:user:list' privilege from the 'system-administrator' and
+ 'group-leader' roles.
+ """
+ with contextlib.closing(conn.cursor()) as cursor:
+ cursor.execute(
+ "DELETE FROM role_privileges WHERE "
+ "role_id IN (?, ?) AND privilege_id='system:user:list'",
+ tuple(str(role_id) for role_id in role_ids(cursor)))
+
+steps = [
+ step(add_privilege_to_roles, del_privilege_from_roles)
+]
diff --git a/migrations/auth/20230322_01_0dDZR-create-linked-phenotype-data-table.py b/migrations/auth/20230322_01_0dDZR-create-linked-phenotype-data-table.py
new file mode 100644
index 0000000..647325f
--- /dev/null
+++ b/migrations/auth/20230322_01_0dDZR-create-linked-phenotype-data-table.py
@@ -0,0 +1,30 @@
+"""
+Create linked-phenotype-data table
+"""
+
+from yoyo import step
+
+__depends__ = {'20230306_02_7GnRY-add-system-user-list-privilege-to-system-administrator-and-group-leader-roles'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS linked_phenotype_data
+ -- Link the data in MariaDB to user groups in the auth system
+ (
+ data_link_id TEXT NOT NULL PRIMARY KEY, -- A new ID for the auth system
+ group_id TEXT NOT NULL, -- The user group the data is linked to
+ SpeciesId TEXT NOT NULL, -- The species in MariaDB
+ InbredSetId TEXT NOT NULL, -- The traits group in MariaDB
+ PublishFreezeId TEXT NOT NULL, -- The dataset Id in MariaDB
+ dataset_name TEXT, -- dataset Name in MariaDB
+ dataset_fullname TEXT, -- dataset FullName in MariaDB
+ dataset_shortname TEXT, -- dataset ShortName in MariaDB
+ PublishXRefId TEXT NOT NULL, -- The trait's ID in MariaDB
+ FOREIGN KEY (group_id)
+ REFERENCES groups(group_id) ON UPDATE CASCADE ON DELETE RESTRICT,
+ UNIQUE (SpeciesId, InbredSetId, PublishFreezeId, PublishXRefId)
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS linked_phenotype_data")
+]
diff --git a/migrations/auth/20230322_02_Ll854-create-phenotype-resources-table.py b/migrations/auth/20230322_02_Ll854-create-phenotype-resources-table.py
new file mode 100644
index 0000000..7c9e986
--- /dev/null
+++ b/migrations/auth/20230322_02_Ll854-create-phenotype-resources-table.py
@@ -0,0 +1,29 @@
+"""
+Create phenotype_resources table
+"""
+
+from yoyo import step
+
+__depends__ = {'20230322_01_0dDZR-create-linked-phenotype-data-table'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS phenotype_resources
+ -- Link phenotype data to specific resources
+ (
+ group_id TEXT NOT NULL,
+ resource_id TEXT NOT NULL, -- A resource can have multiple data items
+ data_link_id TEXT NOT NULL,
+ PRIMARY KEY(group_id, resource_id, data_link_id),
+ UNIQUE (data_link_id), -- ensure data is linked to only one resource
+ FOREIGN KEY (group_id, resource_id)
+ REFERENCES resources(group_id, resource_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT,
+ FOREIGN KEY (data_link_id)
+ REFERENCES linked_phenotype_data(data_link_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS phenotype_resources")
+]
diff --git a/migrations/auth/20230404_01_VKxXg-create-linked-genotype-data-table.py b/migrations/auth/20230404_01_VKxXg-create-linked-genotype-data-table.py
new file mode 100644
index 0000000..02e8718
--- /dev/null
+++ b/migrations/auth/20230404_01_VKxXg-create-linked-genotype-data-table.py
@@ -0,0 +1,29 @@
+"""
+Create linked genotype data table
+"""
+
+from yoyo import step
+
+__depends__ = {'20230322_02_Ll854-create-phenotype-resources-table'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS linked_genotype_data
+ -- Link genotype data in MariaDB to user groups in auth system
+ (
+ data_link_id TEXT NOT NULL PRIMARY KEY, -- A new ID for the auth system
+ group_id TEXT NOT NULL, -- The user group the data is linked to
+ SpeciesId TEXT NOT NULL, -- The species in MariaDB
+ InbredSetId TEXT NOT NULL, -- The traits group in MariaDB
+ GenoFreezeId TEXT NOT NULL, -- The dataset Id in MariaDB
+ dataset_name TEXT, -- dataset Name in MariaDB
+ dataset_fullname TEXT, -- dataset FullName in MariaDB
+ dataset_shortname TEXT, -- dataset ShortName in MariaDB
+ FOREIGN KEY (group_id)
+ REFERENCES groups(group_id) ON UPDATE CASCADE ON DELETE RESTRICT,
+ UNIQUE (SpeciesId, InbredSetId, GenoFreezeId)
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS linked_genotype_data")
+]
diff --git a/migrations/auth/20230404_02_la33P-create-genotype-resources-table.py b/migrations/auth/20230404_02_la33P-create-genotype-resources-table.py
new file mode 100644
index 0000000..1a865e0
--- /dev/null
+++ b/migrations/auth/20230404_02_la33P-create-genotype-resources-table.py
@@ -0,0 +1,29 @@
+"""
+Create genotype resources table
+"""
+
+from yoyo import step
+
+__depends__ = {'20230404_01_VKxXg-create-linked-genotype-data-table'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS genotype_resources
+ -- Link genotype data to specific resource
+ (
+ group_id TEXT NOT NULL,
+ resource_id TEXT NOT NULL, -- A resource can have multiple items
+ data_link_id TEXT NOT NULL,
+ PRIMARY KEY (group_id, resource_id, data_link_id),
+ UNIQUE (data_link_id), -- ensure data is linked to a single resource
+ FOREIGN KEY (group_id, resource_id)
+ REFERENCES resources(group_id, resource_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT,
+ FOREIGN KEY (data_link_id)
+ REFERENCES linked_genotype_data(data_link_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS genotype_resources")
+]
diff --git a/migrations/auth/20230410_01_8mwaf-create-linked-mrna-data-table.py b/migrations/auth/20230410_01_8mwaf-create-linked-mrna-data-table.py
new file mode 100644
index 0000000..db9a6bf
--- /dev/null
+++ b/migrations/auth/20230410_01_8mwaf-create-linked-mrna-data-table.py
@@ -0,0 +1,30 @@
+"""
+Create linked mrna data table
+"""
+
+from yoyo import step
+
+__depends__ = {'20230404_02_la33P-create-genotype-resources-table'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS linked_mrna_data
+ -- Link mRNA Assay data in MariaDB to user groups in auth system
+ (
+ data_link_id TEXT NOT NULL PRIMARY KEY, -- A new ID for the auth system
+ group_id TEXT NOT NULL, -- The user group the data is linked to
+ SpeciesId TEXT NOT NULL, -- The species in MariaDB
+ InbredSetId TEXT NOT NULL, -- The traits group in MariaDB
+ ProbeFreezeId TEXT NOT NULL, -- The study ID in MariaDB
+ ProbeSetFreezeId TEXT NOT NULL, -- The dataset Id in MariaDB
+ dataset_name TEXT, -- dataset Name in MariaDB
+ dataset_fullname TEXT, -- dataset FullName in MariaDB
+ dataset_shortname TEXT, -- dataset ShortName in MariaDB
+ FOREIGN KEY (group_id)
+ REFERENCES groups(group_id) ON UPDATE CASCADE ON DELETE RESTRICT,
+ UNIQUE (SpeciesId, InbredSetId, ProbeFreezeId, ProbeSetFreezeId)
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS linked_mrna_data")
+]
diff --git a/migrations/auth/20230410_02_WZqSf-create-mrna-resources-table.py b/migrations/auth/20230410_02_WZqSf-create-mrna-resources-table.py
new file mode 100644
index 0000000..2ad1056
--- /dev/null
+++ b/migrations/auth/20230410_02_WZqSf-create-mrna-resources-table.py
@@ -0,0 +1,28 @@
+"""
+Create mRNA resources table
+"""
+
+from yoyo import step
+
+__depends__ = {'20230410_01_8mwaf-create-linked-mrna-data-table'}
+
+steps = [
+ step(
+ """
+ CREATE TABLE IF NOT EXISTS mrna_resources
+ -- Link mRNA data to specific resource
+ (
+ group_id TEXT NOT NULL,
+ resource_id TEXT NOT NULL, -- A resource can have multiple items
+ data_link_id TEXT NOT NULL,
+ PRIMARY KEY (resource_id, data_link_id),
+ UNIQUE (data_link_id), -- ensure data is linked to a single resource
+ FOREIGN KEY (group_id, resource_id)
+ REFERENCES resources(group_id, resource_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT,
+ FOREIGN KEY (data_link_id) REFERENCES linked_mrna_data(data_link_id)
+ ON UPDATE CASCADE ON DELETE RESTRICT
+ ) WITHOUT ROWID
+ """,
+ "DROP TABLE IF EXISTS mrna_resources")
+]
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/__init__.py
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/unit/__init__.py
diff --git a/tests/unit/auth/__init__.py b/tests/unit/auth/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/unit/auth/__init__.py
diff --git a/tests/unit/auth/conftest.py b/tests/unit/auth/conftest.py
new file mode 100644
index 0000000..a7c64a8
--- /dev/null
+++ b/tests/unit/auth/conftest.py
@@ -0,0 +1,24 @@
+"""Module for fixtures and test utilities"""
+import uuid
+import datetime
+from contextlib import contextmanager
+
+from gn3.auth.authentication.oauth2.models.oauth2token import OAuth2Token
+
+from .fixtures import * # pylint: disable=[wildcard-import,unused-wildcard-import]
+
+def get_tokeniser(user):
+ """Get contextmanager for mocking token acquisition."""
+ @contextmanager
+ def __token__(*args, **kwargs):# pylint: disable=[unused-argument]
+ yield {
+ usr.user_id: OAuth2Token(
+ token_id=uuid.UUID("d32611e3-07fc-4564-b56c-786c6db6de2b"),
+ client=None, token_type="Bearer", access_token="123456ABCDE",
+ refresh_token=None, revoked=False, expires_in=864000,
+ user=usr, issued_at=int(datetime.datetime.now().timestamp()),
+ scope="profile group role resource register-client")
+ for usr in TEST_USERS
+ }[user.user_id]
+
+ return __token__
diff --git a/tests/unit/auth/fixtures/__init__.py b/tests/unit/auth/fixtures/__init__.py
new file mode 100644
index 0000000..a675fc7
--- /dev/null
+++ b/tests/unit/auth/fixtures/__init__.py
@@ -0,0 +1,8 @@
+"""pytest's conftest as a module."""
+from .role_fixtures import *
+from .user_fixtures import *
+from .group_fixtures import *
+from .resource_fixtures import *
+# from .privilege_fixtures import *
+from .migration_fixtures import *
+from .oauth2_client_fixtures import *
diff --git a/tests/unit/auth/fixtures/group_fixtures.py b/tests/unit/auth/fixtures/group_fixtures.py
new file mode 100644
index 0000000..d7bbc56
--- /dev/null
+++ b/tests/unit/auth/fixtures/group_fixtures.py
@@ -0,0 +1,147 @@
+"""Fixtures and utilities for group-related tests"""
+import uuid
+
+import pytest
+
+from gn3.auth import db
+from gn3.auth.authorisation.groups import Group, GroupRole
+from gn3.auth.authorisation.resources import Resource, ResourceCategory
+
+from .role_fixtures import RESOURCE_EDITOR_ROLE, RESOURCE_READER_ROLE
+
+TEST_GROUP_01 = Group(uuid.UUID("9988c21d-f02f-4d45-8966-22c968ac2fbf"),
+ "TheTestGroup", {})
+TEST_GROUP_02 = Group(uuid.UUID("e37d59d7-c05e-4d67-b479-81e627d8d634"),
+ "AnotherTestGroup", {})
+TEST_GROUPS = (TEST_GROUP_01, TEST_GROUP_02)
+
+TEST_RESOURCES_GROUP_01 = (
+ Resource(TEST_GROUPS[0], uuid.UUID("26ad1668-29f5-439d-b905-84d551f85955"),
+ "ResourceG01R01",
+ ResourceCategory(uuid.UUID("48056f84-a2a6-41ac-8319-0e1e212cba2a"),
+ "genotype", "Genotype Dataset"),
+ True),
+ Resource(TEST_GROUPS[0], uuid.UUID("2130aec0-fefd-434d-92fd-9ca342348b2d"),
+ "ResourceG01R02",
+ ResourceCategory(uuid.UUID("548d684b-d4d1-46fb-a6d3-51a56b7da1b3"),
+ "phenotype", "Phenotype (Publish) Dataset"),
+ False),
+ Resource(TEST_GROUPS[0], uuid.UUID("e9a1184a-e8b4-49fb-b713-8d9cbeea5b83"),
+ "ResourceG01R03",
+ ResourceCategory(uuid.UUID("fad071a3-2fc8-40b8-992b-cdefe7dcac79"),
+ "mrna", "mRNA Dataset"),
+ False))
+
+TEST_RESOURCES_GROUP_02 = (
+ Resource(TEST_GROUPS[1], uuid.UUID("14496a1c-c234-49a2-978c-8859ea274054"),
+ "ResourceG02R01",
+ ResourceCategory(uuid.UUID("48056f84-a2a6-41ac-8319-0e1e212cba2a"),
+ "genotype", "Genotype Dataset"),
+ False),
+ Resource(TEST_GROUPS[1], uuid.UUID("04ad9e09-94ea-4390-8a02-11f92999806b"),
+ "ResourceG02R02",
+ ResourceCategory(uuid.UUID("fad071a3-2fc8-40b8-992b-cdefe7dcac79"),
+ "mrna", "mRNA Dataset"),
+ True))
+
+TEST_RESOURCES = TEST_RESOURCES_GROUP_01 + TEST_RESOURCES_GROUP_02
+TEST_RESOURCES_PUBLIC = (TEST_RESOURCES_GROUP_01[0], TEST_RESOURCES_GROUP_02[1])
+
+def __gtuple__(cursor):
+ return tuple(dict(row) for row in cursor.fetchall())
+
+@pytest.fixture(scope="function")
+def fxtr_group(conn_after_auth_migrations):# pylint: disable=[redefined-outer-name]
+ """Fixture: setup a test group."""
+ query = "INSERT INTO groups(group_id, group_name) VALUES (?, ?)"
+ with db.cursor(conn_after_auth_migrations) as cursor:
+ cursor.executemany(
+ query, tuple(
+ (str(group.group_id), group.group_name)
+ for group in TEST_GROUPS))
+
+ yield (conn_after_auth_migrations, TEST_GROUPS[0])
+
+ with db.cursor(conn_after_auth_migrations) as cursor:
+ cursor.executemany(
+ "DELETE FROM groups WHERE group_id=?",
+ ((str(group.group_id),) for group in TEST_GROUPS))
+
+@pytest.fixture(scope="function")
+def fxtr_users_in_group(fxtr_group, fxtr_users):# pylint: disable=[redefined-outer-name, unused-argument]
+ """Link the users to the groups."""
+ conn, all_users = fxtr_users
+ users = tuple(
+ user for user in all_users if user.email not in ("unaff@iliated.user",))
+ query_params = tuple(
+ (str(TEST_GROUP_01.group_id), str(user.user_id)) for user in users)
+ with db.cursor(conn) as cursor:
+ cursor.executemany(
+ "INSERT INTO group_users(group_id, user_id) VALUES (?, ?)",
+ query_params)
+
+ yield (conn, TEST_GROUP_01, users)
+
+ with db.cursor(conn) as cursor:
+ cursor.executemany(
+ "DELETE FROM group_users WHERE group_id=? AND user_id=?",
+ query_params)
+
+@pytest.fixture(scope="function")
+def fxtr_group_roles(fxtr_group, fxtr_roles):# pylint: disable=[redefined-outer-name,unused-argument]
+ """Link roles to group"""
+ group_roles = (
+ GroupRole(uuid.UUID("9c25efb2-b477-4918-a95c-9914770cbf4d"),
+ TEST_GROUP_01, RESOURCE_EDITOR_ROLE),
+ GroupRole(uuid.UUID("82aed039-fe2f-408c-ab1e-81cd1ba96630"),
+ TEST_GROUP_02, RESOURCE_READER_ROLE))
+ conn, groups = fxtr_group
+ with db.cursor(conn) as cursor:
+ cursor.executemany(
+ "INSERT INTO group_roles VALUES (?, ?, ?)",
+ ((str(role.group_role_id), str(role.group.group_id),
+ str(role.role.role_id))
+ for role in group_roles))
+
+ yield conn, groups, group_roles
+
+ with db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM group_user_roles_on_resources")
+ cursor.executemany(
+ ("DELETE FROM group_roles "
+ "WHERE group_role_id=? AND group_id=? AND role_id=?"),
+ ((str(role.group_role_id), str(role.group.group_id),
+ str(role.role.role_id))
+ for role in group_roles))
+
+@pytest.fixture(scope="function")
+def fxtr_group_user_roles(fxtr_resources, fxtr_group_roles, fxtr_users_in_group):#pylint: disable=[redefined-outer-name,unused-argument]
+ """Assign roles to users."""
+ conn, _groups, group_roles = fxtr_group_roles
+ _conn, group_resources = fxtr_resources
+ _conn, _group, group_users = fxtr_users_in_group
+ users = tuple(user for user in group_users if user.email
+ not in ("unaff@iliated.user", "group@lead.er"))
+ users_roles_resources = (
+ (user, RESOURCE_EDITOR_ROLE, TEST_RESOURCES_GROUP_01[1])
+ for user in users if user.email == "group@mem.ber01")
+ with db.cursor(conn) as cursor:
+ params = tuple({
+ "group_id": str(resource.group.group_id),
+ "user_id": str(user.user_id),
+ "role_id": str(role.role_id),
+ "resource_id": str(resource.resource_id)
+ } for user, role, resource in users_roles_resources)
+ cursor.executemany(
+ ("INSERT INTO group_user_roles_on_resources "
+ "VALUES (:group_id, :user_id, :role_id, :resource_id)"),
+ params)
+
+ yield conn, group_users, group_roles, group_resources
+
+ with db.cursor(conn) as cursor:
+ cursor.executemany(
+ ("DELETE FROM group_user_roles_on_resources WHERE "
+ "group_id=:group_id AND user_id=:user_id AND role_id=:role_id AND "
+ "resource_id=:resource_id"),
+ params)
diff --git a/tests/unit/auth/fixtures/migration_fixtures.py b/tests/unit/auth/fixtures/migration_fixtures.py
new file mode 100644
index 0000000..eb42c2b
--- /dev/null
+++ b/tests/unit/auth/fixtures/migration_fixtures.py
@@ -0,0 +1,51 @@
+"""Fixtures and utilities for migration-related tests"""
+import pytest
+from yoyo.backends import DatabaseBackend
+from yoyo import get_backend, read_migrations
+from yoyo.migrations import Migration, MigrationList
+
+from gn3.auth import db
+from gn3.migrations import apply_migrations, rollback_migrations
+
+@pytest.fixture(scope="session")
+def auth_testdb_path(fxtr_app_config): # pylint: disable=redefined-outer-name
+ """Get the test application's auth database file"""
+ return fxtr_app_config["AUTH_DB"]
+
+@pytest.fixture(scope="session")
+def auth_migrations_dir(fxtr_app_config): # pylint: disable=redefined-outer-name
+ """Get the test application's auth database file"""
+ return fxtr_app_config["AUTH_MIGRATIONS"]
+
+def apply_single_migration(backend: DatabaseBackend, migration: Migration):# pylint: disable=[redefined-outer-name]
+ """Utility to apply a single migration"""
+ apply_migrations(backend, MigrationList([migration]))
+
+def rollback_single_migration(backend: DatabaseBackend, migration: Migration):# pylint: disable=[redefined-outer-name]
+ """Utility to rollback a single migration"""
+ rollback_migrations(backend, MigrationList([migration]))
+
+@pytest.fixture(scope="session")
+def backend(auth_testdb_path):# pylint: disable=redefined-outer-name
+ """Fixture: retrieve yoyo backend for auth database"""
+ return get_backend(f"sqlite:///{auth_testdb_path}")
+
+@pytest.fixture(scope="session")
+def all_migrations(auth_migrations_dir): # pylint: disable=redefined-outer-name
+ """Retrieve all the migrations"""
+ return read_migrations(auth_migrations_dir)
+
+@pytest.fixture(scope="function")
+def conn_after_auth_migrations(backend, auth_testdb_path, all_migrations): # pylint: disable=redefined-outer-name
+ """Run all migrations and return a connection to the database after"""
+ apply_migrations(backend, all_migrations)
+ with db.connection(auth_testdb_path) as conn:
+ yield conn
+
+ rollback_migrations(backend, all_migrations)
+
+def migrations_up_to(migration, migrations_dir):
+ """Run all the migration before `migration`."""
+ migrations = read_migrations(migrations_dir)
+ index = [mig.path for mig in migrations].index(migration)
+ return MigrationList(migrations[0:index])
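
Note that `migrations_up_to` returns only the migrations strictly before the named script; callers that also want the script itself applied append it explicitly. A minimal usage sketch, reusing the `backend` fixture above (the target filename is one of the migrations added in this commit):

    from yoyo.migrations import MigrationList
    from gn3.migrations import get_migration, apply_migrations

    migrations_dir = "migrations/auth"
    target = f"{migrations_dir}/20221219_01_CI3tN-create-oauth2-clients-table.py"
    # everything before `target`, plus `target` itself
    migrations = (migrations_up_to(target, migrations_dir)
                  + MigrationList([get_migration(target)]))
    apply_migrations(backend, migrations)
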
diff --git a/tests/unit/auth/fixtures/oauth2_client_fixtures.py b/tests/unit/auth/fixtures/oauth2_client_fixtures.py
new file mode 100644
index 0000000..654d048
--- /dev/null
+++ b/tests/unit/auth/fixtures/oauth2_client_fixtures.py
@@ -0,0 +1,51 @@
+"""Fixtures for OAuth2 clients"""
+import uuid
+import json
+import datetime
+
+import pytest
+
+from gn3.auth import db
+from gn3.auth.authentication.users import hash_password
+from gn3.auth.authentication.oauth2.models.oauth2client import OAuth2Client
+
+@pytest.fixture(autouse=True)
+def fxtr_patch_envvars(monkeypatch):
+ """Fixture: patch environment variable"""
+ monkeypatch.setenv("AUTHLIB_INSECURE_TRANSPORT", "true")
+
+@pytest.fixture
+def fxtr_oauth2_clients(fxtr_users_with_passwords):
+ """Fixture: Create the OAuth2 clients for use with tests."""
+ conn, users = fxtr_users_with_passwords
+ now = datetime.datetime.now()
+
+ clients = tuple(
+ OAuth2Client(str(uuid.uuid4()), f"yabadabadoo_{idx:03}", now,
+ now + datetime.timedelta(hours = 2),
+ {
+ "client_name": f"test_client_{idx:03}",
+ "scope": ["profile", "group", "role", "resource", "register-client"],
+ "redirect_uri": "/test_oauth2",
+ "token_endpoint_auth_method": [
+ "client_secret_post", "client_secret_basic"],
+ "grant_types": ["password", "authorisation_code", "refresh_token"],
+ "response_type": "token"
+ }, user)
+ for idx, user in enumerate(users, start=1))
+
+ with db.cursor(conn) as cursor:
+ cursor.executemany(
+ "INSERT INTO oauth2_clients VALUES (?, ?, ?, ?, ?, ?)",
+ ((str(client.client_id), hash_password(client.client_secret),
+ int(client.client_id_issued_at.timestamp()),
+ int(client.client_secret_expires_at.timestamp()),
+ json.dumps(client.client_metadata), str(client.user.user_id))
+ for client in clients))
+
+ yield conn, clients
+
+ with db.cursor(conn) as cursor:
+ cursor.executemany(
+ "DELETE FROM oauth2_clients WHERE client_id=?",
+ ((str(client.client_id),) for client in clients))
diff --git a/tests/unit/auth/fixtures/resource_fixtures.py b/tests/unit/auth/fixtures/resource_fixtures.py
new file mode 100644
index 0000000..117b4f4
--- /dev/null
+++ b/tests/unit/auth/fixtures/resource_fixtures.py
@@ -0,0 +1,25 @@
+"""Fixtures and utilities for resource-related tests"""
+import pytest
+
+from gn3.auth import db
+
+from .group_fixtures import TEST_RESOURCES
+
+@pytest.fixture(scope="function")
+def fxtr_resources(fxtr_group):# pylint: disable=[redefined-outer-name]
+ """fixture: setup test resources in the database"""
+ conn, _group = fxtr_group
+ with db.cursor(conn) as cursor:
+ cursor.executemany(
+ "INSERT INTO resources VALUES (?,?,?,?,?)",
+ ((str(res.group.group_id), str(res.resource_id), res.resource_name,
+ str(res.resource_category.resource_category_id),
+ 1 if res.public else 0) for res in TEST_RESOURCES))
+
+ yield (conn, TEST_RESOURCES)
+
+ with db.cursor(conn) as cursor:
+ cursor.executemany(
+ "DELETE FROM resources WHERE group_id=? AND resource_id=?",
+ ((str(res.group.group_id), str(res.resource_id),)
+ for res in TEST_RESOURCES))
diff --git a/tests/unit/auth/fixtures/role_fixtures.py b/tests/unit/auth/fixtures/role_fixtures.py
new file mode 100644
index 0000000..ee86aa2
--- /dev/null
+++ b/tests/unit/auth/fixtures/role_fixtures.py
@@ -0,0 +1,45 @@
+"""Fixtures and utilities for role-related tests"""
+import uuid
+
+import pytest
+
+from gn3.auth import db
+from gn3.auth.authorisation.roles import Role
+from gn3.auth.authorisation.privileges import Privilege
+
+RESOURCE_READER_ROLE = Role(
+ uuid.UUID("c3ca2507-ee24-4835-9b31-8c21e1c072d3"), "resource_reader", True,
+ (Privilege("group:resource:view-resource",
+ "view a resource and use it in computations"),))
+
+RESOURCE_EDITOR_ROLE = Role(
+ uuid.UUID("89819f84-6346-488b-8955-86062e9eedb7"), "resource_editor", True,
+ (
+ Privilege("group:resource:view-resource",
+ "view a resource and use it in computations"),
+ Privilege("group:resource:edit-resource", "edit/update a resource")))
+
+TEST_ROLES = (RESOURCE_READER_ROLE, RESOURCE_EDITOR_ROLE)
+
+@pytest.fixture(scope="function")
+def fxtr_roles(conn_after_auth_migrations):
+ """Setup some example roles."""
+ with db.cursor(conn_after_auth_migrations) as cursor:
+ cursor.executemany(
+ ("INSERT INTO roles VALUES (?, ?, ?)"),
+ ((str(role.role_id), role.role_name, 1) for role in TEST_ROLES))
+ cursor.executemany(
+ ("INSERT INTO role_privileges VALUES (?, ?)"),
+ ((str(role.role_id), str(privilege.privilege_id))
+ for role in TEST_ROLES for privilege in role.privileges))
+
+ yield conn_after_auth_migrations, TEST_ROLES
+
+ with db.cursor(conn_after_auth_migrations) as cursor:
+ cursor.executemany(
+ ("DELETE FROM role_privileges WHERE role_id=? AND privilege_id=?"),
+ ((str(role.role_id), str(privilege.privilege_id))
+ for role in TEST_ROLES for privilege in role.privileges))
+ cursor.executemany(
+ ("DELETE FROM roles WHERE role_id=?"),
+ ((str(role.role_id),) for role in TEST_ROLES))
diff --git a/tests/unit/auth/fixtures/user_fixtures.py b/tests/unit/auth/fixtures/user_fixtures.py
new file mode 100644
index 0000000..d248f54
--- /dev/null
+++ b/tests/unit/auth/fixtures/user_fixtures.py
@@ -0,0 +1,66 @@
+"""Fixtures and utilities for user-related tests"""
+import uuid
+
+import pytest
+
+from gn3.auth import db
+from gn3.auth.authentication.users import User, hash_password
+
+TEST_USERS = (
+ User(uuid.UUID("ecb52977-3004-469e-9428-2a1856725c7f"), "group@lead.er",
+ "Group Leader"),
+ User(uuid.UUID("21351b66-8aad-475b-84ac-53ce528451e3"),
+ "group@mem.ber01", "Group Member 01"),
+ User(uuid.UUID("ae9c6245-0966-41a5-9a5e-20885a96bea7"),
+ "group@mem.ber02", "Group Member 02"),
+ User(uuid.UUID("9a0c7ce5-2f40-4e78-979e-bf3527a59579"),
+ "unaff@iliated.user", "Unaffiliated User"))
+
+@pytest.fixture(scope="function")
+def fxtr_users(conn_after_auth_migrations):# pylint: disable=[redefined-outer-name]
+ """Fixture: setup test users."""
+ query = "INSERT INTO users(user_id, email, name) VALUES (?, ?, ?)"
+ query_user_roles = "INSERT INTO user_roles(user_id, role_id) VALUES (?, ?)"
+ test_user_roles = (
+ ("ecb52977-3004-469e-9428-2a1856725c7f",
+ "a0e67630-d502-4b9f-b23f-6805d0f30e30"),
+ ("ecb52977-3004-469e-9428-2a1856725c7f",
+ "ade7e6b0-ba9c-4b51-87d0-2af7fe39a347"))
+ with db.cursor(conn_after_auth_migrations) as cursor:
+ cursor.executemany(query, (
+ (str(user.user_id), user.email, user.name) for user in TEST_USERS))
+ cursor.executemany(query_user_roles, test_user_roles)
+
+ yield (conn_after_auth_migrations, TEST_USERS)
+
+ with db.cursor(conn_after_auth_migrations) as cursor:
+ cursor.executemany(
+ "DELETE FROM user_roles WHERE user_id=?",
+ (("ecb52977-3004-469e-9428-2a1856725c7f",),))
+ cursor.executemany(
+ "DELETE FROM users WHERE user_id=?",
+ (("ecb52977-3004-469e-9428-2a1856725c7f",),
+ ("21351b66-8aad-475b-84ac-53ce528451e3",),
+ ("ae9c6245-0966-41a5-9a5e-20885a96bea7",),
+ ("9a0c7ce5-2f40-4e78-979e-bf3527a59579",)))
+
+@pytest.fixture(scope="function")
+def fxtr_users_with_passwords(fxtr_users): # pylint: disable=[redefined-outer-name]
+ """Fixture: add passwords to the users"""
+ conn, users = fxtr_users
+ user_passwords_params = tuple(
+ (str(user.user_id), hash_password(
+ f"password_for_user_{idx:03}".encode("utf8")))
+ for idx, user in enumerate(users, start=1))
+
+ with db.cursor(conn) as cursor:
+ cursor.executemany(
+ "INSERT INTO user_credentials VALUES (?, ?)",
+ user_passwords_params)
+
+ yield conn, users
+
+ with db.cursor(conn) as cursor:
+ cursor.executemany(
+ "DELETE FROM user_credentials WHERE user_id=?",
+ ((item[0],) for item in user_passwords_params))
diff --git a/tests/unit/auth/test_credentials.py b/tests/unit/auth/test_credentials.py
new file mode 100644
index 0000000..f2a3d25
--- /dev/null
+++ b/tests/unit/auth/test_credentials.py
@@ -0,0 +1,100 @@
+"""Test the credentials checks"""
+import pytest
+from yoyo.migrations import MigrationList
+from hypothesis import given, settings, strategies, HealthCheck
+
+from gn3.auth import db
+from gn3.auth.authentication import credentials_in_database
+from gn3.migrations import get_migration, apply_migrations, rollback_migrations
+
+from tests.unit.auth.conftest import migrations_up_to
+
+@pytest.fixture
+def with_credentials_table(backend, auth_testdb_path):
+ """
+ Fixture: Yield a connection object with the 'user_credentials' table
+ created.
+ """
+ migrations_dir = "migrations/auth"
+ migration = f"{migrations_dir}/20221103_02_sGrIs-create-user-credentials-table.py"
+ migrations = (migrations_up_to(migration, migrations_dir) +
+ MigrationList([get_migration(migration)]))
+ apply_migrations(backend, migrations)
+ with db.connection(auth_testdb_path) as conn:
+ yield conn
+
+ rollback_migrations(backend, migrations)
+
+@pytest.fixture
+def with_credentials(with_credentials_table):# pylint: disable=redefined-outer-name
+ """
+ Fixture: Initialise the database with some user credentials.
+ """
+ with db.cursor(with_credentials_table) as cursor:
+ cursor.executemany(
+ "INSERT INTO users VALUES (:user_id, :email, :name)",
+ ({"user_id": "82552014-21ee-4321-b96a-b8788b97b862",
+ "email": "first@test.user",
+ "name": "First Test User"
+ },
+ {"user_id": "bdd5cb7a-072d-4c2b-9872-d0cecb718523",
+ "email": "second@test.user",
+ "name": "Second Test User"
+ }))
+ cursor.executemany(
+ "INSERT INTO user_credentials VALUES (:user_id, :password)",
+ ({"user_id": "82552014-21ee-4321-b96a-b8788b97b862",
+ "password": b'$2b$12$LAh1PYtUgAFK7d5fA0EfL.4AdTZuYEAfzwO.p.jXVboxcP8bXNj7a'
+ },
+ {"user_id": "bdd5cb7a-072d-4c2b-9872-d0cecb718523",
+ "password": b'$2b$12$zX77QCFSJuwIjAZGc0Jq5.rCWMHEMKD9Zf3Ay4C0AzwsiZ7SSPdKO'
+ }))
+
+ yield with_credentials_table
+
+ cursor.executemany("DELETE FROM user_credentials WHERE user_id=?",
+ (("82552014-21ee-4321-b96a-b8788b97b862",),
+ ("bdd5cb7a-072d-4c2b-9872-d0cecb718523",)))
+ cursor.executemany("DELETE FROM users WHERE user_id=?",
+ (("82552014-21ee-4321-b96a-b8788b97b862",),
+ ("bdd5cb7a-072d-4c2b-9872-d0cecb718523",)))
+
+@pytest.mark.unit_test
+@given(strategies.emails(), strategies.text())
+@settings(suppress_health_check=[HealthCheck.function_scoped_fixture])
+def test_credentials_not_in_database(with_credentials, email, password):# pylint: disable=redefined-outer-name
+ """
+ GIVEN: credentials that do not exist in the database
+ WHEN: the `credentials_in_database` function is run against the credentials
+ THEN: check that the function returns false in all cases.
+ """
+ with db.cursor(with_credentials) as cursor:
+ assert credentials_in_database(cursor, email, password) is False
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "email,password",
+ (("first@test.user", "wrongpassword"),
+ ("first@tes.user", "testuser01")))
+def test_partially_wrong_credentials(with_credentials, email, password):# pylint: disable=redefined-outer-name
+ """
+ GIVEN: credentials that exist in the database
+ WHEN: the credentials are checked with partially wrong values
+ THEN: the check fails since the credentials are not correct
+ """
+ with db.cursor(with_credentials) as cursor:
+ assert credentials_in_database(cursor, email, password) is False
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "email,password",
+ (("first@test.user", "testuser01"),
+ ("second@test.user", "testuser02")))
+def test_correct_credentials(with_credentials, email, password):# pylint: disable=redefined-outer-name
+ """
+ GIVEN: credentials that exist in the database
+ WHEN: the credentials are checked with correct values
+ THEN: the check passes
+ """
+ with db.cursor(with_credentials) as cursor:
+ assert credentials_in_database(cursor, email, password) is True
diff --git a/tests/unit/auth/test_groups.py b/tests/unit/auth/test_groups.py
new file mode 100644
index 0000000..4824e14
--- /dev/null
+++ b/tests/unit/auth/test_groups.py
@@ -0,0 +1,168 @@
+"""Test functions dealing with group management."""
+from uuid import UUID
+
+import pytest
+from pymonad.maybe import Nothing
+
+from gn3.auth import db
+from gn3.auth.authentication.users import User
+from gn3.auth.authorisation.roles import Role
+from gn3.auth.authorisation.privileges import Privilege
+from gn3.auth.authorisation.errors import AuthorisationError
+from gn3.auth.authorisation.groups.models import (
+ Group, GroupRole, user_group, create_group, create_group_role)
+
+from tests.unit.auth import conftest
+
+create_group_failure = {
+ "status": "error",
+ "message": "Unauthorised: Failed to create group."
+}
+
+uuid_fn = lambda : UUID("d32611e3-07fc-4564-b56c-786c6db6de2b")
+
+GROUP = Group(UUID("9988c21d-f02f-4d45-8966-22c968ac2fbf"), "TheTestGroup",
+ {"group_description": "The test group"})
+PRIVILEGES = (
+ Privilege(
+ "group:resource:view-resource",
+ "view a resource and use it in computations"),
+ Privilege("group:resource:edit-resource", "edit/update a resource"))
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "user,expected", tuple(zip(conftest.TEST_USERS[0:1], (
+ Group(
+ UUID("d32611e3-07fc-4564-b56c-786c6db6de2b"), "a_test_group",
+ {"group_description": "A test group"}),
+ create_group_failure, create_group_failure, create_group_failure,
+ create_group_failure))))
+def test_create_group(# pylint: disable=[too-many-arguments]
+ fxtr_app, auth_testdb_path, mocker, fxtr_users, user, expected):# pylint: disable=[unused-argument]
+ """
+ GIVEN: an authenticated user
+ WHEN: the user attempts to create a group
+ THEN: verify they are only able to create the group if they have the
+ appropriate privileges
+ """
+ mocker.patch("gn3.auth.authorisation.groups.models.uuid4", uuid_fn)
+ mocker.patch("gn3.auth.authorisation.checks.require_oauth.acquire",
+ conftest.get_tokeniser(user))
+ with db.connection(auth_testdb_path) as conn:
+ assert create_group(
+ conn, "a_test_group", user, "A test group") == expected
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize("user", conftest.TEST_USERS[1:])
+def test_create_group_raises_exception_with_non_privileged_user(# pylint: disable=[too-many-arguments]
+ fxtr_app, auth_testdb_path, mocker, fxtr_users, user):# pylint: disable=[unused-argument]
+ """
+ GIVEN: an authenticated user, without appropriate privileges
+ WHEN: the user attempts to create a group
+ THEN: verify the system raises an exception
+ """
+ mocker.patch("gn3.auth.authorisation.groups.models.uuid4", uuid_fn)
+ mocker.patch("gn3.auth.authorisation.checks.require_oauth.acquire",
+ conftest.get_tokeniser(user))
+ with db.connection(auth_testdb_path) as conn:
+ with pytest.raises(AuthorisationError):
+ assert create_group(conn, "a_test_group", user, "A test group")
+
+create_role_failure = {
+ "status": "error",
+ "message": "Unauthorised: Could not create the group role"
+}
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "user,expected", tuple(zip(conftest.TEST_USERS[0:1], (
+ GroupRole(
+ UUID("d32611e3-07fc-4564-b56c-786c6db6de2b"),
+ GROUP,
+ Role(UUID("d32611e3-07fc-4564-b56c-786c6db6de2b"),
+ "ResourceEditor", True, PRIVILEGES)),))))
+def test_create_group_role(mocker, fxtr_users_in_group, user, expected):
+ """
+ GIVEN: an authenticated user
+ WHEN: the user attempts to create a role, attached to a group
+ THEN: verify they are only able to create the role if they have the
+ appropriate privileges and that the role is attached to the given group
+ """
+ mocker.patch("gn3.auth.authorisation.groups.models.uuid4", uuid_fn)
+ mocker.patch("gn3.auth.authorisation.roles.models.uuid4", uuid_fn)
+ mocker.patch("gn3.auth.authorisation.checks.require_oauth.acquire",
+ conftest.get_tokeniser(user))
+ conn, _group, _users = fxtr_users_in_group
+ with db.cursor(conn) as cursor:
+ assert create_group_role(
+ conn, GROUP, "ResourceEditor", PRIVILEGES) == expected
+ # cleanup
+ cursor.execute(
+ ("DELETE FROM group_roles "
+ "WHERE group_role_id=? AND group_id=? AND role_id=?"),
+ (str(uuid_fn()), str(GROUP.group_id), str(uuid_fn())))
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "user,expected", tuple(zip(conftest.TEST_USERS[1:], (
+ create_role_failure, create_role_failure, create_role_failure))))
+def test_create_group_role_raises_exception_with_unauthorised_users(
+ mocker, fxtr_users_in_group, user, expected):
+ """
+ GIVEN: an authenticated user without the appropriate privileges
+ WHEN: the user attempts to create a role, attached to a group
+ THEN: verify the system raises an AuthorisationError and the role is not
+ created
+ """
+ mocker.patch("gn3.auth.authorisation.groups.models.uuid4", uuid_fn)
+ mocker.patch("gn3.auth.authorisation.roles.models.uuid4", uuid_fn)
+ mocker.patch("gn3.auth.authorisation.checks.require_oauth.acquire",
+ conftest.get_tokeniser(user))
+ conn, _group, _users = fxtr_users_in_group
+ with pytest.raises(AuthorisationError):
+ assert create_group_role(
+ conn, GROUP, "ResourceEditor", PRIVILEGES) == expected
+
+@pytest.mark.unit_test
+def test_create_multiple_groups(mocker, fxtr_users):
+ """
+ GIVEN: An authenticated user with appropriate authorisation
+ WHEN: The user attempts to create a new group, while being a member of an
+ existing group
+ THEN: The system should prevent that, and respond with an appropriate error
+ message
+ """
+ mocker.patch("gn3.auth.authorisation.groups.models.uuid4", uuid_fn)
+ user = User(
+ UUID("ecb52977-3004-469e-9428-2a1856725c7f"), "group@lead.er",
+ "Group Leader")
+ mocker.patch("gn3.auth.authorisation.checks.require_oauth.acquire",
+ conftest.get_tokeniser(user))
+ conn, _test_users = fxtr_users
+ # First time, successfully creates the group
+ assert create_group(conn, "a_test_group", user) == Group(
+ UUID("d32611e3-07fc-4564-b56c-786c6db6de2b"), "a_test_group",
+ {})
+ # subsequent attempts should fail
+ with pytest.raises(AuthorisationError):
+ create_group(conn, "another_test_group", user)
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "user,expected",
+ tuple(zip(
+ conftest.TEST_USERS,
+ (([Group(UUID("9988c21d-f02f-4d45-8966-22c968ac2fbf"), "TheTestGroup", {})] * 3)
+ + [Nothing]))))
+def test_user_group(fxtr_users_in_group, user, expected):
+ """
+ GIVEN: A bunch of registered users, some of whom are members of a group, and
+ others are not
+ WHEN: a particular user's group is requested,
+ THEN: return a Maybe containing the group that the user belongs to, or
+ Nothing
+ """
+ conn, _group, _users = fxtr_users_in_group
+ assert (
+ user_group(conn, user).maybe(Nothing, lambda val: val)
+ == expected)
diff --git a/tests/unit/auth/test_migrations_add_data_to_table.py b/tests/unit/auth/test_migrations_add_data_to_table.py
new file mode 100644
index 0000000..9cb5d0c
--- /dev/null
+++ b/tests/unit/auth/test_migrations_add_data_to_table.py
@@ -0,0 +1,79 @@
+"""Test data insertion when migrations are run."""
+import pytest
+
+from gn3.auth import db
+from gn3.migrations import get_migration, apply_migrations, rollback_migrations
+from tests.unit.auth.conftest import (
+ apply_single_migration, rollback_single_migration, migrations_up_to)
+
+test_params = (
+ ("20221116_01_nKUmX-add-privileges-to-group-leader-role.py",
+ ("SELECT role_id, privilege_id FROM role_privileges "
+ "WHERE role_id=? AND privilege_id IN (?, ?, ?, ?)"),
+ ("a0e67630-d502-4b9f-b23f-6805d0f30e30",
+ "221660b1-df05-4be1-b639-f010269dbda9",
+ "7bcca363-cba9-4169-9e31-26bdc6179b28",
+ "5103cc68-96f8-4ebb-83a4-a31692402c9b",
+ "1c59eff5-9336-4ed2-a166-8f70d4cb012e"),
+ (("a0e67630-d502-4b9f-b23f-6805d0f30e30",
+ "221660b1-df05-4be1-b639-f010269dbda9"),
+ ("a0e67630-d502-4b9f-b23f-6805d0f30e30",
+ "7bcca363-cba9-4169-9e31-26bdc6179b28"),
+ ("a0e67630-d502-4b9f-b23f-6805d0f30e30",
+ "5103cc68-96f8-4ebb-83a4-a31692402c9b"),
+ ("a0e67630-d502-4b9f-b23f-6805d0f30e30",
+ "1c59eff5-9336-4ed2-a166-8f70d4cb012e"))),)
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize("migration_file,query,query_params,data", test_params)
+def test_apply_insert(# pylint: disable=[too-many-arguments]
+ auth_migrations_dir, backend, auth_testdb_path, migration_file, query,
+ query_params, data):
+ """
+ GIVEN: a database migration script
+ WHEN: the script is applied
+ THEN: ensure the given data exists in the table
+ """
+ migration_path=f"{auth_migrations_dir}/{migration_file}"
+ older_migrations = migrations_up_to(migration_path, auth_migrations_dir)
+ the_migration = get_migration(migration_path)
+ apply_migrations(backend, older_migrations)
+ with db.connection(auth_testdb_path, None) as conn, db.cursor(conn) as cursor:
+ cursor.execute(query, query_params)
+ result_before_migration = cursor.fetchall()
+ apply_single_migration(backend, the_migration)
+ cursor.execute(query, query_params)
+ result_after_migration = cursor.fetchall()
+
+ rollback_migrations(backend, older_migrations + [the_migration])
+ assert len(result_before_migration) == 0, "Expected no results before migration"
+ assert sorted(result_after_migration) == sorted(data)
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize("migration_file,query,query_params,data", test_params)
+def test_rollback_insert(# pylint: disable=[too-many-arguments]
+ auth_migrations_dir, backend, auth_testdb_path, migration_file, query,
+ query_params, data):
+ """
+ GIVEN: a database migration script
+ WHEN: the script is rolled back
+ THEN: ensure the given data no longer exists in the database
+ """
+ migration_path=f"{auth_migrations_dir}/{migration_file}"
+ older_migrations = migrations_up_to(migration_path, auth_migrations_dir)
+ the_migration = get_migration(migration_path)
+ apply_migrations(backend, older_migrations)
+ with db.connection(auth_testdb_path, None) as conn, db.cursor(conn) as cursor:
+ cursor.execute(query, query_params)
+ result_before_migration = cursor.fetchall()
+ apply_single_migration(backend, the_migration)
+ cursor.execute(query, query_params)
+ result_after_migration = cursor.fetchall()
+ rollback_single_migration(backend, the_migration)
+ cursor.execute(query, query_params)
+ result_after_rollback = cursor.fetchall()
+
+ rollback_migrations(backend, older_migrations)
+ assert len(result_before_migration) == 0, "Expected no results before migration"
+ assert sorted(result_after_migration) == sorted(data)
+ assert len(result_after_rollback) == 0, "Expected no results after rollback"
diff --git a/tests/unit/auth/test_migrations_add_remove_columns.py b/tests/unit/auth/test_migrations_add_remove_columns.py
new file mode 100644
index 0000000..ea9bf7b
--- /dev/null
+++ b/tests/unit/auth/test_migrations_add_remove_columns.py
@@ -0,0 +1,116 @@
+"""Test migrations that alter tables adding/removing columns."""
+import pytest
+
+from gn3.auth import db
+from gn3.migrations import get_migration, apply_migrations, rollback_migrations
+from tests.unit.auth.conftest import (
+ apply_single_migration, rollback_single_migration, migrations_up_to)
+
+QUERY = "SELECT sql FROM sqlite_schema WHERE name=?"
+
+TEST_PARAMS = (
+ ("20221109_01_HbD5F-add-resource-meta-field-to-resource-categories-field.py",
+ "resource_categories", "resource_meta TEXT", True),
+ (("20221110_08_23psB-add-privilege-category-and-privilege-description-"
+ "columns-to-privileges-table.py"),
+ "privileges", "privilege_category TEXT", True),
+ (("20221110_08_23psB-add-privilege-category-and-privilege-description-"
+ "columns-to-privileges-table.py"),
+ "privileges", "privilege_description TEXT", True),
+ ("20221117_01_RDlfx-modify-group-roles-add-group-role-id.py", "group_roles",
+ "group_role_id", True),
+ ("20221208_01_sSdHz-add-public-column-to-resources-table.py", "resources",
+ "public", True))
+
+def found(haystack: str, needle: str) -> bool:
+ """Check whether `needle` is found in `haystack`"""
+ return any(
+ (line.strip().find(needle) >= 0) for line in haystack.split("\n"))
+
+def pristine_before_migration(adding: bool, result_str: str, column: str) -> bool:
+ """Check that database is pristine before running the migration"""
+ col_was_found = found(result_str, column)
+ if adding:
+ return not col_was_found
+ return col_was_found
+
+def applied_successfully(adding: bool, result_str: str, column: str) -> bool:
+ """Check that the migration ran successfully"""
+ col_was_found = found(result_str, column)
+ if adding:
+ return col_was_found
+ return not col_was_found
+
+def rolled_back_successfully(adding: bool, result_str: str, column: str) -> bool:
+ """Check that the migration ran successfully"""
+ col_was_found = found(result_str, column)
+ if adding:
+ return not col_was_found
+ return col_was_found
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "migration_file,the_table,the_column,adding", TEST_PARAMS)
+def test_apply_add_remove_column(# pylint: disable=[too-many-arguments]
+ auth_migrations_dir, auth_testdb_path, backend, migration_file,
+ the_table, the_column, adding):
+ """
+ GIVEN: A migration that alters a table, adding or removing a column
+ WHEN: The migration is applied
+ THEN: Ensure the column exists if `adding` is True, otherwise, ensure the
+ column has been dropped
+ """
+ migration_path = f"{auth_migrations_dir}/{migration_file}"
+ older_migrations = migrations_up_to(migration_path, auth_migrations_dir)
+ the_migration = get_migration(migration_path)
+ apply_migrations(backend, older_migrations)
+ with db.connection(auth_testdb_path) as conn, db.cursor(conn) as cursor:
+ cursor.execute(QUERY, (the_table,))
+ results_before_migration = cursor.fetchone()
+ apply_single_migration(backend, the_migration)
+ cursor.execute(QUERY, (the_table,))
+ results_after_migration = cursor.fetchone()
+
+ rollback_migrations(backend, older_migrations + [the_migration])
+
+ assert pristine_before_migration(
+ adding, results_before_migration[0], the_column), (
+ f"Column `{the_column}` exists before migration and should not"
+ if adding else
+ f"Column `{the_column}` doesn't exist before migration and it should")
+ assert applied_successfully(
+ adding, results_after_migration[0], the_column), "Migration failed"
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "migration_file,the_table,the_column,adding", TEST_PARAMS)
+def test_rollback_add_remove_column(# pylint: disable=[too-many-arguments]
+ auth_migrations_dir, auth_testdb_path, backend, migration_file,
+ the_table, the_column, adding):
+ """
+ GIVEN: A migration that alters a table, adding or removing a column
+ WHEN: The migration is rolled back
+ THEN: Ensure the column is dropped if `adding` is True, otherwise, ensure
+ the column has been restored
+ """
+ migration_path = f"{auth_migrations_dir}/{migration_file}"
+ older_migrations = migrations_up_to(migration_path, auth_migrations_dir)
+ the_migration = get_migration(migration_path)
+ apply_migrations(backend, older_migrations)
+ apply_single_migration(backend, the_migration)
+ with db.connection(auth_testdb_path) as conn, db.cursor(conn) as cursor:
+ cursor.execute(QUERY, (the_table,))
+ results_before_rollback = cursor.fetchone()
+ rollback_single_migration(backend, the_migration)
+ cursor.execute(QUERY, (the_table,))
+ results_after_rollback = cursor.fetchone()
+
+ rollback_migrations(backend, older_migrations + [the_migration])
+
+ assert pristine_before_migration(
+ not adding, results_before_rollback[0], the_column), (
+ f"Column `{the_column}` doesn't exist before rollback and it should"
+ if adding else
+ f"Column `{the_column}` exists before rollback and should not")
+ assert rolled_back_successfully(
+ adding, results_after_rollback[0], the_column), "Rollback failed"
diff --git a/tests/unit/auth/test_migrations_create_tables.py b/tests/unit/auth/test_migrations_create_tables.py
new file mode 100644
index 0000000..2b8140b
--- /dev/null
+++ b/tests/unit/auth/test_migrations_create_tables.py
@@ -0,0 +1,91 @@
+"""Test migrations that create tables"""
+import pytest
+
+from gn3.auth import db
+from gn3.migrations import get_migration, apply_migrations, rollback_migrations
+from tests.unit.auth.conftest import (
+ apply_single_migration, rollback_single_migration, migrations_up_to)
+
+migrations_and_tables = (
+ ("20221103_01_js9ub-initialise-the-auth-entic-oris-ation-database.py",
+ "users"),
+ ("20221103_02_sGrIs-create-user-credentials-table.py", "user_credentials"),
+ ("20221108_01_CoxYh-create-the-groups-table.py", "groups"),
+ ("20221108_02_wxTr9-create-privileges-table.py", "privileges"),
+ ("20221108_03_Pbhb1-create-resource-categories-table.py", "resource_categories"),
+ ("20221110_01_WtZ1I-create-resources-table.py", "resources"),
+ ("20221110_05_BaNtL-create-roles-table.py", "roles"),
+ ("20221110_06_Pq2kT-create-generic-roles-table.py", "generic_roles"),
+ ("20221110_07_7WGa1-create-role-privileges-table.py", "role_privileges"),
+ ("20221114_01_n8gsF-create-generic-role-privileges-table.py",
+ "generic_role_privileges"),
+ ("20221114_03_PtWjc-create-group-roles-table.py", "group_roles"),
+ ("20221114_05_hQun6-create-user-roles-table.py", "user_roles"),
+ ("20221117_02_fmuZh-create-group-users-table.py", "group_users"),
+ ("20221206_01_BbeF9-create-group-user-roles-on-resources-table.py",
+ "group_user_roles_on_resources"),
+ ("20221219_01_CI3tN-create-oauth2-clients-table.py", "oauth2_clients"),
+ ("20221219_02_buSEU-create-oauth2-tokens-table.py", "oauth2_tokens"),
+ ("20221219_03_PcTrb-create-authorisation-code-table.py",
+ "authorisation_code"),
+ ("20230207_01_r0bkZ-create-group-join-requests-table.py",
+ "group_join_requests"),
+ ("20230322_01_0dDZR-create-linked-phenotype-data-table.py",
+ "linked_phenotype_data"),
+ ("20230322_02_Ll854-create-phenotype-resources-table.py",
+ "phenotype_resources"),
+ ("20230404_01_VKxXg-create-linked-genotype-data-table.py",
+ "linked_genotype_data"),
+ ("20230404_02_la33P-create-genotype-resources-table.py",
+ "genotype_resources"),
+ ("20230410_01_8mwaf-create-linked-mrna-data-table.py", "linked_mrna_data"),
+ ("20230410_02_WZqSf-create-mrna-resources-table.py", "mrna_resources"))
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize("migration_file,the_table", migrations_and_tables)
+def test_create_table(
+ auth_testdb_path, auth_migrations_dir, backend, migration_file,
+ the_table):
+ """
+    GIVEN: A database migration script to create the table `the_table`
+ WHEN: The migration is applied
+ THEN: Ensure that the table `the_table` is created
+ """
+    migration_path = f"{auth_migrations_dir}/{migration_file}"
+ older_migrations = migrations_up_to(migration_path, auth_migrations_dir)
+ apply_migrations(backend, older_migrations)
+ with db.connection(auth_testdb_path) as conn, db.cursor(conn) as cursor:
+ cursor.execute("SELECT name FROM sqlite_schema WHERE type='table'")
+ result_before_migration = cursor.fetchall()
+ apply_single_migration(backend, get_migration(migration_path))
+ cursor.execute("SELECT name FROM sqlite_schema WHERE type='table'")
+ result_after_migration = cursor.fetchall()
+
+ rollback_migrations(backend, older_migrations)
+ assert the_table not in [row[0] for row in result_before_migration]
+ assert the_table in [row[0] for row in result_after_migration]
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize("migration_file,the_table", migrations_and_tables)
+def test_rollback_create_table(
+ auth_testdb_path, auth_migrations_dir, backend, migration_file,
+ the_table):
+ """
+ GIVEN: A database migration script to create the table `the_table`
+ WHEN: The migration is rolled back
+ THEN: Ensure that the table `the_table` no longer exists
+ """
+    migration_path = f"{auth_migrations_dir}/{migration_file}"
+ older_migrations = migrations_up_to(migration_path, auth_migrations_dir)
+ apply_migrations(backend, older_migrations)
+ with db.connection(auth_testdb_path) as conn, db.cursor(conn) as cursor:
+ apply_single_migration(backend, get_migration(migration_path))
+ cursor.execute("SELECT name FROM sqlite_schema WHERE type='table'")
+ result_after_migration = cursor.fetchall()
+ rollback_single_migration(backend, get_migration(migration_path))
+ cursor.execute("SELECT name FROM sqlite_schema WHERE type='table'")
+ result_after_rollback = cursor.fetchall()
+
+ rollback_migrations(backend, older_migrations)
+ assert the_table in [row[0] for row in result_after_migration]
+ assert the_table not in [row[0] for row in result_after_rollback]
diff --git a/tests/unit/auth/test_migrations_drop_tables.py b/tests/unit/auth/test_migrations_drop_tables.py
new file mode 100644
index 0000000..2362c77
--- /dev/null
+++ b/tests/unit/auth/test_migrations_drop_tables.py
@@ -0,0 +1,63 @@
+"""Test migrations that create tables"""
+
+import pytest
+
+from gn3.auth import db
+from gn3.migrations import get_migration, apply_migrations, rollback_migrations
+from tests.unit.auth.conftest import (
+ apply_single_migration, rollback_single_migration, migrations_up_to)
+
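+# Pairs of (migration file, table the migration is expected to drop).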
+test_params = (
+ ("20221114_02_DKKjn-drop-generic-role-tables.py", "generic_roles"),
+ ("20221114_02_DKKjn-drop-generic-role-tables.py", "generic_role_privileges"))
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize("migration_file,the_table", test_params)
+def test_drop_table(
+ auth_testdb_path, auth_migrations_dir, backend,
+ migration_file, the_table):
+ """
+    GIVEN: A database migration script that drops the table `the_table`
+    WHEN: The migration is applied
+    THEN: Ensure that the table `the_table` is dropped
+ """
+    migration_path = f"{auth_migrations_dir}/{migration_file}"
+ older_migrations = migrations_up_to(migration_path, auth_migrations_dir)
+ the_migration = get_migration(migration_path)
+ apply_migrations(backend, older_migrations)
+ with db.connection(auth_testdb_path) as conn, db.cursor(conn) as cursor:
+ cursor.execute("SELECT name FROM sqlite_schema WHERE type='table'")
+ result_before_migration = cursor.fetchall()
+ apply_single_migration(backend, the_migration)
+ cursor.execute("SELECT name FROM sqlite_schema WHERE type='table'")
+ result_after_migration = cursor.fetchall()
+
+ rollback_migrations(backend, older_migrations + [the_migration])
+ assert the_table in [row[0] for row in result_before_migration]
+ assert the_table not in [row[0] for row in result_after_migration]
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize("migration_file,the_table", test_params)
+def test_rollback_drop_table(
+ auth_testdb_path, auth_migrations_dir, backend, migration_file,
+ the_table):
+ """
+    GIVEN: A database migration script that drops the table `the_table`
+    WHEN: The migration is rolled back
+    THEN: Ensure that the table `the_table` is restored
+ """
+    migration_path = f"{auth_migrations_dir}/{migration_file}"
+ older_migrations = migrations_up_to(migration_path, auth_migrations_dir)
+ the_migration = get_migration(migration_path)
+ apply_migrations(backend, older_migrations)
+ with db.connection(auth_testdb_path) as conn, db.cursor(conn) as cursor:
+ apply_single_migration(backend, the_migration)
+ cursor.execute("SELECT name FROM sqlite_schema WHERE type='table'")
+ result_after_migration = cursor.fetchall()
+ rollback_single_migration(backend, the_migration)
+ cursor.execute("SELECT name FROM sqlite_schema WHERE type='table'")
+ result_after_rollback = cursor.fetchall()
+
+ rollback_migrations(backend, older_migrations)
+ assert the_table not in [row[0] for row in result_after_migration]
+ assert the_table in [row[0] for row in result_after_rollback]
diff --git a/tests/unit/auth/test_migrations_indexes.py b/tests/unit/auth/test_migrations_indexes.py
new file mode 100644
index 0000000..b1f06d9
--- /dev/null
+++ b/tests/unit/auth/test_migrations_indexes.py
@@ -0,0 +1,97 @@
+"""Test that indexes are created and removed."""
+import pytest
+
+from gn3.auth import db
+from gn3.migrations import get_migration, apply_migrations, rollback_migrations
+from tests.unit.auth.conftest import (
+ apply_single_migration, rollback_single_migration, migrations_up_to)
+
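+# Fetch the named index for the given table from SQLite's schema catalogue.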
+QUERY = """
+SELECT name FROM sqlite_master WHERE type='index' AND tbl_name = ?
+AND name= ?
+"""
+
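+# Triples of (migration file, table, index the migration should create).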
+migrations_tables_and_indexes = (
+ ("20221110_07_7WGa1-create-role-privileges-table.py", "role_privileges",
+ "idx_tbl_role_privileges_cols_role_id"),
+ ("20221114_01_n8gsF-create-generic-role-privileges-table.py",
+ "generic_role_privileges",
+ "idx_tbl_generic_role_privileges_cols_generic_role_id"),
+ ("20221114_03_PtWjc-create-group-roles-table.py", "group_roles",
+ "idx_tbl_group_roles_cols_group_id"),
+ ("20221114_05_hQun6-create-user-roles-table.py", "user_roles",
+ "idx_tbl_user_roles_cols_user_id"),
+ ("20221117_02_fmuZh-create-group-users-table.py", "group_users",
+ "tbl_group_users_cols_group_id"),
+ ("20221206_01_BbeF9-create-group-user-roles-on-resources-table.py",
+ "group_user_roles_on_resources",
+ "idx_tbl_group_user_roles_on_resources_group_user_resource"))
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "migration_file,the_table,the_index", migrations_tables_and_indexes)
+def test_index_created(# pylint: disable=[too-many-arguments]
+ auth_testdb_path, auth_migrations_dir, backend, migration_file,
+ the_table, the_index):
+ """
+ GIVEN: A database migration
+ WHEN: The migration is applied
+ THEN: Ensure the given index is created for the provided table
+ """
+    migration_path = f"{auth_migrations_dir}/{migration_file}"
+ older_migrations = migrations_up_to(migration_path, auth_migrations_dir)
+ the_migration = get_migration(migration_path)
+ query_params = (the_table, the_index)
+ apply_migrations(backend, older_migrations)
+ with db.connection(auth_testdb_path) as conn, db.cursor(conn) as cursor:
+ cursor.execute(QUERY, query_params)
+ result_before_migration = cursor.fetchall()
+ apply_single_migration(backend, the_migration)
+ cursor.execute(QUERY, query_params)
+ result_after_migration = cursor.fetchall()
+
+ rollback_migrations(backend, older_migrations + [the_migration])
+ assert the_index not in [row[0] for row in result_before_migration], (
+ f"Index '{the_index}' was found for table '{the_table}' before migration.")
+ assert (
+ len(result_after_migration) == 1
+ and result_after_migration[0][0] == the_index), (
+ f"Index '{the_index}' was not found for table '{the_table}' after migration.")
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "migration_file,the_table,the_index", migrations_tables_and_indexes)
+def test_index_dropped(# pylint: disable=[too-many-arguments]
+ auth_testdb_path, auth_migrations_dir, backend, migration_file,
+ the_table, the_index):
+ """
+ GIVEN: A database migration
+    WHEN: The migration is rolled back
+ THEN: Ensure the given index no longer exists for the given table
+ """
+    migration_path = f"{auth_migrations_dir}/{migration_file}"
+ older_migrations = migrations_up_to(migration_path, auth_migrations_dir)
+ the_migration = get_migration(migration_path)
+ query_params = (the_table, the_index)
+ apply_migrations(backend, older_migrations)
+ with db.connection(auth_testdb_path) as conn, db.cursor(conn) as cursor:
+ cursor.execute(QUERY, query_params)
+ result_before_migration = cursor.fetchall()
+ apply_single_migration(backend, the_migration)
+ cursor.execute(QUERY, query_params)
+ result_after_migration = cursor.fetchall()
+ rollback_single_migration(backend, the_migration)
+ cursor.execute(QUERY, query_params)
+ result_after_rollback = cursor.fetchall()
+
+ rollback_migrations(backend, older_migrations)
+ assert the_index not in [row[0] for row in result_before_migration], (
+ f"Index '{the_index}' was found for table '{the_table}' before "
+ "migration")
+ assert (
+ len(result_after_migration) == 1
+ and result_after_migration[0][0] == the_index), (
+ f"Index '{the_index}' was not found for table '{the_table}' after migration.")
+ assert the_index not in [row[0] for row in result_after_rollback], (
+ f"Index '{the_index}' was found for table '{the_table}' after "
+ "rollback")
diff --git a/tests/unit/auth/test_migrations_init_data_in_resource_categories_table.py b/tests/unit/auth/test_migrations_init_data_in_resource_categories_table.py
new file mode 100644
index 0000000..dd3d4c6
--- /dev/null
+++ b/tests/unit/auth/test_migrations_init_data_in_resource_categories_table.py
@@ -0,0 +1,60 @@
+"""
+Test that the `resource_categories` table is initialised with the startup data.
+"""
+import pytest
+
+from gn3.auth import db
+from gn3.migrations import get_migration, apply_migrations, rollback_migrations
+from tests.unit.auth.conftest import (
+ apply_single_migration, rollback_single_migration, migrations_up_to)
+
+MIGRATION_PATH = "migrations/auth/20221108_04_CKcSL-init-data-in-resource-categories-table.py"
+
+@pytest.mark.unit_test
+def test_apply_init_data(auth_testdb_path, auth_migrations_dir, backend):
+ """
+ GIVEN: A migration script
+ WHEN: The migration is applied
+ THEN: Verify that the expected data exists in the table
+ """
+ older_migrations = migrations_up_to(MIGRATION_PATH, auth_migrations_dir)
+ the_migration = get_migration(MIGRATION_PATH)
+ apply_migrations(backend, older_migrations)
+ with db.connection(auth_testdb_path, None) as conn, db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM resource_categories")
+ assert len(cursor.fetchall()) == 0, "Expected empty table."
+ apply_single_migration(backend, the_migration)
+ cursor.execute("SELECT * FROM resource_categories")
+ results = cursor.fetchall()
+ assert len(results) == 3, "Expected 3 rows of data."
+ assert sorted(results) == sorted((
+ ('fad071a3-2fc8-40b8-992b-cdefe7dcac79', 'mrna', 'mRNA Dataset'),
+ ('548d684b-d4d1-46fb-a6d3-51a56b7da1b3', 'phenotype',
+ 'Phenotype (Publish) Dataset'),
+ ('48056f84-a2a6-41ac-8319-0e1e212cba2a', 'genotype',
+ 'Genotype Dataset')))
+
+ rollback_migrations(backend, older_migrations + [the_migration])
+
+@pytest.mark.unit_test
+def test_rollback_init_data(auth_testdb_path, auth_migrations_dir, backend):
+ """
+ GIVEN: A migration script
+ WHEN: The migration is rolled back
+ THEN: Verify that the table is empty
+ """
+ older_migrations = migrations_up_to(MIGRATION_PATH, auth_migrations_dir)
+ the_migration = get_migration(MIGRATION_PATH)
+ apply_migrations(backend, older_migrations)
+ with db.connection(auth_testdb_path, None) as conn, db.cursor(conn) as cursor:
+ cursor.execute("SELECT * FROM resource_categories")
+ assert len(cursor.fetchall()) == 0, "Expected empty table."
+ apply_single_migration(backend, the_migration)
+ cursor.execute("SELECT * FROM resource_categories")
+ results = cursor.fetchall()
+ assert len(results) == 3, "Expected 3 rows of data."
+ rollback_single_migration(backend, the_migration)
+ cursor.execute("SELECT * FROM resource_categories")
+ assert len(cursor.fetchall()) == 0, "Expected empty table."
+
+ rollback_migrations(backend, older_migrations)
diff --git a/tests/unit/auth/test_migrations_insert_data_into_empty_table.py b/tests/unit/auth/test_migrations_insert_data_into_empty_table.py
new file mode 100644
index 0000000..ebb7fa6
--- /dev/null
+++ b/tests/unit/auth/test_migrations_insert_data_into_empty_table.py
@@ -0,0 +1,77 @@
+"""Test data insertion when migrations are run."""
+import sqlite3
+from contextlib import closing
+
+import pytest
+
+from gn3.migrations import get_migration, apply_migrations, rollback_migrations
+from tests.unit.auth.conftest import (
+ apply_single_migration, rollback_single_migration, migrations_up_to)
+
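+# Triples of (migration file, table populated, number of rows inserted).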
+test_params = (
+ ("20221113_01_7M0hv-enumerate-initial-privileges.py", "privileges", 19),
+ ("20221114_04_tLUzB-initialise-basic-roles.py", "roles", 2),
+ ("20221114_04_tLUzB-initialise-basic-roles.py", "role_privileges", 15))
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "migration_file,table,row_count", test_params)
+def test_apply_insert(# pylint: disable=[too-many-arguments]
+ auth_testdb_path, auth_migrations_dir, backend, migration_file,
+ table, row_count):
+ """
+ GIVEN: A database migration
+ WHEN: The migration is applied
+ THEN: Ensure the given number of rows are inserted into the table
+ """
+    migration_path = f"{auth_migrations_dir}/{migration_file}"
+ older_migrations = migrations_up_to(migration_path, auth_migrations_dir)
+ the_migration = get_migration(migration_path)
+ apply_migrations(backend, older_migrations)
+ with closing(sqlite3.connect(auth_testdb_path)) as conn, closing(conn.cursor()) as cursor:
+ query = f"SELECT COUNT(*) FROM {table}"
+ cursor.execute(query)
+ result_before_migration = cursor.fetchall()
+ apply_single_migration(backend, the_migration)
+ cursor.execute(query)
+ result_after_migration = cursor.fetchall()
+
+    rollback_migrations(backend, older_migrations + [the_migration])
+ assert result_before_migration[0][0] == 0, (
+ "Expected empty table before initialisation")
+ assert result_after_migration[0][0] == row_count, (
+ f"Expected {row_count} rows")
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "migration_file,table,row_count", test_params)
+def test_rollback_insert(# pylint: disable=[too-many-arguments]
+ auth_testdb_path, auth_migrations_dir, backend, migration_file,
+ table, row_count):
+ """
+ GIVEN: A database migration
+    WHEN: The migration is rolled back
+    THEN: Ensure the rows inserted by the migration are removed from the table
+ """
+    migration_path = f"{auth_migrations_dir}/{migration_file}"
+ older_migrations = migrations_up_to(migration_path, auth_migrations_dir)
+ the_migration = get_migration(migration_path)
+ apply_migrations(backend, older_migrations)
+ with closing(sqlite3.connect(auth_testdb_path)) as conn, closing(conn.cursor()) as cursor:
+ query = f"SELECT COUNT(*) FROM {table}"
+ cursor.execute(query)
+ result_before_migration = cursor.fetchall()
+ apply_single_migration(backend, the_migration)
+ cursor.execute(query)
+ result_after_migration = cursor.fetchall()
+ rollback_single_migration(backend, the_migration)
+ cursor.execute(query)
+ result_after_rollback = cursor.fetchall()
+
+ rollback_migrations(backend, older_migrations)
+ assert result_before_migration[0][0] == 0, (
+ "Expected empty table before initialisation")
+ assert result_after_migration[0][0] == row_count, (
+ f"Expected {row_count} rows")
+ assert result_after_rollback[0][0] == 0, (
+ "Expected empty table after rollback")
diff --git a/tests/unit/auth/test_privileges.py b/tests/unit/auth/test_privileges.py
new file mode 100644
index 0000000..8395293
--- /dev/null
+++ b/tests/unit/auth/test_privileges.py
@@ -0,0 +1,46 @@
+"""Test the privileges module"""
+import pytest
+
+from gn3.auth import db
+from gn3.auth.authorisation.privileges import Privilege, user_privileges
+
+from tests.unit.auth import conftest
+
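+# Sort privileges by identifier so comparisons do not depend on ordering.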
+SORT_KEY = lambda x: x.privilege_id
+
+PRIVILEGES = sorted(
+ (Privilege("system:group:create-group", "Create a group"),
+ Privilege("system:group:view-group", "View the details of a group"),
+ Privilege("system:group:edit-group", "Edit the details of a group"),
+ Privilege("system:user:list", "List users in the system"),
+ Privilege("system:group:delete-group", "Delete a group"),
+ Privilege("group:user:add-group-member", "Add a user to a group"),
+ Privilege("group:user:remove-group-member", "Remove a user from a group"),
+ Privilege("system:group:transfer-group-leader",
+ "Transfer leadership of the group to some other member"),
+
+ Privilege("group:resource:create-resource", "Create a resource object"),
+ Privilege("group:resource:view-resource",
+ "view a resource and use it in computations"),
+ Privilege("group:resource:edit-resource", "edit/update a resource"),
+ Privilege("group:resource:delete-resource", "Delete a resource"),
+
+ Privilege("group:role:create-role", "Create a new role"),
+ Privilege("group:role:edit-role", "edit/update an existing role"),
+ Privilege("group:user:assign-role", "Assign a role to an existing user"),
+ Privilege("group:role:delete-role", "Delete an existing role")),
+ key=SORT_KEY)
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "user,expected", tuple(zip(
+ conftest.TEST_USERS, (PRIVILEGES, [], [], [], []))))
+def test_user_privileges(auth_testdb_path, fxtr_users, user, expected):# pylint: disable=[unused-argument]
+ """
+ GIVEN: A user
+ WHEN: An attempt is made to fetch the user's privileges
+    THEN: Ensure only the privileges assigned to that user are returned
+ """
+ with db.connection(auth_testdb_path) as conn:
+ assert sorted(
+ user_privileges(conn, user), key=SORT_KEY) == expected
diff --git a/tests/unit/auth/test_resources.py b/tests/unit/auth/test_resources.py
new file mode 100644
index 0000000..2884add
--- /dev/null
+++ b/tests/unit/auth/test_resources.py
@@ -0,0 +1,117 @@
+"""Test resource-management functions"""
+import uuid
+
+import pytest
+
+from gn3.auth import db
+
+from gn3.auth.authorisation.groups import Group
+from gn3.auth.authorisation.errors import AuthorisationError
+from gn3.auth.authorisation.resources.models import (
+ Resource, user_resources, create_resource, ResourceCategory,
+ public_resources)
+
+from tests.unit.auth import conftest
+
+group = Group(uuid.UUID("9988c21d-f02f-4d45-8966-22c968ac2fbf"), "TheTestGroup",
+ {})
+resource_category = ResourceCategory(
+ uuid.UUID("fad071a3-2fc8-40b8-992b-cdefe7dcac79"), "mrna", "mRNA Dataset")
+create_resource_failure = {
+ "status": "error",
+ "message": "Unauthorised: Could not create resource"
+}
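+# Fixed UUID factory, patched over `uuid4` in the resources model so the
+# generated resource id is deterministic.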
+uuid_fn = lambda: uuid.UUID("d32611e3-07fc-4564-b56c-786c6db6de2b")
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "user,expected",
+ tuple(zip(
+ conftest.TEST_USERS[0:1],
+ (Resource(
+ group, uuid.UUID("d32611e3-07fc-4564-b56c-786c6db6de2b"),
+ "test_resource", resource_category, False),))))
+def test_create_resource(mocker, fxtr_users_in_group, user, expected):
+ """Test that resource creation works as expected."""
+ mocker.patch("gn3.auth.authorisation.resources.models.uuid4", uuid_fn)
+ mocker.patch("gn3.auth.authorisation.checks.require_oauth.acquire",
+ conftest.get_tokeniser(user))
+ conn, _group, _users = fxtr_users_in_group
+ resource = create_resource(
+ conn, "test_resource", resource_category, user, False)
+ assert resource == expected
+
+ with db.cursor(conn) as cursor:
+ # Cleanup
+ cursor.execute(
+ "DELETE FROM group_user_roles_on_resources WHERE resource_id=?",
+ (str(resource.resource_id),))
+ cursor.execute(
+ "DELETE FROM group_roles WHERE group_id=?",
+ (str(resource.group.group_id),))
+ cursor.execute(
+ "DELETE FROM resources WHERE resource_id=?",
+ (str(resource.resource_id),))
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "user,expected",
+ tuple(zip(
+ conftest.TEST_USERS[1:],
+ (create_resource_failure, create_resource_failure,
+ create_resource_failure))))
+def test_create_resource_raises_for_unauthorised_users(
+ mocker, fxtr_users_in_group, user, expected):
+ """Test that resource creation works as expected."""
+ mocker.patch("gn3.auth.authorisation.resources.models.uuid4", uuid_fn)
+ mocker.patch("gn3.auth.authorisation.checks.require_oauth.acquire",
+ conftest.get_tokeniser(user))
+ conn, _group, _users = fxtr_users_in_group
+ with pytest.raises(AuthorisationError):
+ assert create_resource(
+ conn, "test_resource", resource_category, user, False) == expected
+
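+# Sort resources by identifier so comparisons do not depend on ordering.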
+SORTKEY = lambda resource: resource.resource_id
+
+@pytest.mark.unit_test
+def test_public_resources(fxtr_resources):
+ """
+ GIVEN: some resources in the database
+ WHEN: public resources are requested
+ THEN: only list the resources that are public
+ """
+ conn, _res = fxtr_resources
+ assert sorted(public_resources(conn), key=SORTKEY) == sorted(tuple(
+ res for res in conftest.TEST_RESOURCES if res.public), key=SORTKEY)
+
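+# Deduplicate the public test resources by resource id, then sort them.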
+PUBLIC_RESOURCES = sorted(
+ {res.resource_id: res for res in conftest.TEST_RESOURCES_PUBLIC}.values(),
+ key=SORTKEY)
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "user,expected",
+ tuple(zip(
+ conftest.TEST_USERS,
+ (sorted(
+ {res.resource_id: res for res in
+ (conftest.TEST_RESOURCES_GROUP_01 +
+ conftest.TEST_RESOURCES_PUBLIC)}.values(),
+ key=SORTKEY),
+ sorted(
+ {res.resource_id: res for res in
+ ((conftest.TEST_RESOURCES_GROUP_01[1],) +
+                  conftest.TEST_RESOURCES_PUBLIC)}.values(),
+                key=SORTKEY),
+ PUBLIC_RESOURCES, PUBLIC_RESOURCES))))
+def test_user_resources(fxtr_group_user_roles, user, expected):
+ """
+ GIVEN: some resources in the database
+ WHEN: a particular user's resources are requested
+    THEN: list only the resources that the user can access
+ """
+ conn, *_others = fxtr_group_user_roles
+ assert sorted(
+ {res.resource_id: res for res in user_resources(conn, user)
+ }.values(), key=SORTKEY) == expected
diff --git a/tests/unit/auth/test_roles.py b/tests/unit/auth/test_roles.py
new file mode 100644
index 0000000..02fd9f7
--- /dev/null
+++ b/tests/unit/auth/test_roles.py
@@ -0,0 +1,123 @@
+"""Test functions dealing with group management."""
+import uuid
+
+import pytest
+
+from gn3.auth import db
+from gn3.auth.authorisation.privileges import Privilege
+from gn3.auth.authorisation.errors import AuthorisationError
+from gn3.auth.authorisation.roles.models import Role, user_roles, create_role
+
+from tests.unit.auth import conftest
+from tests.unit.auth.fixtures import TEST_USERS
+
+create_role_failure = {
+ "status": "error",
+ "message": "Unauthorised: Could not create role"
+}
+
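+# Fixed UUID factory, patched over `uuid4` in the roles model so the generated
+# role id is deterministic.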
+uuid_fn = lambda: uuid.UUID("d32611e3-07fc-4564-b56c-786c6db6de2b")
+
+PRIVILEGES = (
+ Privilege("group:resource:view-resource",
+ "view a resource and use it in computations"),
+ Privilege("group:resource:edit-resource", "edit/update a resource"))
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "user,expected", tuple(zip(conftest.TEST_USERS[0:1], (
+ Role(uuid.UUID("d32611e3-07fc-4564-b56c-786c6db6de2b"), "a_test_role",
+ True, PRIVILEGES),))))
+def test_create_role(# pylint: disable=[too-many-arguments]
+ fxtr_app, auth_testdb_path, mocker, fxtr_users, user, expected):# pylint: disable=[unused-argument]
+ """
+ GIVEN: an authenticated user
+ WHEN: the user attempts to create a role
+ THEN: verify they are only able to create the role if they have the
+ appropriate privileges
+ """
+ mocker.patch("gn3.auth.authorisation.roles.models.uuid4", uuid_fn)
+ mocker.patch("gn3.auth.authorisation.checks.require_oauth.acquire",
+ conftest.get_tokeniser(user))
+ with db.connection(auth_testdb_path) as conn, db.cursor(conn) as cursor:
+ the_role = create_role(cursor, "a_test_role", PRIVILEGES)
+ assert the_role == expected
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "user,expected", tuple(zip(conftest.TEST_USERS[1:], (
+ create_role_failure, create_role_failure, create_role_failure))))
+def test_create_role_raises_exception_for_unauthorised_users(# pylint: disable=[too-many-arguments]
+ fxtr_app, auth_testdb_path, mocker, fxtr_users, user, expected):# pylint: disable=[unused-argument]
+ """
+ GIVEN: an authenticated user
+ WHEN: the user attempts to create a role
+    THEN: verify that an `AuthorisationError` is raised, since the user lacks
+    the appropriate privileges
+ """
+ mocker.patch("gn3.auth.authorisation.roles.models.uuid4", uuid_fn)
+ mocker.patch("gn3.auth.authorisation.checks.require_oauth.acquire",
+ conftest.get_tokeniser(user))
+ with db.connection(auth_testdb_path) as conn, db.cursor(conn) as cursor:
+ with pytest.raises(AuthorisationError):
+ create_role(cursor, "a_test_role", PRIVILEGES)
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "user,expected",
+ (zip(TEST_USERS,
+ ((Role(
+ role_id=uuid.UUID('a0e67630-d502-4b9f-b23f-6805d0f30e30'),
+ role_name='group-leader', user_editable=False,
+ privileges=(
+ Privilege(privilege_id='group:resource:create-resource',
+ privilege_description='Create a resource object'),
+ Privilege(privilege_id='group:resource:delete-resource',
+ privilege_description='Delete a resource'),
+ Privilege(privilege_id='group:resource:edit-resource',
+ privilege_description='edit/update a resource'),
+ Privilege(
+ privilege_id='group:resource:view-resource',
+ privilege_description=(
+ 'view a resource and use it in computations')),
+ Privilege(privilege_id='group:role:create-role',
+ privilege_description='Create a new role'),
+ Privilege(privilege_id='group:role:delete-role',
+ privilege_description='Delete an existing role'),
+ Privilege(privilege_id='group:role:edit-role',
+ privilege_description='edit/update an existing role'),
+ Privilege(privilege_id='group:user:add-group-member',
+ privilege_description='Add a user to a group'),
+ Privilege(privilege_id='group:user:assign-role',
+ privilege_description=(
+ 'Assign a role to an existing user')),
+ Privilege(privilege_id='group:user:remove-group-member',
+ privilege_description='Remove a user from a group'),
+ Privilege(privilege_id='system:group:delete-group',
+ privilege_description='Delete a group'),
+ Privilege(privilege_id='system:group:edit-group',
+ privilege_description='Edit the details of a group'),
+ Privilege(
+ privilege_id='system:group:transfer-group-leader',
+ privilege_description=(
+ 'Transfer leadership of the group to some other '
+ 'member')),
+ Privilege(privilege_id='system:group:view-group',
+ privilege_description='View the details of a group'),
+ Privilege(privilege_id='system:user:list',
+ privilege_description='List users in the system'))),
+ Role(
+ role_id=uuid.UUID("ade7e6b0-ba9c-4b51-87d0-2af7fe39a347"),
+ role_name="group-creator", user_editable=False,
+ privileges=(
+ Privilege(privilege_id='system:group:create-group',
+                          privilege_description='Create a group'),))),
+ tuple(), tuple(), tuple()))))
+def test_user_roles(fxtr_group_user_roles, user, expected):
+ """
+ GIVEN: an authenticated user
+    WHEN: we request the user's roles
+    THEN: return **ALL** the roles attached to the user, with their privileges
+ """
+ conn, *_others = fxtr_group_user_roles
+ assert user_roles(conn, user) == expected
diff --git a/tests/unit/auth/test_token.py b/tests/unit/auth/test_token.py
new file mode 100644
index 0000000..76316ea
--- /dev/null
+++ b/tests/unit/auth/test_token.py
@@ -0,0 +1,62 @@
+"""Test the OAuth2 authorisation"""
+
+import pytest
+
+from gn3.auth import db
+
+SUCCESS_RESULT = {
+ "status_code": 200,
+ "result": {
+ "access_token": "123456ABCDE",
+ "expires_in": 864000,
+ "scope": "profile",
+ "token_type": "Bearer"}}
+
+USERNAME_PASSWORD_FAIL_RESULT = {
+ "status_code": 400,
+ "result": {
+ 'error': 'invalid_request',
+ 'error_description': 'Invalid "username" or "password" in request.'}}
+
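+# Referenced by name via the `OAUTH2_ACCESS_TOKEN_GENERATOR` setting in
+# tests/unit/conftest.py.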
+def gen_token(client, grant_type, user, scope): # pylint: disable=[unused-argument]
+ """Generate tokens for tests"""
+ return "123456ABCDE"
+
+@pytest.mark.unit_test
+@pytest.mark.parametrize(
+ "test_data,expected",
+ ((("group@lead.er", "password_for_user_001", 0), SUCCESS_RESULT),
+ (("group@mem.ber01", "password_for_user_002", 1), SUCCESS_RESULT),
+ (("group@mem.ber02", "password_for_user_003", 2), SUCCESS_RESULT),
+ (("unaff@iliated.user", "password_for_user_004", 3), SUCCESS_RESULT),
+ (("group@lead.er", "brrr", 0), USERNAME_PASSWORD_FAIL_RESULT),
+ (("group@mem.ber010", "password_for_user_002", 1), USERNAME_PASSWORD_FAIL_RESULT),
+ (("papa", "yada", 2), USERNAME_PASSWORD_FAIL_RESULT),
+ # (("unaff@iliated.user", "password_for_user_004", 1), USERNAME_PASSWORD_FAIL_RESULT)
+ ))
+def test_token(fxtr_app, fxtr_oauth2_clients, test_data, expected):
+ """
+ GIVEN: a registered oauth2 client, a user
+ WHEN: a token is requested via the 'password' grant
+ THEN: check that:
+      a) when the email and password are valid, we get a token back
+      b) when the email, the password, or both are invalid, we get an error
+         message back
+      c) TODO: when a user tries to use the wrong client, we get an error
+         message back
+ """
+ conn, oa2clients = fxtr_oauth2_clients
+ email, password, client_idx = test_data
+ data = {
+ "grant_type": "password", "scope": "profile nonexistent-scope",
+ "client_id": oa2clients[client_idx].client_id,
+ "client_secret": oa2clients[client_idx].client_secret,
+ "username": email, "password": password}
+
+ with fxtr_app.test_client() as client, db.cursor(conn) as cursor:
+ res = client.post("/api/oauth2/token", data=data)
+ # cleanup db
+ cursor.execute("DELETE FROM oauth2_tokens WHERE access_token=?",
+ (gen_token(None, None, None, None),))
+ assert res.status_code == expected["status_code"]
+ for key in expected["result"]:
+ assert res.json[key] == expected["result"][key]
diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py
new file mode 100644
index 0000000..8005c8e
--- /dev/null
+++ b/tests/unit/conftest.py
@@ -0,0 +1,35 @@
+"""Fixtures for unit tests."""
+from pathlib import Path
+from datetime import datetime
+from tempfile import TemporaryDirectory
+
+import pytest
+
+from gn3.app import create_app
+
+@pytest.fixture(scope="session")
+def fxtr_app():
+ """Fixture: setup the test app"""
+ # Do some setup
+ with TemporaryDirectory() as testdir:
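+        # Path for a throw-away auth database, named with the current
+        # timestamp, inside a temporary directory.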
+ testdb = Path(testdir).joinpath(
+ f'testdb_{datetime.now().strftime("%Y%m%dT%H%M%S")}')
+ app = create_app({
+ "TESTING": True, "AUTH_DB": testdb,
+ "OAUTH2_ACCESS_TOKEN_GENERATOR": "tests.unit.auth.test_token.gen_token"
+ })
+ app.testing = True
+ yield app
+ # Clean up after ourselves
+ testdb.unlink(missing_ok=True)
+
+@pytest.fixture(scope="session")
+def client(fxtr_app): # pylint: disable=redefined-outer-name
+ """Create a test client fixture for tests"""
+ with fxtr_app.app_context():
+ yield fxtr_app.test_client()
+
+@pytest.fixture(scope="session")
+def fxtr_app_config(client): # pylint: disable=redefined-outer-name
+ """Return the test application's configuration object"""
+ return client.application.config