Diffstat (limited to 'gn3/api')
-rw-r--r-- | gn3/api/correlation.py | 12
-rw-r--r-- | gn3/api/datasets.py | 44
-rw-r--r-- | gn3/api/traits.py | 53
3 files changed, 104 insertions, 5 deletions
diff --git a/gn3/api/correlation.py b/gn3/api/correlation.py
index 2339088..f28e1f5 100644
--- a/gn3/api/correlation.py
+++ b/gn3/api/correlation.py
@@ -33,9 +33,10 @@ def compute_sample_integration(corr_method="pearson"):
 
 @correlation.route("/sample_r/<string:corr_method>", methods=["POST"])
 def compute_sample_r(corr_method="pearson"):
-    """correlation endpoint for computing sample r correlations\
+    """Correlation endpoint for computing sample r correlations\
     api expects the trait data with has the trait and also the\
-    target_dataset data"""
+    target_dataset data
+    """
     correlation_input = request.get_json()
 
     # xtodo move code below to compute_all_sampl correlation
@@ -53,9 +54,10 @@ def compute_sample_r(corr_method="pearson"):
 
 @correlation.route("/lit_corr/<string:species>/<int:gene_id>", methods=["POST"])
 def compute_lit_corr(species=None, gene_id=None):
-    """api endpoint for doing lit correlation.results for lit correlation\
+    """Api endpoint for doing lit correlation.results for lit correlation\
     are fetched from the database this is the only case where the db\
-    might be needed for actual computing of the correlation results"""
+    might be needed for actual computing of the correlation results
+    """
     conn, _cursor_object = database_connector()
     target_traits_gene_ids = request.get_json()
 
@@ -72,7 +74,7 @@ def compute_lit_corr(species=None, gene_id=None):
 
 @correlation.route("/tissue_corr/<string:corr_method>", methods=["POST"])
 def compute_tissue_corr(corr_method="pearson"):
-    """api endpoint fr doing tissue correlation"""
+    """Api endpoint fr doing tissue correlation"""
     tissue_input_data = request.get_json()
     primary_tissue_dict = tissue_input_data["primary_tissue"]
     target_tissues_dict = tissue_input_data["target_tissues_dict"]
diff --git a/gn3/api/datasets.py b/gn3/api/datasets.py
new file mode 100644
index 0000000..7f08de5
--- /dev/null
+++ b/gn3/api/datasets.py
@@ -0,0 +1,44 @@
+"""this module contains code for creating datasets"""
+from flask import Blueprint
+from flask import jsonify
+
+from gn3.computations.datasets import create_dataset
+from gn3.computations.datasets import get_traits_data
+from gn3.experimental_db import database_connector
+
+
+dataset = Blueprint("dataset", __name__)
+
+
+@dataset.route("/create/<dataset_name>/")
+@dataset.route("/create/<dataset_name>/<dataset_type>")
+def create_dataset_api(dataset_name, dataset_type=None):
+    """Endpoint of creating dataset"""
+
+    new_dataset = create_dataset(
+        dataset_type=dataset_type, dataset_name=dataset_name)
+
+    results = {
+        "dataset": new_dataset
+    }
+    return jsonify(results)
+
+
+@dataset.route("/fetch_traits_data/<dataset_name>/<dataset_type>")
+def fetch_traits_data(dataset_name, dataset_type):
+    """Endpoint for fetching Trait data"""
+    # should fetch this(temp)
+    trait_sample_ids = [4, 5, 6, 7, 8, 9, 10, 11, 12, 14, 15,
+                        17, 18, 19, 20, 21, 22, 24, 25, 26, 28, 29, 30, 31,
+                        35, 36, 37, 39, 98, 99, 100, 103, 487, 105, 106, 110, 115,
+                        116, 117, 118, 119, 120, 919, 147,
+                        121, 40, 41, 124, 125, 128, 135, 129, 130, 131,
+                        132, 134, 138, 139, 140, 141, 142, 144,
+                        145, 148, 149, 920, 922, 2, 3, 1, 1100]
+
+    conn, _cursor = database_connector()
+    results = get_traits_data(sample_ids=trait_sample_ids, database_instance=conn,
+                              dataset_name=dataset_name, dataset_type=dataset_type)
+    conn.close()
+
+    return jsonify({"results": results})
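For illustration, a minimal client-side sketch (not part of this commit) of how the two new dataset endpoints above might be called. It assumes the "dataset" blueprint is mounted under an /api/dataset prefix on a locally running server, and the dataset name is a placeholder; none of these details are defined in this diff.

import requests  # third-party HTTP client, assumed available

# Assumed base URL; the blueprint prefix and port are not shown in this diff.
BASE_URL = "http://localhost:8080/api/dataset"

# GET /create/<dataset_name>/<dataset_type> returns {"dataset": ...};
# dataset_type may be omitted thanks to the second, shorter route.
created = requests.get(f"{BASE_URL}/create/HC_M2_0606_P/ProbeSet").json()

# GET /fetch_traits_data/<dataset_name>/<dataset_type> returns {"results": [...]};
# the sample ids are currently hard-coded inside the handler.
traits_data = requests.get(f"{BASE_URL}/fetch_traits_data/HC_M2_0606_P/ProbeSet").json()

print(created, traits_data)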
diff --git a/gn3/api/traits.py b/gn3/api/traits.py
new file mode 100644
index 0000000..0ac437d
--- /dev/null
+++ b/gn3/api/traits.py
@@ -0,0 +1,53 @@
+"""this module contains the all endpoints for traits"""
+from unittest import mock
+
+from flask import Blueprint
+from flask import jsonify
+from flask import request
+
+from gn3.computations.traits import fetch_trait
+from gn3.computations.traits import get_trait_info_data
+from gn3.experimental_db import database_connector
+
+trait = Blueprint("trait", __name__)
+
+
+@trait.route("/<string:trait_name>/<string:dataset_name>")
+def create_trait(trait_name, dataset_name):
+    """Endpoint for creating trait and fetching strain\
+    values"""
+
+    # xtodo replace the object at most this endpoint
+    # requires dataset_type,dataset_name ,dataset_id
+    trait_dataset = {
+        "name": dataset_name,
+        "id": 12,
+        "type": "ProbeSet"  # temp values
+    }
+    conn, _cursor = database_connector()
+
+    trait_results = fetch_trait(dataset=trait_dataset,
+                                trait_name=trait_name,
+                                database=conn)
+
+    conn.close()
+
+    return jsonify(trait_results)
+
+
+@trait.route("/trait_info/<string:trait_name>", methods=["POST"])
+def fetch_trait_info(trait_name):
+    """Api endpoint for fetching the trait info \
+    expects the trait and trait dataset to have\
+    been created """
+    data = request.get_json()
+
+    trait_dataset = data["trait_dataset"]
+    trait_data = data["trait"]
+    _trait_name = trait_name  # should be used as key to return results
+
+    database_instance = mock.Mock()
+
+    results = get_trait_info_data(trait_dataset, trait_data, database_instance)
+
+    return jsonify(results)
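Likewise, a hedged sketch of exercising the new trait endpoints. The blueprint prefix, host, trait and dataset names, and the inner payload structure are assumptions; the handler only requires a JSON body with the "trait_dataset" and "trait" keys shown in the diff.

import requests  # third-party HTTP client, assumed available

# Assumed base URL; the "trait" blueprint prefix is not shown in this diff.
BASE_URL = "http://localhost:8080/api/trait"

# GET /<trait_name>/<dataset_name> creates the trait and returns strain values.
trait_data = requests.get(f"{BASE_URL}/1449593_at/HC_M2_0606_P").json()

# POST /trait_info/<trait_name> reads data["trait_dataset"] and data["trait"];
# the dict contents below are illustrative only.
payload = {
    "trait_dataset": {"name": "HC_M2_0606_P", "id": 12, "type": "ProbeSet"},
    "trait": trait_data,
}
info = requests.post(f"{BASE_URL}/trait_info/1449593_at", json=payload).json()
print(info)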