#!/usr/bin/env guile \
-e main -s

Step p1 lists the traits that need to be computed. This script fetches trait
IDs from the GN database directly. The direct database calls are used for now
and ought to be turned into a REST API.

Run from the base dir with

. .guix-shell -- guile -L . -s ./scripts/precompute/list-traits-to-compute.scm

You may want to forward a MySQL port if there is no DB locally:

ssh -L 3306:127.0.0.1:3306 -f -N tux02.genenetwork.org

Test the connection with the mysql client:

mysql -uwebqtlout -pwebqtlout -A -h 127.0.0.1 -P 3306 db_webqtl -e "show tables;"

To create a clean slate, for now, reset Locus_old with

update ProbeSetXRef set Locus_old=NULL;

You should see

MariaDB [db_webqtl]> select count(Locus_old) from ProbeSetXRef where Locus_old IS NOT NULL limit 5;
+------------------+
| count(Locus_old) |
+------------------+
|                0 |
+------------------+

Now list the next 1000 trait IDs:

. .guix-shell -- guile -L . -s ./scripts/precompute/list-traits-to-compute.scm --start-id 0 --next 1000

The current logic is to list all datasets that contain a BXD strain.

(bxd-strain-id-names #:used-for-mapping? #t)

fetches all IDs and strain names listed in GN. Note that this differs from the
actual genotype file. To find the StrainIds in a dataset:

MariaDB [db_webqtl]> SELECT StrainId,value from ProbeSetData WHERE Id=115467;
+----------+---------+
| StrainId | value   |
+----------+---------+
|        1 | 9.47169 |
|        2 | 9.21621 |
|        3 | 9.728   |
|        4 | 9.28976 |
|        5 | 9.55523 |
|        6 | 9.63562 |
...

To speed things up a little we batch the trait DataIds and check whether BXD
strains are part of each batch. When that is the case we might as well write
the phenotype file, because we already have the trait values.

!#

(use-modules (dbi dbi)
             (gn db mysql)
             (gn data dataset)
             (gn data hits)
             (gn data strains)
             (gn util convert)
             (gn runner gemma)
             ; (rnrs base)
             (ice-9 getopt-long)
             (ice-9 match)
             (json)
             (srfi srfi-1))

;; Write one trait record: the DataId on its own line followed by the
;; JSON-encoded (StrainId . value) records.
(define (write-json id recs)
  (display id)
  (newline)
  (scm->json recs)   ; scm->json writes JSON to the current output port
  (newline))
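;; write-phenotypes below walks the precompute hits in batches of batch-size
;; DataIds and fetches all trait values for one batch with a single query
;; built from the DataIds, of the form (the IDs here are illustrative only):
;;
;;   SELECT Id,StrainId,value FROM ProbeSetData WHERE Id=115467 OR Id=115468
;;
;; Every trait in the batch whose strain list contains BXD strains is then
;; written out with write-json above.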
(define (write-phenotypes first-id num batch-size)
  (call-with-db
   (lambda (db)
     (let [(bxd-strains (memo-bxd-strain-id-names #:used-for-mapping? #t))]
       ;; Walk the hits in batches of batch-size DataIds, starting after prev-id
       (define (run-list-traits-to-compute db num prev-id)
         (let* [(count (if (< batch-size num) batch-size num))
                (rest (- num count))
                (hits (get-precompute-hits db prev-id count))
                (data-ids (map (lambda (hit) (assoc-ref hit "DataId")) hits))
                (data-str-ids (map (lambda (id)
                                     (string-append "Id=" (int-to-string id)))
                                   data-ids))
                (data-ids-query (string-join data-str-ids " OR "))
                (query (string-append
                        "SELECT Id,StrainId,value FROM ProbeSetData WHERE "
                        data-ids-query))]
           (dbi-query db query)
           (let [(id-traits (get-rows db '()))
                 (nrecs '())]
             ;; Group the (StrainId . value) pairs by DataId
             (for-each
              (lambda (r)
                (let* [(data-id (assoc-ref r "Id"))
                       (strain-id (assoc-ref r "StrainId"))
                       (value (assoc-ref r "value"))
                       (lst (acons strain-id value
                                   (or (assoc-ref nrecs data-id) '())))]
                  (set! nrecs (assoc-set! nrecs data-id lst))))
              id-traits)
             ;; Write JSON for every trait that contains BXD strains
             (for-each
              (lambda (r)
                (match r
                  ((id . recs)
                   (if (has-bxd? recs)
                       (write-json id recs)))))
              nrecs)
             ;; Continue with the next batch, starting from the last DataId
             (if (> rest 0)
                 (run-list-traits-to-compute db rest (last data-ids)))
             ;; start precompute
             )))
       (run-list-traits-to-compute db num first-id)
       ;; start precompute
       ;; (write bxd-strains)
       ))))

(define (main args)
  ;; (write args)
  (let* ((option-spec '((version (single-char #\v) (value #f))
                        (start-id (single-char #\s) (value #t))
                        (next (single-char #\n) (value #t))
                        (help (single-char #\h) (value #f))))
         (options (getopt-long args option-spec))
         (start-id (string->number (option-ref options 'start-id "0")))
         (next (string->number (option-ref options 'next "5")))
         (help-wanted (option-ref options 'help #f)))
    (if help-wanted
        (format #t "list-traits-to-compute writes JSON trait files from the GN DB

Usage: list-traits-to-compute [options...]

  -s, --start-id num   Start from ID (default 0)
  -n, --next count     In batches of count size (default 5)
  -h, --help           Display this help
")
        (write-phenotypes start-id next 500)  ;; batch size 500 appears to work best on my machine
        )))
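;; Note that has-bxd? is imported from the gn modules above; it is not defined
;; in this script.  As a rough sketch of the check it is assumed to perform --
;; does any StrainId in a trait's (StrainId . value) alist belong to the BXD
;; set? -- it could look something like the hypothetical helper below (this
;; also assumes memo-bxd-strain-id-names returns an (id . name) alist):
;;
;; (define (has-bxd-sketch? recs)
;;   (let [(bxd-ids (map car (memo-bxd-strain-id-names #:used-for-mapping? #t)))]
;;     (any (lambda (rec) (memv (car rec) bxd-ids)) recs)))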