├── .gitignore ├── COPYING ├── README.md ├── agpl-3.0.txt ├── docs ├── Makefile ├── README ├── annotile │ ├── index.txt │ └── porting_annotation_pipeline.txt ├── api │ ├── batch_processing_v0.1.1.txt │ ├── errors_v0.1.0.txt │ ├── index.txt │ ├── reasoning_v0.1.0.txt │ ├── v0.1.0.txt │ └── v0.1.1.txt ├── cgf │ └── index.txt ├── conf.py ├── data_structures │ ├── index.txt │ ├── v0.1.0.txt │ └── v0.1.1.txt ├── index.txt ├── intro │ ├── design_doc.txt │ ├── importing_genomes.txt │ ├── index.txt │ ├── install.txt │ └── overview.txt ├── lantern │ └── index.txt ├── make.bat ├── sdks │ └── index.txt ├── sprite │ ├── annotations.txt │ └── index.txt ├── tile_library │ └── index.txt └── tiling │ └── index.txt └── experimental ├── .gitignore ├── TylerSpecifications.txt ├── abram ├── Documentation │ ├── 0-Overview.md │ ├── 1-Initial-Tile-Set.md │ ├── CGF-Draft.md │ └── CGF-v2-Draft.md ├── bin │ └── vcf-sort++ ├── chopCGIVar.go ├── chopGff_hg18.go ├── createAllGenomeTiling.go ├── createHG18TileSet.go ├── crunch_examples │ ├── .-canary │ │ ├── .-canary-data │ │ │ ├── input0.txt │ │ │ ├── input1.txt │ │ │ ├── input10.txt │ │ │ ├── input100.txt │ │ │ ├── input101.txt │ │ │ ├── input102.txt │ │ │ ├── input103.txt │ │ │ ├── input104.txt │ │ │ ├── input105.txt │ │ │ ├── input106.txt │ │ │ ├── input107.txt │ │ │ ├── input108.txt │ │ │ ├── input109.txt │ │ │ ├── input11.txt │ │ │ ├── input110.txt │ │ │ ├── input111.txt │ │ │ ├── input112.txt │ │ │ ├── input113.txt │ │ │ ├── input114.txt │ │ │ ├── input115.txt │ │ │ ├── input116.txt │ │ │ ├── input117.txt │ │ │ ├── input118.txt │ │ │ ├── input119.txt │ │ │ ├── input12.txt │ │ │ ├── input120.txt │ │ │ ├── input121.txt │ │ │ ├── input122.txt │ │ │ ├── input123.txt │ │ │ ├── input124.txt │ │ │ ├── input125.txt │ │ │ ├── input126.txt │ │ │ ├── input127.txt │ │ │ ├── input128.txt │ │ │ ├── input129.txt │ │ │ ├── input13.txt │ │ │ ├── input130.txt │ │ │ ├── input131.txt │ │ │ ├── input132.txt │ │ │ ├── input133.txt │ │ │ ├── input134.txt │ │ │ ├── input135.txt │ │ │ ├── input136.txt │ │ │ ├── input137.txt │ │ │ ├── input138.txt │ │ │ ├── input139.txt │ │ │ ├── input14.txt │ │ │ ├── input140.txt │ │ │ ├── input141.txt │ │ │ ├── input142.txt │ │ │ ├── input143.txt │ │ │ ├── input144.txt │ │ │ ├── input145.txt │ │ │ ├── input146.txt │ │ │ ├── input147.txt │ │ │ ├── input148.txt │ │ │ ├── input149.txt │ │ │ ├── input15.txt │ │ │ ├── input150.txt │ │ │ ├── input16.txt │ │ │ ├── input17.txt │ │ │ ├── input18.txt │ │ │ ├── input19.txt │ │ │ ├── input2.txt │ │ │ ├── input20.txt │ │ │ ├── input21.txt │ │ │ ├── input22.txt │ │ │ ├── input23.txt │ │ │ ├── input24.txt │ │ │ ├── input25.txt │ │ │ ├── input26.txt │ │ │ ├── input27.txt │ │ │ ├── input28.txt │ │ │ ├── input29.txt │ │ │ ├── input3.txt │ │ │ ├── input30.txt │ │ │ ├── input31.txt │ │ │ ├── input32.txt │ │ │ ├── input33.txt │ │ │ ├── input34.txt │ │ │ ├── input35.txt │ │ │ ├── input36.txt │ │ │ ├── input37.txt │ │ │ ├── input38.txt │ │ │ ├── input39.txt │ │ │ ├── input4.txt │ │ │ ├── input40.txt │ │ │ ├── input41.txt │ │ │ ├── input42.txt │ │ │ ├── input43.txt │ │ │ ├── input44.txt │ │ │ ├── input45.txt │ │ │ ├── input46.txt │ │ │ ├── input47.txt │ │ │ ├── input48.txt │ │ │ ├── input49.txt │ │ │ ├── input5.txt │ │ │ ├── input50.txt │ │ │ ├── input51.txt │ │ │ ├── input52.txt │ │ │ ├── input53.txt │ │ │ ├── input54.txt │ │ │ ├── input55.txt │ │ │ ├── input56.txt │ │ │ ├── input57.txt │ │ │ ├── input58.txt │ │ │ ├── input59.txt │ │ │ ├── input6.txt │ │ │ ├── input60.txt │ │ │ ├── input61.txt │ │ │ ├── input62.txt │ │ │ ├── 
input63.txt │ │ │ ├── input64.txt │ │ │ ├── input65.txt │ │ │ ├── input66.txt │ │ │ ├── input67.txt │ │ │ ├── input68.txt │ │ │ ├── input69.txt │ │ │ ├── input7.txt │ │ │ ├── input70.txt │ │ │ ├── input71.txt │ │ │ ├── input72.txt │ │ │ ├── input73.txt │ │ │ ├── input74.txt │ │ │ ├── input75.txt │ │ │ ├── input76.txt │ │ │ ├── input77.txt │ │ │ ├── input78.txt │ │ │ ├── input79.txt │ │ │ ├── input8.txt │ │ │ ├── input80.txt │ │ │ ├── input81.txt │ │ │ ├── input82.txt │ │ │ ├── input83.txt │ │ │ ├── input84.txt │ │ │ ├── input85.txt │ │ │ ├── input86.txt │ │ │ ├── input87.txt │ │ │ ├── input88.txt │ │ │ ├── input89.txt │ │ │ ├── input9.txt │ │ │ ├── input90.txt │ │ │ ├── input91.txt │ │ │ ├── input92.txt │ │ │ ├── input93.txt │ │ │ ├── input94.txt │ │ │ ├── input95.txt │ │ │ ├── input96.txt │ │ │ ├── input97.txt │ │ │ ├── input98.txt │ │ │ └── input99.txt │ │ ├── canary.pipeline │ │ ├── grep │ │ └── register_and_run.sh │ ├── 0-simple-pipeline │ │ ├── register_and_run.sh │ │ ├── systemInfo.pipeline │ │ └── systemInfo.py │ ├── 1-input-pipeline │ │ ├── 1-input-pipeline-data │ │ │ ├── seq1.fa │ │ │ ├── seq10.fa │ │ │ ├── seq11.fa │ │ │ ├── seq12.fa │ │ │ ├── seq13.fa │ │ │ ├── seq14.fa │ │ │ ├── seq15.fa │ │ │ ├── seq16.fa │ │ │ ├── seq17.fa │ │ │ ├── seq18.fa │ │ │ ├── seq19.fa │ │ │ ├── seq2.fa │ │ │ ├── seq20.fa │ │ │ ├── seq21.fa │ │ │ ├── seq22.fa │ │ │ ├── seq3.fa │ │ │ ├── seq4.fa │ │ │ ├── seq5.fa │ │ │ ├── seq6.fa │ │ │ ├── seq7.fa │ │ │ ├── seq8.fa │ │ │ ├── seq9.fa │ │ │ ├── seqM.fa │ │ │ ├── seqX.fa │ │ │ └── seqY.fa │ │ ├── grep │ │ ├── grep.pipeline │ │ └── register_and_run.sh │ ├── 2-chain-pipeline │ │ ├── 2-chain-pipeline-data │ │ │ ├── seq1.fa │ │ │ ├── seq10.fa │ │ │ ├── seq11.fa │ │ │ ├── seq12.fa │ │ │ ├── seq13.fa │ │ │ ├── seq14.fa │ │ │ ├── seq15.fa │ │ │ ├── seq16.fa │ │ │ ├── seq17.fa │ │ │ ├── seq18.fa │ │ │ ├── seq19.fa │ │ │ ├── seq2.fa │ │ │ ├── seq20.fa │ │ │ ├── seq21.fa │ │ │ ├── seq22.fa │ │ │ ├── seq3.fa │ │ │ ├── seq4.fa │ │ │ ├── seq5.fa │ │ │ ├── seq6.fa │ │ │ ├── seq7.fa │ │ │ ├── seq8.fa │ │ │ ├── seq9.fa │ │ │ ├── seqM.fa │ │ │ ├── seqX.fa │ │ │ └── seqY.fa │ │ ├── chain-example.pipeline │ │ ├── grep │ │ ├── post-process.py │ │ └── register_and_run.sh │ ├── 3-multiple-input │ │ ├── 3-multiple-input-data │ │ │ ├── input0.txt │ │ │ └── input1.txt │ │ ├── multiInput.pipeline │ │ ├── multiInput.py │ │ └── register_and_run.sh │ ├── 4-mount-input │ │ ├── 4-mount-input │ │ │ ├── README │ │ │ ├── README~ │ │ │ ├── sampleA │ │ │ │ └── seqA.fa │ │ │ └── sampleB │ │ │ │ └── seqB.fa │ │ ├── mountInput.pipeline │ │ ├── mountInput.py │ │ └── register_and_run.sh │ ├── 5-custom-task │ │ ├── 5-custom-task-data │ │ │ ├── A │ │ │ │ ├── A1.txt │ │ │ │ ├── A2.txt │ │ │ │ ├── A3.txt │ │ │ │ ├── A4.txt │ │ │ │ └── A5.txt │ │ │ └── B │ │ │ │ ├── B100.txt │ │ │ │ ├── B101.txt │ │ │ │ ├── B102.txt │ │ │ │ ├── B103.txt │ │ │ │ ├── B104.txt │ │ │ │ ├── B105.txt │ │ │ │ └── B106.txt │ │ ├── customTask.pipeline │ │ ├── customTask.py │ │ └── register_and_run.sh │ └── README.md ├── extend_polyphen_with_hg19.go ├── fastj2gvcf.go └── gffstats.go ├── align2gvcf.go ├── aux └── aux.go ├── beacon ├── .gitignore ├── Procfile ├── README.md ├── beacon.py ├── requirements.txt ├── static │ ├── fav.ico │ ├── nyancat.gif │ └── style.css └── templates │ ├── botsearch.html │ ├── layout.html │ ├── people.html │ └── search.html ├── bioenv └── bioenv.go ├── build-seed-tileset.go ├── buildTileSet.go ├── cgf ├── cgf.go ├── cgf_default.go ├── cgf_test.go ├── cgf_tile_map.go └── cgf_tile_map_unphased.go ├── 
chopGff.go ├── createBandBedGraph.go ├── createTagAndPosFromBandBedGraph.go ├── createTileSetFromGff.go ├── extendTileSetHG19.go ├── filledTileSetFromGff.go ├── findTagSet.go ├── fjtools ├── cgfcheck.go ├── cgfpeek.go ├── fj2cgf.go ├── fjcheck.go ├── fjdiff.go ├── fjfilter.go └── gff2fj.go ├── lantern ├── flint.go ├── lantern-0-0-1 ├── lantern.go ├── lantern_common_error.go ├── lantern_parse.go ├── lantern_sample_intersect.go ├── lantern_sample_position_variant.go ├── lantern_sample_tile_group_match.go ├── lantern_sample_tile_neighborhood.go ├── lantern_system_info.go ├── lantern_tile.go ├── lantern_tile_sequence.go └── rsgrease │ └── rsgrease.go ├── mergeFastj.go ├── pylightweb ├── .dockerignore ├── .gitignore ├── Dockerfile ├── README.md └── lightning │ ├── README.md │ ├── api │ ├── __init__.py │ ├── serializers.py │ ├── tests.py │ ├── urls.py │ └── views.py │ ├── api_gui │ ├── __init__.py │ ├── admin.py │ ├── forms.py │ ├── migrations │ │ └── __init__.py │ ├── models.py │ ├── tests.py │ ├── urls.py │ └── views.py │ ├── errors.py │ ├── lightning │ ├── __init__.py │ ├── settings.py │ ├── urls.py │ ├── views.py │ └── wsgi.py │ ├── manage.py │ ├── static │ ├── bootstrap-3.2.0-dist │ │ ├── css │ │ │ ├── bootstrap-theme.css │ │ │ ├── bootstrap-theme.css.map │ │ │ ├── bootstrap-theme.min.css │ │ │ ├── bootstrap.css │ │ │ ├── bootstrap.css.map │ │ │ └── bootstrap.min.css │ │ ├── fonts │ │ │ ├── glyphicons-halflings-regular.eot │ │ │ ├── glyphicons-halflings-regular.svg │ │ │ ├── glyphicons-halflings-regular.ttf │ │ │ └── glyphicons-halflings-regular.woff │ │ └── js │ │ │ ├── bootstrap.js │ │ │ └── bootstrap.min.js │ ├── jquery-1.10.1.min.js │ └── jquery-1.10.1.min.map │ ├── templates │ ├── api │ │ └── api_docs.html │ ├── api_gui │ │ └── index.html │ ├── base.html │ └── lightning │ │ ├── help.html │ │ └── index.html │ ├── tile_library │ ├── __init__.py │ ├── admin.py │ ├── basic_functions.py │ ├── constants.py │ ├── generate_stats.py │ ├── human_readable_functions.py │ ├── lantern_query_functions.py │ ├── migrations │ │ ├── 0001_initial.py │ │ ├── 0002_auto_20141002_1101.py │ │ ├── 0003_auto_20141009_1345.py │ │ ├── 0004_genomestatistic.py │ │ ├── 0005_auto_20141010_0933.py │ │ ├── 0006_auto_20141010_1100.py │ │ ├── 0007_genomestatistic_path_name.py │ │ ├── 0008_auto_20141017_1402.py │ │ ├── 0009_auto_20141020_1808.py │ │ ├── 0010_auto_20141103_1533.py │ │ ├── 0011_auto_20141103_1546.py │ │ ├── 0012_auto_20141103_1548.py │ │ ├── 0013_auto_20141104_1715.py │ │ ├── 0014_auto_20141104_1716.py │ │ ├── 0015_auto_20141104_1726.py │ │ ├── 0016_auto_20141104_1727.py │ │ ├── 0017_auto_20141106_1306.py │ │ ├── 0018_auto_20141107_1550.py │ │ ├── 0019_auto_20141107_1843.py │ │ ├── 0020_auto_20141107_2040.py │ │ ├── 0021_auto_20141110_1327.py │ │ ├── 0022_auto_20141110_1750.py │ │ ├── 0023_auto_20141110_1807.py │ │ ├── 0024_auto_20141110_1808.py │ │ ├── 0025_auto_20141110_1815.py │ │ ├── 0026_auto_20141110_1923.py │ │ ├── 0027_tilevariant_conversion_to_cgf.py │ │ ├── 0028_auto_20141210_1126.py │ │ ├── 0029_auto_20150129_1416.py │ │ ├── 0030_auto_20150130_1750.py │ │ ├── 0031_auto_20150202_1042.py │ │ ├── 0032_auto_20150202_1227.py │ │ ├── 0033_auto_20150202_1339.py │ │ ├── 0034_auto_20150202_1349.py │ │ ├── 0035_auto_20150202_1616.py │ │ ├── 0036_auto_20150202_1653.py │ │ ├── 0037_auto_20150202_1749.py │ │ ├── 0039_auto_20150220_1417.py │ │ ├── 0040_auto_20150223_1333.py │ │ ├── 0041_auto_20150224_1826.py │ │ ├── 0042_auto_20150227_1931.py │ │ ├── 0043_auto_20150304_1149.py │ │ ├── 0044_auto_20150304_1415.py 
│ │ ├── 0045_auto_20150304_1716.py │ │ ├── 0046_auto_20150311_1637.py │ │ ├── 0047_auto_20150311_1653.py │ │ └── __init__.py │ ├── models.py │ ├── query_functions.py │ ├── test_scripts │ │ ├── __init__.py │ │ ├── complicated_library.py │ │ └── python_lantern.py │ └── tests.py │ └── tile_library_generation │ ├── __init__.py │ ├── constants.py │ ├── crunch_scripts │ └── add_population.py │ ├── fastj_functions.py │ ├── fastj_objects.py │ └── validators.py ├── recache └── recache.go ├── regexTileSetFromGff ├── regexTileSetFromGff.go ├── roughCountVarInTile.go ├── sloppyjson ├── README.md ├── sloppyjson.go └── sloppyjson_test.go ├── statvisualization ├── lencount2csv.py ├── sorted2csv.py ├── tilediststats.html └── tilelenstats.html ├── tile └── tile.go ├── tile_cache ├── tile_cache.go └── tile_cache_test.go ├── tile_dbh └── tile_dbh.go └── tileruler ├── .gitignore ├── README.md ├── cmd ├── abv.go ├── cmd.go ├── compare.go ├── gen.go ├── gen_test.go ├── plot.go ├── reverse.go └── stat.go ├── legacy.go ├── modules ├── abv │ └── abv.go ├── base │ ├── base.go │ ├── cmd.go │ └── utils.go ├── cli │ ├── app.go │ ├── cli.go │ ├── command.go │ ├── context.go │ ├── flag.go │ └── help.go ├── log │ └── log.go ├── plot │ ├── bar.go │ ├── chart.go │ ├── data.go │ ├── js.go │ ├── line.go │ ├── line_no_curve.go │ ├── line_no_dot.go │ ├── main.go │ ├── pie.go │ └── server.go └── rule │ └── rule.go └── tileruler.go
/.gitignore:
--------------------------------------------------------------------------------
.DS_Store
docs/_build
--------------------------------------------------------------------------------
/COPYING:
--------------------------------------------------------------------------------
Copyright (C) 2014 Curoverse Inc.

This program is free software: you can redistribute it and/or modify it
under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at your
option) any later version. (see agpl-3.0.txt)
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
Lightning
=========

## Setting up a local lightning cluster:
* This will **not** populate the database with a tile library or population. These capabilities are under development.
* An official Curoverse and/or Arvados docker image is in development.

## Recipe:
1. Install docker (https://docs.docker.com/installation/)
2. Start a container running postgres (named base-postgres here)

```
$ docker run --name base-postgres -e POSTGRES_USER=lightning -e POSTGRES_PASSWORD=mypassword -d postgres
```
3. Download the lightning docker image (currently sguthrie/lightning)

   a. You can do this by pulling sguthrie/lightning from the docker repository

```
$ docker pull sguthrie/lightning
```
   b. Or by building it yourself using the Dockerfile in this git repository (which takes more time but guarantees the image is up-to-date)

```
lightning/experimental/pylightweb$ docker build -t sguthrie/lightning .
```

4. Run an interactive lightning container and link it to the postgres container

```
$ docker run --name lightning -it --rm --link base-postgres:postgres sguthrie/lightning /bin/bash
```

5. Inside the lightning container, in case the docker image is out of date, pull the most recent version of lightning from github

```
/home/lightning/lightning/experimental/pylightweb/lightning# git pull
```

6. Inside the lightning container, migrate the lightning tables into your postgres container

```
/home/lightning/lightning/experimental/pylightweb/lightning# python manage.py migrate
```

7. That's it! You can test the installation inside the lightning container

```
/home/lightning/lightning/experimental/pylightweb/lightning# python manage.py test tile_library
```

--------------------------------------------------------------------------------
/docs/README:
--------------------------------------------------------------------------------
The documentation in this tree is in plain text files and can be viewed using
your favorite text file viewer.

It uses ReST (reStructuredText) [1] and the Sphinx documentation system [2].

To create an HTML version of these docs:

* Install Sphinx (which can be done using python by running ``pip install Sphinx``)

* In docs/, run ``make html`` (or ``make.bat html`` on Windows) at a shell prompt

The documentation can be viewed using a web browser by opening _build/html/index.html

%TODO: once site runs documentation, link to it here

[1] http://docutils.sourceforge.net/rst.html
[2] http://sphinx-doc.org/
--------------------------------------------------------------------------------
/docs/annotile/porting_annotation_pipeline.txt:
--------------------------------------------------------------------------------
Adding an Annotation Pipeline to Annotile
==========================================

Build a component for the `annotate_tile_variants` pipeline. This is likely to
require implementing a crunch script and creating a docker image.

Automated Annotation Pipelines
-------------------------------

If the annotation pipeline you wish to port cannot use a VCF file (aligned against
GRCh37) as input, you will also need to implement a component to generate the
required input for your annotation pipeline. Use the `create-vcf-per-tile` component
as an example.

The components you implement must output a collection with a directory named
`annotile_input`.

.. todo::
    After implementing annotile, document the output format requirements

User-added Annotations
-----------------------

User-added annotations can be converted into a component in the `annotate_tile_variants`
pipeline by hard-coding annotations applying to specific tile variants. A component
which does this already is linked to a Sprite user-added annotations app. Use this
component and application as an example.
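As a rough sketch of the one output requirement stated above (the component's output
collection must contain an ``annotile_input`` directory), a component's final step
might stage its results as follows. The helper name, file name, and record layout are
illustrative assumptions only; the real output format is still to be documented::

    import csv
    import os

    def stage_annotations(annotations, output_root="."):
        # Write annotation records under the required annotile_input/ directory.
        # The TSV layout and file name are placeholders, not a specified format.
        out_dir = os.path.join(output_root, "annotile_input")
        os.makedirs(out_dir, exist_ok=True)
        with open(os.path.join(out_dir, "annotations.tsv"), "w", newline="") as f:
            writer = csv.writer(f, delimiter="\t")
            writer.writerow(["tile_variant_id", "annotation"])
            for tile_variant_id, annotation in annotations:
                writer.writerow([tile_variant_id, annotation])

    stage_annotations([("example-tile-variant-id", "example annotation text")])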
--------------------------------------------------------------------------------
/docs/api/batch_processing_v0.1.1.txt:
--------------------------------------------------------------------------------
Batch Processing
=================

The API calls are excellent for one-off queries. However, many users may wish to
use these calls many times for many samples and/or over large parts of the genome.
To speed up these queries and reduce load on the server, Lightning plans to
support batch processing.

.. todo::
    Document these better

Filter to Find Tile Variants Matching Filters
----------------------------------------------
Finds tile variants fulfilling the specified filters

* REGEX on sequence, start tag, or end tag
* Float comparison on length, the number of positions spanned, the frequency
  of this tile, or the number of callsets called at this position
* True/False on whether the tile is at the start of the path or at the end of the
  path
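The batch call itself is not yet specified, but the kinds of predicates listed above
can be sketched in Python. The record layout and field names below are illustrative
assumptions, not the eventual API::

    import re

    tile_variants = [
        {"sequence": "ACGGATTTACGGAT", "length": 14, "frequency": 0.40, "is_path_start": True},
        {"sequence": "ACGGTTTTTTTTTT", "length": 14, "frequency": 0.005, "is_path_start": False},
    ]

    def matches(variant, seq_regex=None, min_length=None, max_frequency=None, at_path_start=None):
        # Each check mirrors one filter kind from the list above; None means "do not filter".
        if seq_regex is not None and not re.search(seq_regex, variant["sequence"]):
            return False
        if min_length is not None and variant["length"] < min_length:
            return False
        if max_frequency is not None and variant["frequency"] > max_frequency:
            return False
        if at_path_start is not None and variant["is_path_start"] != at_path_start:
            return False
        return True

    rare = [v for v in tile_variants if matches(v, seq_regex="T{6,}", max_frequency=0.01)]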
Find Loci of Multiple Tiles at Once
------------------------------------
.. todo::
    Document

Retrieve the Library Around a Group of Loci
--------------------------------------------
Returns a list of tile-positions touching the given loci, a dictionary of
tile-variants touching those loci with the sequence of each tile variant, cut
off if appropriate, and the tag-length. We should be able to name the loci
for convenience.

.. todo::
    Should document the INDEL behavior. Previous documentation is at
    lightning-dev4.curoverse.com/pad/p/lightning-indel-behavior
--------------------------------------------------------------------------------
/docs/api/errors_v0.1.0.txt:
--------------------------------------------------------------------------------
Lightning Errors
=================

UnknownAssembly
----------------
Used when the user requests an assembly that is not loaded into the Lightning
instance.
--------------------------------------------------------------------------------
/docs/api/index.txt:
--------------------------------------------------------------------------------
Lightning API Specifications
====================================

We highly recommend reading the appropriate version of :doc:`../data_structures/index`
before reading the API specifications you are interested in.

Components for each API specification:

* Server Namespace
* API Calls
* API Examples

Contents:

.. toctree::

   v0.1.0
   reasoning_v0.1.0
   errors_v0.1.0
   v0.1.1
   batch_processing_v0.1.1

Versioning
-----------

URL Parameter Versioning: The client specifies the version as part of the URL path::

    GET /v0.1.0/status HTTP/1.1
    Host: lightning.curoverse.com
    Accept: application/json

For more information and implementation details, reference
http://www.django-rest-framework.org/api-guide/versioning/.

Paging
--------

Limit Offset Pagination: The client specifies the limit and offset using request
query parameters.

``limit``: indicates the maximum number of items to return. It is not required.
The default limit is 100. The maximum limit is 1000.

``offset``: indicates the starting position of the query in relation to the
complete set of unpaginated items.

For more information and implementation details, reference
http://www.django-rest-framework.org/api-guide/pagination/.
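For example, a client could walk a paginated listing with the ``requests`` library.
The resource path below is a placeholder (the paginated endpoints are described in the
version-specific API pages, not here), and the response envelope is assumed to follow
the django-rest-framework style referenced above::

    import requests

    def fetch_all(url, limit=100):
        # Walk limit/offset pages; assumes a {"results": [...], "next": ...} envelope.
        offset, items = 0, []
        while True:
            page = requests.get(url, params={"limit": limit, "offset": offset}).json()
            items.extend(page["results"])
            if not page.get("next"):
                return items
            offset += limit

    # Placeholder resource path; substitute a real paginated endpoint.
    records = fetch_all("https://lightning.curoverse.com/v0.1.0/example-resource")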
--------------------------------------------------------------------------------
/docs/api/reasoning_v0.1.0.txt:
--------------------------------------------------------------------------------
Reasoning Behind the API
=========================

API design! Here are some explanations!

.. todo::

    Document API explanations

--------------------------------------------------------------------------------
/docs/cgf/index.txt:
--------------------------------------------------------------------------------
Compact Genome File (CGF) Format
=================================

Compact Genome File Format is our preliminary format that stores tiled genomes
compactly.

.. todo::

    Document details about CGF format
--------------------------------------------------------------------------------
/docs/data_structures/index.txt:
--------------------------------------------------------------------------------
Data Structures Specifications
================================

Here are some data structures.

Contents:

.. toctree::
   :maxdepth: 2

   v0.1.0
   v0.1.1
--------------------------------------------------------------------------------
/docs/index.txt:
--------------------------------------------------------------------------------
.. Lightning documentation master file, created by
   sphinx-quickstart on Tue Sep 15 18:29:51 2015.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to Lightning's documentation!
=====================================

Contents:

.. toctree::
   :maxdepth: 1

   intro/index
   tiling/index
   cgf/index
   lantern/index
   tile_library/index
   annotile/index
   data_structures/index
   api/index
   sdks/index
   sprite/index


.. note::
    Please keep in mind Lightning is under development, as is its documentation.
    Feel free to file bugs or documentation errors at https://dev.arvados.org/projects/lightning.

.. todo::
    Make sure https://dev.arvados.org/projects/lightning is public and is a well-behaved
    redmine account.

For an introduction, basic information about using Lightning, and Lightning's design
document, see :doc:`intro/index`.

For a description of the process of tiling (the abstraction of genomic sequences
that makes Lightning possible) and the functions and pipelines we provide for tiling
genomes, see :doc:`tiling/index`.

For a description of our representation of tiled genomes (Compact Genome Format),
see :doc:`cgf/index`.

For a description of our in-memory database for tiled genomes (Lantern), including
the REST API it supports, see :doc:`lantern/index`.

For a description of our in-memory database for tile variants (the tile library),
see :doc:`tile_library/index`.

For a description of Annotile, the way we support annotations of tile variants, how
to import annotation software, and how to add human-generated annotations, see
:doc:`annotile/index`.

For a description of Lightning Data Structures, used for interaction with the
Lightning APIs, see :doc:`data_structures/index`.

For a description of Lightning APIs, see :doc:`api/index`.

For a description of Lightning Software Development Kits, see :doc:`sdks/index`.

Finally, for a description of the web browser application that runs on Lightning
(Sprite), see :doc:`sprite/index`.
Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
--------------------------------------------------------------------------------
/docs/intro/importing_genomes.txt:
--------------------------------------------------------------------------------
Importing a Genome
===================

Importing a genome is difficult. We'll probably do it using pipelines. Have one
pipeline in pipeline_templates that is run when someone uploads a genome.

We acknowledge that phenotypes or meta-information about each genome is
vital for interpreting results. However, the breadth of available phenotype
databases and the number of possible pieces of information make using Lightning
as a phenotype database infeasible. Sprite provides a preliminary phenotype database,
which includes information about whether each genome (or set of calls, also known
as a callset) is a reference. Additionally, it stores whether the phases are well
known, as well as the sex and the ethnicity.

.. todo::
    Implement a better process for importing a genome
    Document the process
--------------------------------------------------------------------------------
/docs/intro/index.txt:
--------------------------------------------------------------------------------
Getting started
=================

Welcome to Lightning! This documentation section is most relevant for first-time
users. :doc:`overview` provides an overview of Lightning: its uses and components.
How-to guides and tutorials are expected to be placed here as the project grows.

.. toctree::
   :maxdepth: 1

   overview
   importing_genomes
   design_doc
   install
--------------------------------------------------------------------------------
/docs/intro/install.txt:
--------------------------------------------------------------------------------
Installation
=============

Since Lightning is in development, our installation process has not yet been
determined, implemented, or documented.

.. todo::
    Create a good installation process and document it
--------------------------------------------------------------------------------
/docs/lantern/index.txt:
--------------------------------------------------------------------------------
Lantern Specifications
=======================

Lantern is our in-memory database which stores individual tiled genome sequences. It
allows us to make very fast comparisons between genomes and filter on genome sequences.
Since it is contained in random access memory (RAM), queries are very fast.

.. todo::

    Document Lantern and the REST APIs used to interact with Lantern
--------------------------------------------------------------------------------
/docs/sdks/index.txt:
--------------------------------------------------------------------------------
Software Development Kits
==========================

This section is here for completeness: we currently do not have SDKs available
for Lightning.
--------------------------------------------------------------------------------
/docs/sprite/index.txt:
--------------------------------------------------------------------------------
Sprite
========

Sprite is a Django web application that provides visualizations and easy interaction
with Lightning, as well as preliminary phenotype and tile variant annotation databases.

.. todo::
    * Specify phenotype database
    * Specify annotation database(s)

.. toctree::
   :maxdepth: 2

   annotations
--------------------------------------------------------------------------------
/docs/tile_library/index.txt:
--------------------------------------------------------------------------------
Tile Library
=============

The tile library is our in-memory database that stores tile variants and their
information. Tile variants are associated with their sequence, MD5 hash digest,
tile position, and other meta-information. The tile library stores this information
compactly while enabling queries on it.

.. todo::
    Describe/specify the queries the Tile Library supports
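A minimal sketch of the association described above: a tile variant's MD5 digest can be
computed from its sequence with the standard library and bundled with its position. The
dictionary layout, the position label, and the case normalization are assumptions of
this sketch, not the library's actual storage format::

    import hashlib

    def tile_variant_record(tile_position, sequence):
        # Digest the upper-cased sequence; whether the real library normalizes
        # case before hashing is an assumption here.
        seq = sequence.upper()
        return {
            "tile_position": tile_position,   # placeholder position label
            "sequence": seq,
            "md5": hashlib.md5(seq.encode("ascii")).hexdigest(),
            "length": len(seq),
        }

    record = tile_variant_record("example-position", "acgt" * 60)
    print(record["md5"], record["length"])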
--------------------------------------------------------------------------------
/docs/tiling/index.txt:
--------------------------------------------------------------------------------
Tiling Overview
================

Lightning is made possible by the process of ``tiling``, which takes advantage of
the high degree of redundancy in a population of genomes. Tiling partitions genomes
into ``tiles``: overlapping, variable-length sequences that begin and end with unique
k-mers, termed ``tags``. This document is an overview of our tiling implementation,
along with the crunch scripts and pipeline templates available for tiling genome
inputs.

We currently accept GFF files and Complete Genomics CGI-var files as inputs.

.. todo::
    * Document PASTA/FASTA
    * Write basic instructions for writing one's own conversion of a file format to a tiling
      (As I understand it, one must write a conversion between that file format
      and PASTA, then probably add a pipeline template component).
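A toy sketch of the partitioning described above: given a set of tag k-mers, each tile
runs from one tag through the end of the next tag, so adjacent tiles overlap by exactly
one tag. Real tag sets use much longer, genome-wide-unique k-mers, and the production
tiling is done by the crunch scripts and pipelines, not by this function::

    def tile_sequence(seq, tags):
        # Assumes all tags share one length; real tags are long, unique k-mers.
        k = len(next(iter(tags)))
        starts = [i for i in range(len(seq) - k + 1) if seq[i:i + k] in tags]
        tiles = []
        for a, b in zip(starts, starts[1:]):
            tiles.append(seq[a:b + k])   # each tile ends with the next tag (the overlap)
        return tiles

    tags = {"AAAA", "CCCC"}              # toy 4-bp tags for illustration only
    print(tile_sequence("AAAAGGTTCCCCTTGGAAAA", tags))
    # -> ['AAAAGGTTCCCC', 'CCCCTTGGAAAA']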
--------------------------------------------------------------------------------
/experimental/.gitignore:
--------------------------------------------------------------------------------
*~
--------------------------------------------------------------------------------
/experimental/TylerSpecifications.txt:
--------------------------------------------------------------------------------
h1. Tyler is a FASTA --> Tiling service

We have decided against implementing it for now: extracting FASTA sequences away from their context creates problems. Without aligning the sequences, partial tiles become incredibly common, as do poorly sequenced regions. Given that this use case is not very common and is strictly less powerful than BLAST, the only reason to implement Tyler is if it is an easier task than aligning FASTQ to Tiles or aligning called genomes (VCF, GFF, etc.) to Tiles. We have decided this is not the case, so until the FASTA --> Tiling service is requested, this is not going to be implemented.

h1. Documentation

h3. Scope

Tyler should be a program that users run on their laptops. It should take 1 minute to tile an entire human genome on a laptop (where the limiting time factor is the hard-drive read speed limit). It requires a tag set to initialize, takes FASTA sequences as input, and returns the tiling of the FASTA sequences.

h3. Set up

Tyler should be set up with a tag set. This tag set should be standardized, containing the tag sequences along with annotations (such as chromosome identifiers and paths). Our current tag set is input to the FASTJ generator as a bigwig file. It is probable that using a bigwig file is less than ideal, and we should develop a standard for tag set inputs (and, in addition, give each tag set a unique identifier based on its content).

h3. Input and Output

Once Tyler has been set up, it should take a well-sequenced FASTA sequence (no n's) and return the unique tile identifier(s) associated with that FASTA sequence. These tile identifiers should include a flag indicating whether the tile is partial (includes only one tag), its position, tag(s), and length.

h3. Errors and User Warnings

* If the input FASTA sequence has n's, Tyler should throw an error to the user.
* If the input FASTA sequence does not include any tags, Tyler should throw an error to the user.

It is not Tyler's job to notice uncontiguous tags, which we predict will likely occur in cancer genomes and large rearrangements. Noticing uncontiguous tags is left to software running Tyler or to a more advanced Tyler (Tyler 2.0).

h3. Additional notes

For completeness, and to be useful to users, Tyler requires an additional service (probably Sprite) that can recognize its output. This service, when given the output from Tyler, should return the raw sequence, annotations, HGVS names, etc. This service will be much heavier in terms of information needed to run it, and so it should probably be exposed to the public via a website.
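The two error conditions listed under "Errors and User Warnings" can be written as a pre-flight check. Tyler itself is unimplemented, so the Python below is an illustrative sketch only; in particular, the naive tag scan is not a proposed design.

bc.. def check_tyler_input(seq, tags):
    # Reject the inputs Tyler is specified to error on: n's and tag-free sequences.
    seq = seq.upper()
    if "N" in seq:
        raise ValueError("input FASTA sequence contains n's")
    k = len(next(iter(tags)))
    if not any(seq[i:i + k] in tags for i in range(len(seq) - k + 1)):
        raise ValueError("input FASTA sequence does not include any tags")
    return True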
29 | 30 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input0.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 0 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input1.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 1 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input10.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 10 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input100.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 100 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input101.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 101 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input102.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 102 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input103.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 103 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input104.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 104 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input105.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 105 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input106.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 106 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input107.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 107 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input108.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 108 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input109.txt: 
-------------------------------------------------------------------------------- 1 | This is some input for text file 109 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input11.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 11 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input110.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 110 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input111.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 111 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input112.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 112 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input113.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 113 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input114.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 114 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input115.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 115 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input116.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 116 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input117.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 117 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input118.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 118 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input119.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 119 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input12.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 12 2 | 
-------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input120.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 120 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input121.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 121 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input122.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 122 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input123.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 123 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input124.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 124 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input125.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 125 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input126.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 126 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input127.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 127 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input128.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 128 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input129.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 129 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input13.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 13 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input130.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 130 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input131.txt: 
-------------------------------------------------------------------------------- 1 | This is some input for text file 131 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input132.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 132 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input133.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 133 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input134.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 134 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input135.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 135 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input136.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 136 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input137.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 137 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input138.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 138 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input139.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 139 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input14.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 14 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input140.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 140 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input141.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 141 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input142.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 142 2 | 
-------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input143.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 143 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input144.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 144 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input145.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 145 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input146.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 146 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input147.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 147 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input148.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 148 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input149.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 149 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input15.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 15 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input150.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 150 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input16.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 16 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input17.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 17 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input18.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 18 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input19.txt: 
-------------------------------------------------------------------------------- 1 | This is some input for text file 19 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input2.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 2 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input20.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 20 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input21.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 21 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input22.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 22 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input23.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 23 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input24.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 24 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input25.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 25 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input26.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 26 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input27.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 27 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input28.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 28 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input29.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 29 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input3.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 3 2 | -------------------------------------------------------------------------------- 
/experimental/abram/crunch_examples/.-canary/.-canary-data/input30.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 30 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input31.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 31 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input32.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 32 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input33.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 33 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input34.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 34 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input35.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 35 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input36.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 36 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input37.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 37 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input38.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 38 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input39.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 39 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input4.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 4 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input40.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 40 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input41.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 41 2 | 
-------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input42.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 42 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input43.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 43 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input44.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 44 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input45.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 45 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input46.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 46 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input47.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 47 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input48.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 48 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input49.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 49 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input5.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 5 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input50.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 50 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input51.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 51 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input52.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 52 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input53.txt: 
-------------------------------------------------------------------------------- 1 | This is some input for text file 53 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input54.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 54 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input55.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 55 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input56.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 56 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input57.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 57 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input58.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 58 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input59.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 59 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input6.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 6 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input60.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 60 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input61.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 61 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input62.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 62 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input63.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 63 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input64.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 64 2 | -------------------------------------------------------------------------------- 
/experimental/abram/crunch_examples/.-canary/.-canary-data/input65.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 65 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input66.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 66 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input67.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 67 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input68.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 68 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input69.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 69 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input7.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 7 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input70.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 70 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input71.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 71 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input72.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 72 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input73.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 73 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input74.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 74 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input75.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 75 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input76.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 76 2 | 
-------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input77.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 77 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input78.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 78 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input79.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 79 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input8.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 8 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input80.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 80 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input81.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 81 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input82.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 82 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input83.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 83 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input84.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 84 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input85.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 85 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input86.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 86 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input87.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 87 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input88.txt: 
-------------------------------------------------------------------------------- 1 | This is some input for text file 88 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input89.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 89 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input9.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 9 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input90.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 90 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input91.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 91 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input92.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 92 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input93.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 93 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input94.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 94 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input95.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 95 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input96.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 96 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input97.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 97 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input98.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 98 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/.-canary-data/input99.txt: -------------------------------------------------------------------------------- 1 | This is some input for text file 99 2 | -------------------------------------------------------------------------------- 
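The .-canary-data collection listed above is deliberately trivial: a flat directory of numbered text files, each holding the single line "This is some input for text file N". For local experimentation the directory can be regenerated with a few lines of Python; the file count in the sketch below is a stand-in, so set it to however many inputN.txt files the checked-in data actually carries.

#!/usr/bin/env python3
# Regenerate a .-canary-data style directory of one-line input files.
# N_FILES is a hypothetical count -- adjust it to match the checked-in data.
import os

N_FILES = 151
OUT_DIR = ".-canary-data"

os.makedirs(OUT_DIR, exist_ok=True)
for i in range(N_FILES):
    with open(os.path.join(OUT_DIR, "input%d.txt" % i), "w") as fh:
        fh.write("This is some input for text file %d\n" % i)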
/experimental/abram/crunch_examples/.-canary/canary.pipeline: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Coalmine", 3 | 4 | "components": { 5 | 6 | "Canary": { 7 | 8 | "script_parameters": { 9 | "pattern": "input", 10 | "input": { 11 | "required" : true, 12 | "dataclass" : "Collection" 13 | } 14 | }, 15 | 16 | "repository": "$USER", 17 | "script_version": "master", 18 | "script": "examples/.-canary/grep" 19 | 20 | } 21 | 22 | } 23 | 24 | } 25 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/grep: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import arvados 4 | import re 5 | 6 | arvados.job_setup.one_task_per_input_file(if_sequence=0, and_end_task=True) 7 | 8 | this_job = arvados.current_job() 9 | this_task = arvados.current_task() 10 | this_task_input = this_task['parameters']['input'] 11 | pattern = re.compile(this_job['script_parameters']['pattern']) 12 | 13 | input_file = list(arvados.CollectionReader(this_task_input).all_files())[0] 14 | out = arvados.CollectionWriter() 15 | out.set_current_file_name(input_file.decompressed_name()) 16 | out.set_current_stream_name(input_file.stream_name()) 17 | for line in input_file.readlines(): 18 | if pattern.search(line): 19 | out.write(line) 20 | 21 | this_task.set_output(out.finish()) 22 | 23 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/.-canary/register_and_run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Register the pipeline template with arvados and immediately 4 | # run from the command line. 5 | # 6 | # This script assumes the script has been checked into the repository. 7 | # 8 | # This also requires the 'json' tool to parse json. See: https://github.com/trentm/json 9 | # 10 | 11 | 12 | pipeline_template="canary.pipeline" 13 | 14 | # An example dataset to process. The data set is small and is 15 | # in this subtree of the repository, so we could just reference it directly 16 | # in our script, but the idea is to show how you would use data in your 17 | # crunch script from the keep store. 18 | # 19 | DATAUUID=`arv keep put --no-progress .-canary-data` 20 | 21 | # Register the pipeline with Arvados 22 | # 23 | template=`cat $pipeline_template | json -E "this.components.Canary.repository=\"$USER\"" ` 24 | ARVUUID=`arv pipeline_template create --pipeline-template "$template" | json uuid` 25 | echo "pipline template:" $ARVUUID 26 | 27 | # And run the pipeline here 28 | # 29 | arv pipeline run --no-reuse --run-here --template $ARVUUID Canary::input=$DATAUUID 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/0-simple-pipeline/register_and_run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Register the pipeline template with arvados and immediately 4 | # run from the command line. 5 | # 6 | # This script assumes the script has been checked into the repository. 7 | # 8 | # This also requires the 'json' tool to parse json. 
See: https://github.com/trentm/json 9 | # 10 | 11 | pt="systemInfo.pipeline" 12 | 13 | template=` cat $pt | json -E "this.components.housekeeper.repository=\"$USER\"" ` 14 | ARVUUID=`arv pipeline_template create --pipeline-template "$template" | json uuid` 15 | echo "pipline template:" $ARVUUID 16 | 17 | arv pipeline run --run-here --template $ARVUUID 18 | 19 | 20 | 21 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/0-simple-pipeline/systemInfo.pipeline: -------------------------------------------------------------------------------- 1 | { 2 | "name": "premeditated jointing Mass", 3 | "components": { 4 | "housekeeper": { 5 | "script_parameters": {}, 6 | "script_version": "master", 7 | "repository": "$USER", 8 | "script": "examples/0-simple-pipeline/systemInfo.py" 9 | } 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/0-simple-pipeline/systemInfo.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # 3 | # A script to do some basic qurerying of the system 4 | # it's running on. 5 | # 6 | # The output will be put into a collection. 7 | # 8 | 9 | import time 10 | sSec = time.time() 11 | 12 | import arvados as arv 13 | import subprocess as sp 14 | 15 | def logInfo( of ): 16 | whoinfo = sp.check_output(["whoami"]) 17 | of.write("user: " + whoinfo) 18 | 19 | pwdinfo = sp.check_output(["pwd"]) 20 | of.write("pwd: " + pwdinfo) 21 | 22 | lsinfo = sp.check_output(["ls", "-lahR"]) 23 | of.write("directory structure:\n" + lsinfo) 24 | 25 | dfinfo = sp.check_output(["df", "-h"]) 26 | of.write("df:\n" + dfinfo) 27 | 28 | meminfo = sp.check_output(["free", "-hm"]) 29 | of.write("mem:\n" + meminfo) 30 | 31 | hostinfo = sp.check_output(["hostname"]) 32 | of.write("host: " + hostinfo) 33 | 34 | 35 | job = arv.current_job() 36 | task = arv.current_task() 37 | 38 | of = arv.CollectionWriter() 39 | of.set_current_file_name("info.log") 40 | 41 | whoinfo = sp.check_output(["whoami"]) 42 | of.write("user: " + whoinfo + "\n" ) 43 | 44 | pwdinfo = sp.check_output(["pwd"]) 45 | of.write("pwd: " + pwdinfo + "\n" ) 46 | 47 | lsinfo = sp.check_output(["ls", "-lahR"]) 48 | of.write("directory structure:\n" + lsinfo) 49 | 50 | dfinfo = sp.check_output(["df", "-h"]) 51 | of.write("df:\n" + dfinfo + "\n" ) 52 | 53 | meminfo = sp.check_output(["free", "-hm"]) 54 | of.write("mem:\n" + meminfo + "\n" ) 55 | 56 | hostinfo = sp.check_output(["hostname"]) 57 | of.write("host: " + hostinfo + "\n") 58 | 59 | 60 | eSec = time.time() 61 | of.write( "took: " + "{0:.2f}s".format(eSec - sSec) + "\n" ) 62 | 63 | 64 | envVar = sp.check_output(["printenv"]) 65 | of.write("environment variables:\n" + envVar + "\n" ) 66 | 67 | 68 | ofId = of.finish() 69 | arv.current_task().set_output( ofId ) 70 | 71 | 72 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq1.fa: -------------------------------------------------------------------------------- 1 | >chr1:500000-500100 2 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 3 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq10.fa: -------------------------------------------------------------------------------- 1 
| >chr10:500000-500100 2 | gaatttggagctgagaggcaataaattgataGAGGTGCATTTGGTGACTC 3 | CAAAAGAGGCAGATCTTTCTCGTTTTTTCTGCTGTGGCTGCTAACAGTTC 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq11.fa: -------------------------------------------------------------------------------- 1 | >chr11:500000-500100 2 | TGGAGCATACCTGGCTGTCAGCAGGGCTCCCCTGAGCCAAGCTGCCACGC 3 | CCTTCGCCTGCCAGAGCCCAGAGCCCGGAGCAGCACTCTTCAGGCGGCAG 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq12.fa: -------------------------------------------------------------------------------- 1 | >chr12:500000-500100 2 | Gtgtctggcagttggtgctggctattggctgggaagtcttagttctccac 3 | atggcttctcatcttctggtagatggagcttttccactgcatggtggtct 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq13.fa: -------------------------------------------------------------------------------- 1 | >chr13:500000-500100 2 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 3 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq14.fa: -------------------------------------------------------------------------------- 1 | >chr14:500000-500100 2 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 3 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq15.fa: -------------------------------------------------------------------------------- 1 | >chr15:500000-500100 2 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 3 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq16.fa: -------------------------------------------------------------------------------- 1 | >chr16:500000-500100 2 | GCCACCACATGCACAGGCCTTGCCCACACTCAAGAGAGGGGGTCTTAGAA 3 | TTCTGCCTGCTACCATGGTCTAGTCTTGGCACACCTTCCTTGACAACATG 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq17.fa: -------------------------------------------------------------------------------- 1 | >chr17:500000-500100 2 | CCCAAAGGGTTGATTCTCAGCTTAAACACTACACTAAGTCATTTAGCAGT 3 | AATGTTTGGGCCGGGTTCTGAGTGTGTGAATTCTGTCCAAAAGCTGAAGA 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq18.fa: -------------------------------------------------------------------------------- 1 | >chr18:500000-500100 2 | CATTGCCAGATGTGGCGAGGGCGCACGGAGAACATCCCCAGTCCCGGAGA 3 | AAACACCCGGCCGCCAGCGCGGGGACCGCTCGGGCCGGGAAACGGCTAGA 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq19.fa: -------------------------------------------------------------------------------- 1 | >chr19:500000-500100 2 | GGCTGGGGTGGGCAGAGAGGAGGCTGGATTTGAGGGATATTTTACAGAAC 3 
| ACGGGCCCTCACGGGCCTCAGTCTCCCCATCAGTCCCACAGGGGATGGAG 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq2.fa: -------------------------------------------------------------------------------- 1 | >chr2:500000-500100 2 | CCAACGGGTTCACCTGATTGATGCTCAGGATGTTGGCGGGTTGAGTTTGC 3 | TATGTGTGTGATGTGTGGGGACTGACGCAATAACAATGGCATTTAAAGCT 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq20.fa: -------------------------------------------------------------------------------- 1 | >chr20:500000-500100 2 | TTTccaaacaagatcaataaaaagaaatccaaaaccaaacatataattac 3 | aaaatagcaggaaaaaacaaagatgagttgggtgtaatggcacaggccta 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq21.fa: -------------------------------------------------------------------------------- 1 | >chr21:500000-500100 2 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 3 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq22.fa: -------------------------------------------------------------------------------- 1 | >chr22:500000-500100 2 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 3 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq3.fa: -------------------------------------------------------------------------------- 1 | >chr3:500000-500100 2 | TTTTTAGAATCCAAAGAGGTATTTACATATTCTGTAGCTCATTGTAGCAG 3 | AACATTTTTCTATCTCTCTAAAGATTATGTACACAATCTTTAAGAGCATT 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq4.fa: -------------------------------------------------------------------------------- 1 | >chr4:500000-500100 2 | ACTGTCTGAGCCGAGATATGATGGGTGAGTGGGGACTCCCCAGGTGGGCA 3 | GAACAGGCTGGTAGCTTCTGCACATGGCAGAGCTGTAGGACTCCACAGCC 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq5.fa: -------------------------------------------------------------------------------- 1 | >chr5:500000-500100 2 | CTACAGAGATACCATGGGAGTGTGGAGGGGCCACTGCACCTCAAAGAAGA 3 | AGGAACCCACAGCCCTGGGCCCCGTCGGAGCCACCACAGGCGCAGGGCAC 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq6.fa: -------------------------------------------------------------------------------- 1 | >chr6:500000-500100 2 | ACGCACACACACACAGAGAATATATATAGATAAGTATCTATATATACTGT 3 | AAAACATATACACACTGTAAAACATATACGGTAAAACATATATACTGTAA 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq7.fa: -------------------------------------------------------------------------------- 1 | >chr7:500000-500100 2 | TTCTCCCGCAGCTTCCAGGACCAGCAGCTCCCCAGAAGGCGTTGTCTCCT 3 | CTGGCTGTGGGCTCAGCCATGGTGGGGCTGGCAGAAGGAGGCGGGAGGAG 4 | 
-------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq8.fa: -------------------------------------------------------------------------------- 1 | >chr8:500000-500100 2 | Gtgtatattttatgtgaaaagtttaatccattacattgaaggcaattatt 3 | gatatatgaagccttatctttgtcattttattaattgatttctggttgtt 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seq9.fa: -------------------------------------------------------------------------------- 1 | >chr9:500000-500100 2 | ccttgatgatgataatatttccttctggtagagagagaacatcttttgca 3 | tgggaattttatctgctgcttttaagaaacagaaggaagggtagagtgat 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seqM.fa: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seqM.fa -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seqX.fa: -------------------------------------------------------------------------------- 1 | >chrX:500000-500100 2 | ctcgggaggctgaggccggagaatctcttgaacccgggaggcagaggttg 3 | ccgtgagccgagaccatgccgttgcactccagcctgggcaacaggagtgt 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/1-input-pipeline-data/seqY.fa: -------------------------------------------------------------------------------- 1 | >chrY:500000-500100 2 | ccgggttcaagcgattctcctgcctcagcctccctagtaggtgggattac 3 | aggtgcccgccaccacgcctggctaatttttgtatttttactagaaacgg 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/grep: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import arvados 4 | import re 5 | 6 | arvados.job_setup.one_task_per_input_file(if_sequence=0, and_end_task=True) 7 | 8 | this_job = arvados.current_job() 9 | this_task = arvados.current_task() 10 | this_task_input = this_task['parameters']['input'] 11 | pattern = re.compile(this_job['script_parameters']['pattern']) 12 | 13 | input_file = list(arvados.CollectionReader(this_task_input).all_files())[0] 14 | out = arvados.CollectionWriter() 15 | out.set_current_file_name(input_file.decompressed_name()) 16 | out.set_current_stream_name(input_file.stream_name()) 17 | for line in input_file.readlines(): 18 | if pattern.search(line): 19 | out.write(line) 20 | 21 | this_task.set_output(out.finish()) 22 | 23 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/grep.pipeline: -------------------------------------------------------------------------------- 1 | { 2 | "name": "depressant pigeonholed fisticuffs", 3 | "components": { 4 | "mattered": { 5 | "script_parameters": { 6 | "pattern": "TATATATAGATAAGTATCT", 7 | "input": { 8 | "required" : true, 9 | "dataclass" : "Collection" 10 | } 11 | }, 12 | "script_version": "master", 13 | "repository": "$USER", 14 | "script": "examples/1-input-pipeline/grep" 15 | } 16 | } 17 | } 18 | 
-------------------------------------------------------------------------------- /experimental/abram/crunch_examples/1-input-pipeline/register_and_run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Register the pipeline template with arvados and immediately 4 | # run from the command line. 5 | # 6 | # This script assumes the script has been checked into the repository. 7 | # 8 | # This also requires the 'json' tool to parse json. See: https://github.com/trentm/json 9 | # 10 | 11 | 12 | pipeline_template="grep.pipeline" 13 | 14 | # An example dataset to process. The data set is small and is 15 | # in this subtree of the repository, so we could just reference it directly 16 | # in our script, but the idea is to show how you would use data in your 17 | # crunch script from the keep store. 18 | # 19 | DATAUUID=`arv keep put --no-progress 1-input-pipeline-data` 20 | echo "data uuid $DATAUUID" 21 | 22 | # Register the pipeline with Arvados 23 | # 24 | template=` cat $pipeline_template | json -E "this.components.mattered.repository=\"$USER\"" ` 25 | ARVUUID=`arv pipeline_template create --pipeline-template "$template" | json uuid` 26 | echo "pipline template:" $ARVUUID 27 | 28 | # And run the pipeline here 29 | # 30 | echo "running: 'arv pipeline run --run-here --no-reuse --template $ARVUUID mattered::input=$DATAUUID'" 31 | arv pipeline run --run-here --no-reuse --template $ARVUUID mattered::input=$DATAUUID 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq1.fa: -------------------------------------------------------------------------------- 1 | >chr1:500000-500100 2 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 3 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq10.fa: -------------------------------------------------------------------------------- 1 | >chr10:500000-500100 2 | gaatttggagctgagaggcaataaattgataGAGGTGCATTTGGTGACTC 3 | CAAAAGAGGCAGATCTTTCTCGTTTTTTCTGCTGTGGCTGCTAACAGTTC 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq11.fa: -------------------------------------------------------------------------------- 1 | >chr11:500000-500100 2 | TGGAGCATACCTGGCTGTCAGCAGGGCTCCCCTGAGCCAAGCTGCCACGC 3 | CCTTCGCCTGCCAGAGCCCAGAGCCCGGAGCAGCACTCTTCAGGCGGCAG 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq12.fa: -------------------------------------------------------------------------------- 1 | >chr12:500000-500100 2 | Gtgtctggcagttggtgctggctattggctgggaagtcttagttctccac 3 | atggcttctcatcttctggtagatggagcttttccactgcatggtggtct 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq13.fa: -------------------------------------------------------------------------------- 1 | >chr13:500000-500100 2 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 3 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 4 | -------------------------------------------------------------------------------- 
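The grep crunch script, identical in the .-canary and 1-input-pipeline examples above, follows the old-style Crunch fan-out idiom: arvados.job_setup.one_task_per_input_file(if_sequence=0, and_end_task=True) queues one child task per file in the input collection, and each child then reads its single file through CollectionReader, applies re.search with the job's "pattern" parameter, and writes the matching lines back out through CollectionWriter. Stripped of the Arvados plumbing, each task is just a line filter; the sketch below is a local, Arvados-free approximation of that per-file work (the script name and paths are illustrative only), not a replacement for the crunch script itself.

#!/usr/bin/env python3
# Local stand-in for one grep task: keep the lines of a single file
# that match a regular expression. Script name and paths are hypothetical.
import re
import sys

def grep_file(pattern, in_path, out_path):
    regex = re.compile(pattern)
    with open(in_path) as src, open(out_path, "w") as dst:
        for line in src:
            if regex.search(line):  # same per-line test the crunch script applies
                dst.write(line)

if __name__ == "__main__":
    # e.g. ./local_grep.py TATATATAGATAAGTATCT 1-input-pipeline-data/seq6.fa matches.txt
    grep_file(sys.argv[1], sys.argv[2], sys.argv[3])

Run against seq6.fa with the pattern from grep.pipeline, this keeps the one sequence line that contains TATATATAGATAAGTATCT.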
/experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq14.fa: -------------------------------------------------------------------------------- 1 | >chr14:500000-500100 2 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 3 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq15.fa: -------------------------------------------------------------------------------- 1 | >chr15:500000-500100 2 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 3 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq16.fa: -------------------------------------------------------------------------------- 1 | >chr16:500000-500100 2 | GCCACCACATGCACAGGCCTTGCCCACACTCAAGAGAGGGGGTCTTAGAA 3 | TTCTGCCTGCTACCATGGTCTAGTCTTGGCACACCTTCCTTGACAACATG 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq17.fa: -------------------------------------------------------------------------------- 1 | >chr17:500000-500100 2 | CCCAAAGGGTTGATTCTCAGCTTAAACACTACACTAAGTCATTTAGCAGT 3 | AATGTTTGGGCCGGGTTCTGAGTGTGTGAATTCTGTCCAAAAGCTGAAGA 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq18.fa: -------------------------------------------------------------------------------- 1 | >chr18:500000-500100 2 | CATTGCCAGATGTGGCGAGGGCGCACGGAGAACATCCCCAGTCCCGGAGA 3 | AAACACCCGGCCGCCAGCGCGGGGACCGCTCGGGCCGGGAAACGGCTAGA 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq19.fa: -------------------------------------------------------------------------------- 1 | >chr19:500000-500100 2 | GGCTGGGGTGGGCAGAGAGGAGGCTGGATTTGAGGGATATTTTACAGAAC 3 | ACGGGCCCTCACGGGCCTCAGTCTCCCCATCAGTCCCACAGGGGATGGAG 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq2.fa: -------------------------------------------------------------------------------- 1 | >chr2:500000-500100 2 | CCAACGGGTTCACCTGATTGATGCTCAGGATGTTGGCGGGTTGAGTTTGC 3 | TATGTGTGTGATGTGTGGGGACTGACGCAATAACAATGGCATTTAAAGCT 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq20.fa: -------------------------------------------------------------------------------- 1 | >chr20:500000-500100 2 | TTTccaaacaagatcaataaaaagaaatccaaaaccaaacatataattac 3 | aaaatagcaggaaaaaacaaagatgagttgggtgtaatggcacaggccta 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq21.fa: -------------------------------------------------------------------------------- 1 | >chr21:500000-500100 2 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 3 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 4 | -------------------------------------------------------------------------------- 
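A portability note on systemInfo.py from the 0-simple-pipeline example: the script targets the Python 2 interpreter implied by its #!/usr/bin/python shebang, defines a logInfo() helper it never calls, and concatenates the results of subprocess.check_output() directly into strings. Under Python 3, check_output() returns bytes, so those concatenations raise TypeError; a port would decode each result first, roughly as in the standalone sketch below, which mirrors only a subset of the probes the script already runs.

#!/usr/bin/env python3
# Python 3-friendly variant of the probes in systemInfo.py:
# decode the bytes from check_output() before building the report.
import subprocess

def probe(cmd):
    # check_output() returns bytes on Python 3; decode explicitly
    return subprocess.check_output(cmd).decode("utf-8", "replace")

report = (
    "user: " + probe(["whoami"]) +
    "pwd: "  + probe(["pwd"]) +
    "df:\n"  + probe(["df", "-h"]) +
    "host: " + probe(["hostname"])
)
print(report)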
/experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq22.fa: -------------------------------------------------------------------------------- 1 | >chr22:500000-500100 2 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 3 | NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq3.fa: -------------------------------------------------------------------------------- 1 | >chr3:500000-500100 2 | TTTTTAGAATCCAAAGAGGTATTTACATATTCTGTAGCTCATTGTAGCAG 3 | AACATTTTTCTATCTCTCTAAAGATTATGTACACAATCTTTAAGAGCATT 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq4.fa: -------------------------------------------------------------------------------- 1 | >chr4:500000-500100 2 | ACTGTCTGAGCCGAGATATGATGGGTGAGTGGGGACTCCCCAGGTGGGCA 3 | GAACAGGCTGGTAGCTTCTGCACATGGCAGAGCTGTAGGACTCCACAGCC 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq5.fa: -------------------------------------------------------------------------------- 1 | >chr5:500000-500100 2 | CTACAGAGATACCATGGGAGTGTGGAGGGGCCACTGCACCTCAAAGAAGA 3 | AGGAACCCACAGCCCTGGGCCCCGTCGGAGCCACCACAGGCGCAGGGCAC 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq6.fa: -------------------------------------------------------------------------------- 1 | >chr6:500000-500100 2 | ACGCACACACACACAGAGAATATATATAGATAAGTATCTATATATACTGT 3 | AAAACATATACACACTGTAAAACATATACGGTAAAACATATATACTGTAA 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq7.fa: -------------------------------------------------------------------------------- 1 | >chr7:500000-500100 2 | TTCTCCCGCAGCTTCCAGGACCAGCAGCTCCCCAGAAGGCGTTGTCTCCT 3 | CTGGCTGTGGGCTCAGCCATGGTGGGGCTGGCAGAAGGAGGCGGGAGGAG 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq8.fa: -------------------------------------------------------------------------------- 1 | >chr8:500000-500100 2 | Gtgtatattttatgtgaaaagtttaatccattacattgaaggcaattatt 3 | gatatatgaagccttatctttgtcattttattaattgatttctggttgtt 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seq9.fa: -------------------------------------------------------------------------------- 1 | >chr9:500000-500100 2 | ccttgatgatgataatatttccttctggtagagagagaacatcttttgca 3 | tgggaattttatctgctgcttttaagaaacagaaggaagggtagagtgat 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seqM.fa: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seqM.fa -------------------------------------------------------------------------------- 
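Apart from the data-free 0-simple-pipeline case, each register_and_run.sh in these examples follows the same recipe: upload the sample data to Keep with arv keep put --no-progress, rewrite each component's "repository" field in the pipeline template to the current user with the third-party json -E tool from github.com/trentm/json, then register and launch the template with arv pipeline_template create and arv pipeline run. Only the field rewrite depends on the json CLI; the standard-library sketch below performs an equivalent substitution (the default template filename is borrowed from the canary example and is otherwise a placeholder).

#!/usr/bin/env python3
# Stand-in for: cat canary.pipeline | json -E "this.components.<name>.repository=\"$USER\""
# Prints the rewritten template so it can be captured into a shell variable
# and passed to `arv pipeline_template create --pipeline-template "$template"`.
import json
import os
import sys

template_path = sys.argv[1] if len(sys.argv) > 1 else "canary.pipeline"

with open(template_path) as fh:
    template = json.load(fh)

# Point every component at the caller's repository, mirroring what the
# shell scripts do component-by-component with json -E.
for component in template.get("components", {}).values():
    component["repository"] = os.environ.get("USER", "")

json.dump(template, sys.stdout, indent=2)
print()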
/experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seqX.fa: -------------------------------------------------------------------------------- 1 | >chrX:500000-500100 2 | ctcgggaggctgaggccggagaatctcttgaacccgggaggcagaggttg 3 | ccgtgagccgagaccatgccgttgcactccagcctgggcaacaggagtgt 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/2-chain-pipeline-data/seqY.fa: -------------------------------------------------------------------------------- 1 | >chrY:500000-500100 2 | ccgggttcaagcgattctcctgcctcagcctccctagtaggtgggattac 3 | aggtgcccgccaccacgcctggctaatttttgtatttttactagaaacgg 4 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/chain-example.pipeline: -------------------------------------------------------------------------------- 1 | { 2 | "name": "plumb uneasiest compile", 3 | 4 | "components": { 5 | 6 | "unfetter2": { 7 | "script_parameters": { 8 | "input": { 9 | "output_of": "transverses" 10 | } 11 | }, 12 | "script_version": "master", 13 | "repository": "$USER", 14 | "script": "examples/2-chain-pipeline/post-process.py" 15 | }, 16 | 17 | "transverses": { 18 | "script_parameters": { 19 | "pattern": "TATATATAGATAAGTATCT", 20 | "input": { 21 | "required" : true, 22 | "dataclass" : "Collection" 23 | } 24 | }, 25 | "repository": "$USER", 26 | "script_version": "master", 27 | "script": "examples/2-chain-pipeline/grep" 28 | } 29 | 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/grep: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import arvados 4 | import re 5 | 6 | arvados.job_setup.one_task_per_input_file(if_sequence=0, and_end_task=True) 7 | 8 | this_job = arvados.current_job() 9 | this_task = arvados.current_task() 10 | this_task_input = this_task['parameters']['input'] 11 | pattern = re.compile(this_job['script_parameters']['pattern']) 12 | 13 | input_file = list(arvados.CollectionReader(this_task_input).all_files())[0] 14 | out = arvados.CollectionWriter() 15 | out.set_current_file_name(input_file.decompressed_name()) 16 | out.set_current_stream_name(input_file.stream_name()) 17 | for line in input_file.readlines(): 18 | if pattern.search(line): 19 | out.write(line) 20 | 21 | this_task.set_output(out.finish()) 22 | 23 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/post-process.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # 3 | 4 | import arvados 5 | import re 6 | 7 | arvados.job_setup.one_task_per_input_file(if_sequence=0, and_end_task=True) 8 | 9 | this_job = arvados.current_job() 10 | this_task = arvados.current_task() 11 | this_task_input = this_task['parameters']['input'] 12 | 13 | input_file = list( arvados.CollectionReader(this_task_input).all_files() )[0] 14 | 15 | out = arvados.CollectionWriter() 16 | out.set_current_file_name(input_file.decompressed_name()) 17 | out.set_current_stream_name(input_file.stream_name()) 18 | for line in input_file.readlines(): 19 | out.write( "!!!" 
+ line.upper() ) 20 | 21 | this_task.set_output(out.finish()) 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/2-chain-pipeline/register_and_run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Register the pipeline template with arvados and immediately 4 | # run from the command line. 5 | # 6 | # This script assumes the script has been checked into the repository. 7 | # 8 | # This also requires the 'json' tool to parse json. See: https://github.com/trentm/json 9 | # 10 | 11 | 12 | pipeline_template="chain-example.pipeline" 13 | 14 | # An example dataset to process. The data set is small and is 15 | # in this subtree of the repository, so we could just reference it directly 16 | # in our script, but the idea is to show how you would use data in your 17 | # crunch script from the keep store. 18 | # 19 | DATAUUID=`arv keep put --no-progress 2-chain-pipeline-data` 20 | 21 | # Register the pipeline with Arvados 22 | # 23 | template=` cat $pipeline_template | json -E "this.components.transverses.repository=\"$USER\"" | json -E "this.components.unfetter2.repository=\"$USER\"" ` 24 | ARVUUID=`arv pipeline_template create --pipeline-template "$template" | json uuid` 25 | echo "pipline template:" $ARVUUID 26 | 27 | # And run the pipeline here 28 | # 29 | arv pipeline run --run-here --template $ARVUUID transverses::input=$DATAUUID 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/3-multiple-input/3-multiple-input-data/input0.txt: -------------------------------------------------------------------------------- 1 | input0 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/3-multiple-input/3-multiple-input-data/input1.txt: -------------------------------------------------------------------------------- 1 | input1 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/3-multiple-input/multiInput.pipeline: -------------------------------------------------------------------------------- 1 | { 2 | "name": "plushest resisting neuron", 3 | "components": { 4 | 5 | "heartlessness": { 6 | "script_parameters": { 7 | 8 | "inputA": { "required" : true, "dataclass" : "Collection" }, 9 | "inputB": { "required" : true, "dataclass" : "Collection" } 10 | 11 | }, 12 | "script_version": "master", 13 | "repository": "$USER", 14 | "script": "examples/3-multiple-input/multiInput.py" 15 | } 16 | 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/3-multiple-input/multiInput.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # 3 | 4 | import arvados 5 | import re 6 | 7 | this_job = arvados.current_job() 8 | this_task = arvados.current_task() 9 | 10 | this_job_inputA = this_job['script_parameters']['inputA'] 11 | this_job_inputB = this_job['script_parameters']['inputB'] 12 | 13 | # use fileA and fileB as strings to file ... 
14 | # 15 | #fileA = arvados.get_task_param_mount( 'inputA' ) 16 | #fileB = arvados.get_task_param_mount( 'inputB' ) 17 | 18 | 19 | input_fileA = list( arvados.CollectionReader( this_job_inputA ).all_files() )[0] 20 | input_fileB = list( arvados.CollectionReader( this_job_inputB ).all_files() )[0] 21 | 22 | out = arvados.CollectionWriter() 23 | 24 | 25 | out.set_current_file_name("output.txt") 26 | out.set_current_stream_name("results") 27 | 28 | for line in input_fileA.readlines(): 29 | out.write( "text from 'inputA': " + line.upper() ) 30 | 31 | for line in input_fileB.readlines(): 32 | out.write( "text from 'inputB': " + line.upper() ) 33 | 34 | this_task.set_output(out.finish()) 35 | 36 | 37 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/3-multiple-input/register_and_run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Register the pipeline template with arvados and immediately 4 | # run from the command line. 5 | # 6 | # This script assumes the script has been checked into the repository. 7 | # 8 | # This also requires the 'json' tool to parse json. See: https://github.com/trentm/json 9 | # 10 | 11 | 12 | pipeline_template="multiInput.pipeline" 13 | 14 | # An example dataset to process. The data set is small and is 15 | # in this subtree of the repository, so we could just reference it directly 16 | # in our script, but the idea is to show how you would use data in your 17 | # crunch script from the keep store. 18 | # 19 | DATAUUID0=`arv keep put --no-progress 3-multiple-input-data/input0.txt` 20 | DATAUUID1=`arv keep put --no-progress 3-multiple-input-data/input1.txt` 21 | echo "input0.txt uuid $DATAUUID0" 22 | echo "input1.txt uuid $DATAUUID1" 23 | 24 | # Register the pipeline with Arvados 25 | # 26 | 27 | template=`cat $pipeline_template | json -E "this.components.heartlessness.repository=\"$USER\"" ` 28 | ARVUUID=`arv pipeline_template create --pipeline-template "$template" | json uuid` 29 | echo "pipline template:" $ARVUUID 30 | 31 | # And run the pipeline here. 32 | # 33 | echo "running: 'arv pipeline run --run-here --no-reuse --template $ARVUUID heartlessness::inputA=$DATAUUID0 heartlessness::inputB=$DATAUUID1'" 34 | arv pipeline run --run-here --no-reuse --template $ARVUUID heartlessness::inputA=$DATAUUID0 heartlessness::inputB=$DATAUUID1 35 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/4-mount-input/4-mount-input/README: -------------------------------------------------------------------------------- 1 | A simple test showing how to use files from the arvados mount point in your crunch script. 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/4-mount-input/4-mount-input/README~: -------------------------------------------------------------------------------- 1 | A simple test showing how to use files from an arv mount in your crunch script. 
2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/4-mount-input/4-mount-input/sampleA/seqA.fa: -------------------------------------------------------------------------------- 1 | >chr10:500000-500100 2 | gaatttggagctgagaggcaataaattgataGAGGTGCATTTGGTGACTC 3 | CAAAAGAGGCAGATCTTTCTCGTTTTTTCTGCTGTGGCTGCTAACAGTTC 4 | 5 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/4-mount-input/4-mount-input/sampleB/seqB.fa: -------------------------------------------------------------------------------- 1 | >chr7:500000-500100 2 | TTCTCCCGCAGCTTCCAGGACCAGCAGCTCCCCAGAAGGCGTTGTCTCCT 3 | CTGGCTGTGGGCTCAGCCATGGTGGGGCTGGCAGAAGGAGGCGGGAGGAG 4 | 5 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/4-mount-input/mountInput.pipeline: -------------------------------------------------------------------------------- 1 | { 2 | "name": "flowers initiating forgot", 3 | "components": { 4 | "becks": { 5 | "script_parameters": { 6 | 7 | "DataUUID" : { "required" : true, "dataclass" : "Collection" }, 8 | "inputA": { "required" : false, "dataclass" : "Collection" }, 9 | "inputB": { "required" : false, "dataclass" : "Collection" } 10 | 11 | }, 12 | 13 | "script_version": "master", 14 | "repository": "$USER", 15 | "script": "examples/4-mount-input/mountInput.py" 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/4-mount-input/mountInput.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # 3 | 4 | import arvados 5 | import re 6 | import os 7 | 8 | this_job = arvados.current_job() 9 | this_task = arvados.current_task() 10 | 11 | this_job_input = this_job['script_parameters']['DataUUID'] 12 | 13 | inputPath = os.path.join( os.environ['TASK_KEEPMOUNT'], this_job_input ) 14 | 15 | out = arvados.CollectionWriter() 16 | 17 | out.set_current_stream_name( "results" ) 18 | out.set_current_file_name( "output.txt" ) 19 | 20 | out.write( "input path : " + str(inputPath) + "\n" ) 21 | 22 | 23 | readme_fp = open( inputPath + "/README" ) 24 | out.write( readme_fp.read() ) 25 | readme_fp.close() 26 | 27 | out.write( "\n\n" ) 28 | 29 | sampleA_fp = open( inputPath + "/sampleA/seqA.fa" ) 30 | out.write( sampleA_fp.read() ) 31 | sampleA_fp.close() 32 | 33 | out.write( "\n\n" ) 34 | 35 | sampleB_fp = open( inputPath + "/sampleB/seqB.fa" ) 36 | out.write( sampleB_fp.read() ) 37 | sampleB_fp.close() 38 | 39 | out.write( "\n\n" ) 40 | 41 | this_task.set_output(out.finish()) 42 | 43 | 44 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/4-mount-input/register_and_run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Register the pipeline template with arvados and immediately 4 | # run from the command line. 5 | # 6 | # This script assumes the script has been checked into the repository. 7 | # 8 | # This also requires the 'json' tool to parse json. See: https://github.com/trentm/json 9 | # 10 | 11 | 12 | pipeline_template="mountInput.pipeline" 13 | 14 | # An example dataset to process. 
The data set is small and is 15 | # in this subtree of the repository, so we could just reference it directly 16 | # in our script, but the idea is to show how you would use data in your 17 | # crunch script from the keep store. 18 | # 19 | DATAUUID=`arv keep put --no-progress 4-mount-input` 20 | echo "data uuid $DATAUUID" 21 | 22 | # Register the pipeline with Arvados 23 | # 24 | template=` cat $pipeline_template | json -E "this.components.becks.repository=\"$USER\"" ` 25 | ARVUUID=`arv pipeline_template create --pipeline-template "$template" | json uuid` 26 | echo "pipline template:" $ARVUUID 27 | 28 | # And run the pipeline here. 29 | # 30 | echo "arv pipeline run --run-here --no-reuse --template $ARVUUID becks::DataUUID=$DATAUUID" 31 | arv pipeline run --run-here --no-reuse --template $ARVUUID becks::DataUUID=$DATAUUID 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/5-custom-task/5-custom-task-data/A/A1.txt: -------------------------------------------------------------------------------- 1 | 1 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/5-custom-task/5-custom-task-data/A/A2.txt: -------------------------------------------------------------------------------- 1 | 2 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/5-custom-task/5-custom-task-data/A/A3.txt: -------------------------------------------------------------------------------- 1 | 3 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/5-custom-task/5-custom-task-data/A/A4.txt: -------------------------------------------------------------------------------- 1 | 4 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/5-custom-task/5-custom-task-data/A/A5.txt: -------------------------------------------------------------------------------- 1 | 5 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/5-custom-task/5-custom-task-data/B/B100.txt: -------------------------------------------------------------------------------- 1 | 100 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/5-custom-task/5-custom-task-data/B/B101.txt: -------------------------------------------------------------------------------- 1 | 101 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/5-custom-task/5-custom-task-data/B/B102.txt: -------------------------------------------------------------------------------- 1 | 102 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/5-custom-task/5-custom-task-data/B/B103.txt: -------------------------------------------------------------------------------- 1 | 103 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/5-custom-task/5-custom-task-data/B/B104.txt: -------------------------------------------------------------------------------- 1 | 104 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/5-custom-task/5-custom-task-data/B/B105.txt: 
-------------------------------------------------------------------------------- 1 | 105 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/5-custom-task/5-custom-task-data/B/B106.txt: -------------------------------------------------------------------------------- 1 | 106 2 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/5-custom-task/customTask.pipeline: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Desdemona foundering nobody", 3 | "components": { 4 | "denouement": { 5 | "script_parameters": { 6 | "MountUUID": { "required" : true , "dataclass" : "Collection" } 7 | }, 8 | "script_version": "master", 9 | "repository": "$USER", 10 | "script": "examples/5-custom-task/customTask.py" 11 | } 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/5-custom-task/customTask.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # 3 | 4 | import arvados as arv 5 | import re 6 | import os 7 | 8 | job_uuid = os.environ['JOB_UUID'] 9 | task_uuid = os.environ['TASK_UUID'] 10 | work_dir = os.environ['JOB_WORK'] 11 | 12 | 13 | task_qsequence = os.environ['TASK_QSEQUENCE'] 14 | task_sequence = os.environ['TASK_SEQUENCE'] 15 | 16 | this_job = arv.current_job() 17 | 18 | seq = int(task_sequence) 19 | 20 | # This is the parent task, so create a bunch of jobs 21 | # that will process on the input we specify. 22 | # 23 | if seq == 0 : 24 | 25 | taskName = 0 26 | mount_uuid = this_job['script_parameters']['MountUUID'] 27 | for a in range(1,6): 28 | for b in range(100,107): 29 | new_task_attributes = { 30 | 'job_uuid' : job_uuid, 31 | 'created_by_job_task_uuid' : task_uuid, 32 | 'sequence' : 1, 33 | 'parameters' : { 'MountUUID' : mount_uuid, 'filenameA' : "A/A" + str(a) + ".txt" , 'filenameB' : "B/B" + str(b) + ".txt", 'taskName' : str(taskName) } 34 | } 35 | taskName += 1 36 | 37 | # Queue up the task 38 | # 39 | arv.api('v1').job_tasks().create( body = new_task_attributes ).execute() 40 | 41 | # Exit this parent task with a 'success' message 42 | # 43 | arv.api('v1').job_tasks().update( uuid = task_uuid, body={'success':True }).execute() 44 | exit(0) 45 | 46 | 47 | # From here down is the child task created by the parent task 48 | # 49 | 50 | this_task = arv.current_task() 51 | 52 | mount_dir = os.path.join( os.environ['TASK_KEEPMOUNT'], this_task['parameters']['MountUUID'] ) 53 | 54 | filenameA = this_task["parameters"]["filenameA"] 55 | filenameB = this_task["parameters"]["filenameB"] 56 | taskName = this_task["parameters"]["taskName"] 57 | 58 | 59 | out = arv.CollectionWriter() 60 | out.set_current_stream_name( "output/ofjob" ) 61 | out.set_current_file_name( "output" + str(taskName) + ".txt" ) 62 | 63 | fa = open( os.path.join( mount_dir, filenameA ) ) 64 | outputA = fa.read() 65 | fa.close() 66 | 67 | fb = open( os.path.join( mount_dir, filenameB ) ) 68 | outputB = fb.read() 69 | fb.close() 70 | 71 | 72 | out.write( "seq " + str(seq) + ", taskName " + str(taskName) + "\n" ) 73 | out.write( str(outputA) + "\n" ) 74 | out.write( str(outputB) + "\n" ) 75 | 76 | this_task.set_output(out.finish()) 77 | 78 | 79 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/5-custom-task/register_and_run.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Register the pipeline template with arvados and immediately 4 | # run from the command line. 5 | # 6 | # This script assumes the script has been checked into the repository. 7 | # 8 | # This also requires the 'json' tool to parse json. See: https://github.com/trentm/json 9 | # 10 | 11 | 12 | pipeline_template="customTask.pipeline" 13 | 14 | # An example dataset to process. The data set is small and is 15 | # in this subtree of the repository, so we could just reference it directly 16 | # in our script, but the idea is to show how you would use data in your 17 | # crunch script from the keep store. 18 | # 19 | DATAUUID=`arv keep put --no-progress 5-custom-task-data` 20 | echo "data uuid $DATAUUID" 21 | 22 | # Register the pipeline with Arvados 23 | # 24 | template=` cat $pipeline_template | json -E "this.components.denouement.repository=\"$USER\"" ` 25 | ARVUUID=`arv pipeline_template create --pipeline-template "$template" | json uuid` 26 | echo "pipline template:" $ARVUUID 27 | 28 | # And run the pipeline here. 29 | # 30 | echo "arv pipeline run --run-here --no-reuse --template $ARVUUID denouement::MountUUID=$DATAUUID" 31 | arv pipeline run --run-here --no-reuse --template $ARVUUID denouement::MountUUID=$DATAUUID 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /experimental/abram/crunch_examples/README.md: -------------------------------------------------------------------------------- 1 | # Crunch Examples 2 | 3 | This is a set of sample crunch jobs meant for illustrative purposes. 4 | 5 | * [0-simple-pipeline](0-simple-pipeline) 6 | * [1-input-pipeline](1-input-pipeline) 7 | * [2-chain-pipeline](2-chain-pipeline) 8 | * [3-multiple-input](3-multiple-input) 9 | * [4-mount-input](4-mount-input) 10 | * [5-custom-task](5-custom-task) 11 | 12 | -------------------------------------------------------------------------------- /experimental/abram/extend_polyphen_with_hg19.go: -------------------------------------------------------------------------------- 1 | /* 2 | sample usage: 3 | 4 | time pigz -c -d /scratch2/awz/polyphen_extract.gz | ./extend_polyphen_with_hg19 | pigz -c - > /scratch2/awz/polyphen_w_hg18ref.gz 5 | 6 | */ 7 | 8 | package main 9 | 10 | import "fmt" 11 | import "os" 12 | 13 | import "io/ioutil" 14 | import "bytes" 15 | import _ "strings" 16 | import "strconv" 17 | 18 | import "./aux" 19 | import "./bioenv" 20 | 21 | var CHR = []string{ "chr1", "chr2", "chr3", "chr4", "chr5", "chr6", "chr7", "chr8", "chr9", "chr10", 22 | "chr11", "chr12", "chr13", "chr14", "chr15", "chr16", "chr17", "chr18", "chr19", "chr20", 23 | "chr21", "chr22", "chrX", "chrY", "chrM" } 24 | 25 | var CHRFA = map[string][]byte{} 26 | 27 | func main() { 28 | 29 | benv,_ := bioenv.BioEnv() 30 | basefadir := benv["dir:hg19.fa"] 31 | 32 | for i:=0; i= len(CHRFA[ string(chr) ])) { 75 | panic( fmt.Sprintf("OOB pos0ref %d, len(CHRFA[%s]) = %d, (pos1ref %d), line %d\n", pos0ref, string(chr), len(CHRFA[ string(chr) ]), pos1ref, i) ) 76 | } 77 | 78 | //fmt.Printf("%s %s (%s{%s} %d{%s})\n", string(lines[i]), string(CHRFA[ string(chr) ][pos]), chr, chr_pos[0], pos, chr_pos[1] ) 79 | fmt.Printf("%s\t%s\n", string(lines[i]), string(CHRFA[ string(chr) ][pos0ref]) ) 80 | 81 | tot++ 82 | //if tot > 10 { break } 83 | 84 | } 85 | //fmt.Println("#", tot) 86 | 87 | 88 | } 89 | -------------------------------------------------------------------------------- 
/experimental/beacon/.gitignore: -------------------------------------------------------------------------------- 1 | databaseconfig.py 2 | .env 3 | *.abv 4 | bin 5 | lib 6 | include 7 | *.bkup 8 | -------------------------------------------------------------------------------- /experimental/beacon/Procfile: -------------------------------------------------------------------------------- 1 | web: python WTFisThisRegister.py 2 | -------------------------------------------------------------------------------- /experimental/beacon/README.md: -------------------------------------------------------------------------------- 1 | Beacon 2 | ================= 3 | A beacon. See http://ga4gh.org/#/beacon 4 | 5 | Version 6 | ----- 7 | 0.1 Nyan - Does single allele lookups on a single genome, with the reference genome being hg19. 8 | 9 | -------------------------------------------------------------------------------- /experimental/beacon/requirements.txt: -------------------------------------------------------------------------------- 1 | Flask==0.10.1 2 | Flask-SQLAlchemy==2.0 3 | Jinja2==2.7.2 4 | MarkupSafe==0.18 5 | SQLAlchemy==0.9.7 6 | Werkzeug==0.9.4 7 | argparse==1.2.1 8 | gunicorn==18.0 9 | itsdangerous==0.23 10 | psycopg2==2.5.4 11 | wsgiref==0.1.2 12 | -------------------------------------------------------------------------------- /experimental/beacon/static/fav.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/beacon/static/fav.ico -------------------------------------------------------------------------------- /experimental/beacon/static/nyancat.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/beacon/static/nyancat.gif -------------------------------------------------------------------------------- /experimental/beacon/static/style.css: -------------------------------------------------------------------------------- 1 | body { font-family: sans-serif; background: #eee; } 2 | a, h1, h2 { color: #377ba8; } 3 | h1, h2 { font-family: 'Georgia', serif; } 4 | h1 { border-bottom: 2px solid #eee; margin: 0; } 5 | h2 { font-size: 1.2em; margin-top:0.5em; } 6 | 7 | .page { margin: 2em auto; width: 70%; border: 5px solid #ccc; 8 | border-radius:5px; 9 | min-width:35em; 10 | padding: 0.8em; background: white; } 11 | .entries { list-style: none; margin: 0; padding: 0; } 12 | .entries li { margin: 0em 1.2em; } 13 | .entries li h2 { margin-left: -1em; font-family: monospace; font-size: 2em} 14 | .helptext { font-family: monospace; 15 | word-wrap: break-word; /* IE>=5.5 */ 16 | white-space: pre; /* IE>=6 */ 17 | white-space: -moz-pre-wrap; /* For Fx<=2 */ 18 | white-space: pre-wrap; /* Fx>3, Opera>8, Safari>3*/ } 19 | .add-entry { font-size: 0.9em; border-bottom: 1px solid #ccc; } 20 | .add-entry dl { font-weight: bold; } 21 | .delete-entry { font-weight: bold; margin: 1em; } 22 | .metanav { text-align: right; font-size: 0.8em; padding: 0.3em; 23 | margin-bottom: 1em; background: #fafafa; } 24 | .flash { background: #cee5F5; padding: 0.5em; 25 | border: 1px solid #aacbe2; margin:0.5em;} 26 | .error { background: #f0d6d6; padding: 0.5em; } 27 | 28 | table, th, td { background-color: transparent; margin: 10px; padding: 6px; line-height: 1.42857143; border-top: 1px solid #ddd; 29 | border-collapse: 
collapse; /*border: 1px solid #ddd;*/} 30 | 31 | input[type=submit] { cursor:pointer; background-color: #377ba8; border:1px solid #000; border-radius: 10px; padding:6px; color:#fff; } 32 | input[type=submit]:hover { background-color: #2f6080; } 33 | input[type=submit]:focus {background-color: #eee; } 34 | 35 | .rotate90 { 36 | -webkit-transform: rotate(-90deg); 37 | -moz-transform: rotate(-90deg); 38 | -o-transform: rotate(-90deg); 39 | -ms-transform: rotate(-90deg); 40 | transform: rotate(-90deg); 41 | } 42 | -------------------------------------------------------------------------------- /experimental/beacon/templates/botsearch.html: -------------------------------------------------------------------------------- 1 | {# population, genome, chromosome, coordinate, and allelle are the inputs 2 | #} 3 | 4 | 5 | {% if flashmsg is defined %} 6 | Result: {{ flashmsg }} 7 | {% else %} 8 | Result: Error! 9 | {% endif %} 10 | 11 | 12 | -------------------------------------------------------------------------------- /experimental/beacon/templates/layout.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Curoverse Beacon 5 | 6 |
7 |

Curoverse Beacon

8 |
9 | LIGHTNING || Dev3 Home || Dev4 Home ||
10 | BEACON || beacon search || 11 | population info || 12 | beacons faq || 13 |
14 | {% for message in get_flashed_messages() %} 15 |
{{ message }}
16 | {% endfor %} 17 | {% block body %} {% endblock %} 18 |
19 | 20 | 21 | -------------------------------------------------------------------------------- /experimental/beacon/templates/people.html: -------------------------------------------------------------------------------- 1 | {% extends "layout.html" %} 2 | {% block body %} 3 |

Population

4 | The following {{people|length}} human genomes from the Personal Genome Project are searched:
5 | {% for p in people %} 6 | {{p}}
7 | {% endfor %} 8 |
9 | In more detail, the following data is searched:

10 | {% for n in npyfnames%} 11 | {{ n }}
12 | {% endfor %} 13 | {% endblock %} 14 | -------------------------------------------------------------------------------- /experimental/createTagAndPosFromBandBedGraph.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import "os" 4 | import "fmt" 5 | import "path/filepath" 6 | 7 | var x string = "hello" 8 | 9 | var bandBounds map[string]map[int][2]int 10 | 11 | func visit(path string, f os.FileInfo, err error) error { 12 | 13 | mode := f.Mode() 14 | if mode.IsDir() { return nil } 15 | 16 | fmt.Printf("visited %s\n", path) 17 | return nil 18 | } 19 | 20 | func main() { 21 | user := os.Getenv("USER") 22 | bedGraphDir := fmt.Sprintf("/scratch/%s/bedGraph", user) 23 | 24 | err := filepath.Walk( bedGraphDir, visit ) 25 | _ = err 26 | 27 | bandBounds = make( map[string]map[int][2]int ) 28 | 29 | fmt.Println(x) 30 | 31 | } 32 | -------------------------------------------------------------------------------- /experimental/lantern/flint.go: -------------------------------------------------------------------------------- 1 | /* some simple tests of lantern functionality, not to be compiled with lantern */ 2 | /* 3 | to compile: 4 | 5 | go build flint.go lantern_tile.go 6 | 7 | to run: 8 | 9 | ./flint 10 | 11 | */ 12 | 13 | package main 14 | 15 | import "fmt" 16 | import "log" 17 | 18 | func flint() { 19 | 20 | fmt.Printf("initializing\n") 21 | 22 | e := TileSimpleInit() 23 | if e!=nil { log.Fatal(e) } 24 | 25 | fmt.Printf("init done\n\n") 26 | 27 | //tileid := "241.00.0000.0005" 28 | tileid := "247.00.0000.0000" 29 | 30 | count:=0 31 | for k := range gTileCache.TileIDSeqMap { 32 | fmt.Printf("%s\n", k ) 33 | count++ 34 | if count>5 { break } 35 | } 36 | 37 | 38 | fmt.Printf("getting tile sequence '%s'\n", tileid) 39 | 40 | s,e := GetTileSeq( tileid ) 41 | if e!=nil { log.Fatal(e) } 42 | 43 | fmt.Printf(">>> %s\n", s); 44 | 45 | TileStatsPrint() 46 | } 47 | -------------------------------------------------------------------------------- /experimental/lantern/lantern-0-0-1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/lantern/lantern-0-0-1 -------------------------------------------------------------------------------- /experimental/lantern/lantern_common_error.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import "io" 4 | import "fmt" 5 | import "net/http" 6 | 7 | 8 | 9 | func _errp( w http.ResponseWriter ) { 10 | 11 | io.WriteString(w, "{\n") 12 | io.WriteString(w, " \"Type\":\"failure\", \"Message\":\"parse failure\"\n") 13 | io.WriteString(w, "}") 14 | 15 | } 16 | 17 | func _errm( w http.ResponseWriter ) { 18 | 19 | io.WriteString(w, "{\n") 20 | io.WriteString(w, " \"Type\":\"failure\", \"Message\":\"max elements exceeded\"\n") 21 | io.WriteString(w, "}") 22 | 23 | } 24 | 25 | 26 | func _erre( w http.ResponseWriter, e error ) { 27 | 28 | io.WriteString(w, "{\n") 29 | io.WriteString(w, fmt.Sprintf(" \"Type\":\"failure\", \"Message\":\"%v\"\n", e) ) 30 | io.WriteString(w, "}") 31 | 32 | } 33 | 34 | 35 | -------------------------------------------------------------------------------- /experimental/lantern/lantern_parse.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import "fmt" 4 | import "strings" 5 | import "strconv" 6 | 7 | // Parses an 'int option' of the 
form: 8 | // 9 | // (\d+([\+\-]\d+)?)(,(\d+([\+\-]\d+)?))* 10 | // 11 | // into an array of ranges. 12 | // 13 | // The (\d) in the above regexp matches an int 14 | // in the specified base (up to whatever 15 | // strconv.ParseInt accepts) 16 | // 17 | // Here are some examples: 18 | // 19 | // '0' -> [[0,-1]] 20 | // '0+1' -> [[0,1]] 21 | // '0-f' -> [[0,15]] 22 | // 'a-b,3-5' -> [[10,11],[3,5]] 23 | // 'f,4+2,a-c' -> [[15,-1],[4,6],[10,13]] 24 | // 25 | 26 | func parseIntOption( istr string, base int ) ([][2]int64, error) { 27 | r := make( [][2]int64, 0, 8 ) 28 | commaval := strings.Split( istr, "," ) 29 | for i:=0; i 2 { return nil, fmt.Errorf("invalid option %s", commaval[i]) } 35 | 36 | a,ee := strconv.ParseInt( dashval[0], base, 64 ) 37 | if ee!=nil { return nil, fmt.Errorf("invalid option %s: %v", dashval[0], ee ) } 38 | 39 | if len(dashval[1])==0 { 40 | r = append( r, [2]int64{a,-1} ) 41 | continue 42 | } 43 | 44 | b,ee := strconv.ParseInt( dashval[1], base, 64) 45 | if ee!=nil { return nil, fmt.Errorf("invalid option %s: %v", dashval[1], ee ) } 46 | r = append( r, [2]int64{a,b} ) 47 | 48 | } else if strings.Contains( commaval[i], "+" ) { 49 | 50 | plusval := strings.Split( commaval[i], "+" ) 51 | if len(plusval) > 2 { return nil, fmt.Errorf("invalid option %s", commaval[i]) } 52 | 53 | a,ee := strconv.ParseInt( plusval[0], base, 64 ) 54 | if ee!=nil { return nil, fmt.Errorf("invalid option %s: %v", plusval[0], ee ) } 55 | 56 | if len(plusval[1])==0 { 57 | r = append( r, [2]int64{a,-1} ) 58 | continue 59 | } 60 | 61 | b,ee := strconv.ParseInt( plusval[1], base, 64) 62 | if ee!=nil { return nil, fmt.Errorf("invalid option %s: %v", plusval[1], ee ) } 63 | if b<0 { return nil, fmt.Errorf("invalid option %s: %d < 0", plusval[1], b ) } 64 | r = append( r, [2]int64{a,a+b} ) 65 | 66 | 67 | } else { 68 | a,ee := strconv.ParseInt( commaval[i], base, 64 ) 69 | if ee!=nil { return nil, fmt.Errorf("invalid option %s: %v", commaval[i], ee ) } 70 | 71 | r = append( r, [2]int64{a,a+1} ) 72 | } 73 | 74 | } 75 | 76 | return r,nil 77 | } 78 | 79 | -------------------------------------------------------------------------------- /experimental/lantern/lantern_system_info.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import _ "fmt" 4 | import "net/http" 5 | import "encoding/json" 6 | import _ "os" 7 | import "io" 8 | 9 | import "bytes" 10 | 11 | type LanternInfo struct { 12 | LanternVersion string 13 | LibraryVersion string 14 | TileMapVersion string 15 | CGFVersion string 16 | 17 | Stats LanternTileStats 18 | 19 | SampleId []string 20 | 21 | } 22 | 23 | func system_info_handler( w http.ResponseWriter, resp *LanternResponse, req *LanternRequest ) { 24 | 25 | info := LanternInfo{} 26 | info.LanternVersion = VERSION_STR 27 | info.LibraryVersion = gCGF[0].TileLibraryVersion 28 | info.TileMapVersion = gCGF[0].EncodedTileMapMd5Sum 29 | info.CGFVersion = gCGF[0].CGFVersion 30 | info.Stats = gLanternTileStats 31 | info.SampleId = gCGFName 32 | 33 | resp.Type = "success" 34 | resp.Message = "system-info" 35 | 36 | w.Header().Set("Content-Type", "application/json") 37 | res_json_bytes,_ := json.Marshal( info ) 38 | 39 | flatten_json_bytes := bytes.Trim( res_json_bytes, " {}\n") 40 | 41 | io.WriteString(w, "{\n") 42 | io.WriteString(w, " \"Type\":\"success\", \"Message\":\"system-info\",\n") 43 | io.WriteString(w, string(flatten_json_bytes)) 44 | io.WriteString(w, "\n") 45 | io.WriteString(w, "}") 46 | 47 | 48 | } 49 | 
-------------------------------------------------------------------------------- /experimental/lantern/lantern_tile.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import "fmt" 4 | import "../tile_cache" 5 | import "../tile_dbh" 6 | 7 | //var gTileCacheGob string = "./tile_cache.gob" 8 | var gTileCacheCSV string = "./tile_seq_first6.csv" 9 | var gTileCache *tile_cache.TileCache 10 | 11 | var gTileDB string = "./tiledb.sqlite3" 12 | var gTileDBH *tile_dbh.TileDBH 13 | 14 | type LanternTileStats struct { 15 | Total int 16 | CacheHit int 17 | CacheMiss int 18 | DBHit int 19 | DBMiss int 20 | } 21 | 22 | var gLanternTileStats LanternTileStats = LanternTileStats{} 23 | 24 | func TileInit( cache_csv_fn , db_fn string ) (e error) { 25 | //gTileCache,e = tile_cache.LoadCacheGob( cache_gob_fn ) 26 | gTileCache = &tile_cache.TileCache{} 27 | e = gTileCache.LoadTileIDMd5SumSeqCSV( cache_csv_fn ) 28 | 29 | if e!=nil { return e } 30 | gTileDBH,e = tile_dbh.OpenSqlite3( db_fn ) 31 | if e!=nil { return e } 32 | 33 | return nil 34 | } 35 | 36 | func TileSimpleInit() (e error) { 37 | //return TileInit( gTileCacheGob, gTileDB ) 38 | return TileInit( gTileCacheCSV , gTileDB ) 39 | } 40 | 41 | func GetTileSeq( tileid string ) (string,error) { 42 | gLanternTileStats.Total++ 43 | 44 | bseq,ok,err := gTileCache.GetSeq( tileid ) 45 | if err!=nil { return "",err } 46 | if ok { 47 | gLanternTileStats.CacheHit++ 48 | return string(bseq),nil 49 | } 50 | 51 | gLanternTileStats.CacheMiss++ 52 | 53 | seq,e := gTileDBH.GetSeqString( tileid ) 54 | if e!=nil { 55 | gLanternTileStats.DBMiss++ 56 | return "",e 57 | } 58 | 59 | gLanternTileStats.DBHit++ 60 | return seq,nil 61 | } 62 | 63 | func GetTileSeqDummy( tileid string ) (string,error) { 64 | gLanternTileStats.Total++ 65 | 66 | bseq,ok,err := gTileCache.GetSeqDummy( tileid ) 67 | if err!=nil { return "",err } 68 | if ok { 69 | gLanternTileStats.CacheHit++ 70 | return string(bseq),nil 71 | } 72 | 73 | gLanternTileStats.CacheMiss++ 74 | 75 | seq,e := gTileDBH.GetSeqStringDummy( tileid ) 76 | if e!=nil { 77 | gLanternTileStats.DBMiss++ 78 | return "",e 79 | } 80 | 81 | gLanternTileStats.DBHit++ 82 | return seq,nil 83 | } 84 | 85 | func TileStatsPrint() { 86 | fmt.Printf("Total:%d,CacheHit:%d,CacheMiss:%d,DBHit:%d,DBMiss:%d\n", 87 | gLanternTileStats.Total, 88 | gLanternTileStats.CacheHit, gLanternTileStats.CacheMiss, 89 | gLanternTileStats.DBHit, gLanternTileStats.DBMiss ) 90 | } 91 | -------------------------------------------------------------------------------- /experimental/lantern/lantern_tile_sequence.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import "fmt" 4 | import "net/http" 5 | import "encoding/json" 6 | import "os" 7 | import "io" 8 | 9 | func tile_sequence_handler( w http.ResponseWriter, resp *LanternResponse, req *LanternRequest ) { 10 | 11 | seqmap := make( map[string]string ) 12 | 13 | error_count:=0 14 | 15 | for i:=0; i> %s\n", req.TileId[i]) 17 | 18 | seq,e := GetTileSeq( req.TileId[i] ) 19 | if e!=nil { 20 | error_count ++ 21 | if (error_count%1000)==0 { 22 | fmt.Fprintf( os.Stderr, "ERROR: error count %d. 
Latest error: tile_sequence_handler(%s): %v\n", error_count, req.TileId[i], e ) 23 | } 24 | //fmt.Fprintf( os.Stderr, "ERROR: tile_sequence_handler(%s): %v\n", req.TileId[i], e ) 25 | continue 26 | } 27 | 28 | seqmap[ req.TileId[i] ] = seq 29 | } 30 | 31 | //for k,v := range seqmap { fmt.Printf("%s %s\n", k, v[0:10]) } 32 | 33 | 34 | //DEBUG 35 | TileStatsPrint() 36 | 37 | 38 | resp.Type = "success" 39 | resp.Message = "tile-sequence" 40 | 41 | w.Header().Set("Content-Type", "application/json") 42 | res_json_bytes,_ := json.Marshal( seqmap ) 43 | 44 | io.WriteString(w, "{\n") 45 | io.WriteString(w, " \"Type\":\"success\", \"Message\":\"tile-sequence\",\n") 46 | io.WriteString(w, " \"Result\":") 47 | io.WriteString(w, string(res_json_bytes)) 48 | io.WriteString(w, "\n") 49 | io.WriteString(w, "}") 50 | 51 | } 52 | 53 | 54 | /* Do lookups without returning the actual sequence to test lookup speed 55 | */ 56 | func tile_sequence_handler_tracer( w http.ResponseWriter, resp *LanternResponse, req *LanternRequest ) { 57 | 58 | error_count := 0 59 | 60 | for i:=0; i[0-9a-f\.]+)/$', views.TileVariantQuery.as_view(), name="tile_variant_query"), 8 | url(r'^tile_variant_by_int/(?P[0-9]+)/$', views.TileVariantDetail.as_view(), name="tile_variant_query_by_int"), 9 | url(r'^loci/(?P[0-9a-f\.]+)/$', views.TileLocusAnnotationList.as_view(), name="locus_query"), 10 | url(r'^between_loci/$', views.PopulationVariantQueryBetweenLoci.as_view(), name="pop_between_loci"), 11 | url(r'^around_locus/$', views.PopulationVariantQueryAroundLocus.as_view(), name="pop_around_locus"), 12 | ] 13 | 14 | urlpatterns=format_suffix_patterns(urlpatterns) 15 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/api_gui/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/pylightweb/lightning/api_gui/__init__.py -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/api_gui/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | 3 | # Register your models here. 
4 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/api_gui/forms.py: -------------------------------------------------------------------------------- 1 | from django import forms 2 | 3 | class AroundLocusForm(forms.Form): 4 | INDEX_CHOICES = ( 5 | (0, '0-indexed'), 6 | (1, '1-indexed'), 7 | ) 8 | def __init__(self, assembly_choices, chrom_choices, *args, **kwargs): 9 | super(AroundLocusForm, self).__init__(*args, **kwargs) 10 | self.fields['assembly'] = forms.ChoiceField(initial=assembly_choices[0][0], label='Assembly to use', 11 | choices=assembly_choices) 12 | self.fields['chromosome'] = forms.ChoiceField(label='Chromosome', choices=chrom_choices) 13 | 14 | self.fields['indexing'] = forms.ChoiceField(initial=self.INDEX_CHOICES[0][0], label='Indexing to use', 15 | widget=forms.RadioSelect, choices=self.INDEX_CHOICES) 16 | self.fields['target_base'] = forms.IntegerField(label='Locus to query:') 17 | self.fields['number_around'] = forms.IntegerField(initial=0,label='Number of bases around query to retrieve:') 18 | 19 | class BetweenLociForm(forms.Form): 20 | INDEX_CHOICES = ( 21 | (0, '0-indexed'), 22 | (1, '1-indexed'), 23 | ) 24 | def __init__(self, assembly_choices, chrom_choices, *args, **kwargs): 25 | super(BetweenLociForm, self).__init__(*args, **kwargs) 26 | self.fields['assembly'] = forms.ChoiceField(initial=assembly_choices[0][0], label='Assembly to use', 27 | choices=assembly_choices) 28 | self.fields['chromosome'] = forms.ChoiceField(label='Chromosome', choices=chrom_choices) 29 | 30 | self.fields['indexing'] = forms.ChoiceField(initial=self.INDEX_CHOICES[0][0], label='Indexing to use', 31 | widget=forms.RadioSelect, choices=self.INDEX_CHOICES) 32 | self.fields['lower_base'] = forms.IntegerField(label='Lower locus to start retrieving at:') 33 | self.fields['upper_base'] = forms.IntegerField(label='Upper locus to stop retrieving at (exclusive):') 34 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/api_gui/migrations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/pylightweb/lightning/api_gui/migrations/__init__.py -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/api_gui/models.py: -------------------------------------------------------------------------------- 1 | from django.db import models 2 | 3 | # Create your models here. 4 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/api_gui/tests.py: -------------------------------------------------------------------------------- 1 | from django.test import TestCase 2 | 3 | # Create your tests here. 
4 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/api_gui/urls.py: -------------------------------------------------------------------------------- 1 | from django.conf.urls import patterns, url 2 | 3 | from api_gui import views 4 | 5 | urlpatterns = patterns('', 6 | url(r'^around/$', views.around_locus_query_view, name='around_locus_form'), 7 | url(r'^between/$', views.between_loci_query_view, name='between_loci_form') 8 | ) 9 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/errors.py: -------------------------------------------------------------------------------- 1 | class MissingLocusError(Exception): 2 | """ 3 | Raised when the query given is valid, but no loci were found to match it 4 | """ 5 | def __init__(self, value): 6 | self.value = value 7 | def __str__(self): 8 | return repr(self.value) 9 | 10 | class LocusOutOfRangeException(Exception): 11 | def __init__(self, value): 12 | self.value = value 13 | def __str__(self): 14 | return repr(self.value) 15 | 16 | class EndOfChromosomeError(Exception): 17 | def __init__(self, value): 18 | self.value = value 19 | def __str__(self): 20 | return repr(self.value) 21 | 22 | class InvalidGenomeError(Exception): 23 | def __init__(self, value): 24 | self.value = value 25 | def __str__(self): 26 | return repr(self.value) 27 | 28 | class StatisticsException(Exception): 29 | pass 30 | 31 | class MissingStatisticsError(StatisticsException): 32 | def __init__(self, value): 33 | self.value = value 34 | def __str__(self): 35 | return repr(self.value) 36 | 37 | class ExistingStatisticsError(StatisticsException): 38 | def __init__(self, value): 39 | self.value = value 40 | def __str__(self): 41 | return repr(self.value) 42 | 43 | class EmptyPathError(Exception): 44 | def __init__(self, value): 45 | self.value = value 46 | def __str__(self): 47 | return repr(self.value) 48 | 49 | class TileLibraryValidationError(Exception): 50 | def __init__(self, value): 51 | self.value = value 52 | def __str__(self): 53 | return repr(self.value) 54 | 55 | class UnexpectedLanternBehaviorError(Exception): 56 | def __init__(self, value): 57 | self.value = value 58 | def __str__(self): 59 | return repr(self.value) 60 | 61 | class CGFTranslatorError(Exception): 62 | def __init__(self, value): 63 | self.value = value 64 | def __str__(self): 65 | return repr(self.value) 66 | 67 | class ParseException(Exception): 68 | pass 69 | 70 | class NoteParseError(ParseException): 71 | def __init__(self, value): 72 | self.value = value 73 | def __str__(self): 74 | return repr(self.value) 75 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/lightning/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/pylightweb/lightning/lightning/__init__.py -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/lightning/urls.py: -------------------------------------------------------------------------------- 1 | from django.conf.urls import patterns, include, url 2 | from django.contrib import admin 3 | 4 | urlpatterns = patterns('', 5 | url(r'^$', 'lightning.views.home', name='home'), 6 | url(r'^help/$', 'lightning.views.help', name='help'), 7 | url(r'^query/', 
include('api_gui.urls', namespace='population_sequence_query')), 8 | url(r'^api/', include('api.urls', namespace='api')), 9 | ) 10 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/lightning/views.py: -------------------------------------------------------------------------------- 1 | import urllib 2 | 3 | from django.shortcuts import render 4 | from django.http import HttpResponseRedirect, HttpResponse 5 | from django.core.urlresolvers import reverse 6 | 7 | from api_gui.forms import AroundLocusForm, BetweenLociForm 8 | from tile_library.models import TileLocusAnnotation, GenomeStatistic, TileVariant 9 | from tile_library.constants import SUPPORTED_ASSEMBLY_CHOICES, CHR_CHOICES 10 | 11 | def home(request): 12 | """ 13 | Display homepage 14 | """ 15 | if request.GET.get('assembly') != None: 16 | #We were asked for something! 17 | data = request.GET 18 | GET_url_section = urllib.urlencode(data) 19 | if 'target_base' in data: 20 | return HttpResponseRedirect(request.build_absolute_uri(reverse('population_sequence_query:around_locus_form')+'?'+GET_url_section)) 21 | elif 'lower_base' in data: 22 | return HttpResponseRedirect(request.build_absolute_uri(reverse('population_sequence_query:between_loci_form')+'?'+GET_url_section)) 23 | assembly_converter = dict(SUPPORTED_ASSEMBLY_CHOICES) 24 | chrom_converter = dict(CHR_CHOICES) 25 | possible_assemblies_int = TileLocusAnnotation.objects.order_by( 26 | 'assembly').distinct('assembly').values_list('assembly', flat=True) 27 | possible_chromosomes_int = TileLocusAnnotation.objects.order_by( 28 | 'chromosome').distinct('chromosome').values_list('chromosome', flat=True) 29 | possible_assemblies = [(i, assembly_converter[i]) for i in possible_assemblies_int] 30 | possible_chromosomes = [(i, chrom_converter[i]) for i in possible_chromosomes_int] 31 | query_around_form=AroundLocusForm(possible_assemblies, possible_chromosomes) 32 | query_between_form=BetweenLociForm(possible_assemblies, possible_chromosomes) 33 | return render(request, 'lightning/index.html', {'query_around_form':query_around_form, 'query_between_form':query_between_form}) 34 | 35 | def help(request): 36 | """ 37 | Display Questions 38 | """ 39 | return render(request, 'lightning/help.html') 40 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/lightning/wsgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | WSGI config for lightning project. 3 | 4 | It exposes the WSGI callable as a module-level variable named ``application``. 
5 | 6 | For more information on this file, see 7 | https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/ 8 | """ 9 | 10 | import os 11 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lightning.settings") 12 | 13 | from django.core.wsgi import get_wsgi_application 14 | application = get_wsgi_application() 15 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | 5 | if __name__ == "__main__": 6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lightning.settings") 7 | 8 | from django.core.management import execute_from_command_line 9 | 10 | execute_from_command_line(sys.argv) 11 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/static/bootstrap-3.2.0-dist/fonts/glyphicons-halflings-regular.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/pylightweb/lightning/static/bootstrap-3.2.0-dist/fonts/glyphicons-halflings-regular.eot -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/static/bootstrap-3.2.0-dist/fonts/glyphicons-halflings-regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/pylightweb/lightning/static/bootstrap-3.2.0-dist/fonts/glyphicons-halflings-regular.ttf -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/static/bootstrap-3.2.0-dist/fonts/glyphicons-halflings-regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/pylightweb/lightning/static/bootstrap-3.2.0-dist/fonts/glyphicons-halflings-regular.woff -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/templates/api_gui/index.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% load staticfiles %} 3 | {% load bootstrap %} 4 | {% load humanize %} 5 | 6 | {% block title %}Lightning: Population Sequence Query{% endblock %} 7 | 8 | {% block breadcrumbs %} 9 |
  • {{form_name}}
  • 10 | {% endblock %} 11 | 12 | {% block content %} 13 |
    14 | 20 |
    21 | {{ form|bootstrap }} 22 | 23 |
    24 | {% if response %} 25 |

    Response:

    26 |

    {{ response.text|linebreaks }} This query took {{response.time|floatformat:3}} seconds.

    27 | {% if response.humans %} 28 |
    29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | {% for human in response.humans %} 40 | 41 | 42 | 43 | 44 | 45 | 46 | {% endfor %} 47 | 48 |
    Sample Name   Phase A call   Phase B call   Phase groups known?
    {{ human.human_name }}   {{ human.phase_A_sequence }}   {{ human.phase_B_sequence }}   {{ human.phased|yesno }}
    49 |
    50 | {% endif %} 51 | {% endif %} 52 |
    53 | {% endblock %} 54 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/templates/lightning/help.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% load staticfiles %} 3 | 4 | {% block title %}Lightning: Fast Interaction with Human Genomic Data{% endblock %} 5 | 6 | {% block breadcrumbs %} 7 |
  • Help
  • 8 | {% endblock %} 9 | 10 | {% block content %} 11 |
    12 | 15 |

    This site is a work in progress and is open-source. 16 | Please contact us here with questions, 17 | unexpected behavior reports, and feature requests. 18 |

    19 |
    20 |

    Why do some pages take a long time to load?

    21 |

    This is a prototype implementation of Lightning, so some queries take longer than expected. We are working on improving the implementation.

    22 |
    23 |

    Why do some pages freeze my browser?

    24 |

    Our API does not page results, so if a user requests over 5,000 or so bases, 5,000 bases * 174 people will be loaded into the window. 25 | This amount of information transfer will freeze a browser. We suggest interacting directly with the API to retrieve that many bases (a sketch of such a direct query appears at the end of this FAQ). 26 |

    27 |
    28 |

    0-indexed or 1-indexed?

    29 |

    Our APIs support both, but they are 0-indexed by default.

    30 |
    31 |

    I have another question that isn't here.

    32 |

    Please contact us here!

    33 | 34 |
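A minimal sketch of the kind of direct API query suggested above, outside the browser. It assumes a local development server at http://localhost:8000, and that the between_loci endpoint (listed in api/urls.py and mounted under /api/ in lightning/urls.py) accepts the same GET parameter names the query forms in api_gui/forms.py use (assembly, chromosome, indexing, lower_base, upper_base). The host, port, and exact parameter handling are illustrative assumptions, not something this repository guarantees.

import requests

# Hypothetical direct query against a locally running server.
params = {
    "assembly": 19,        # GRCh37/hg19, per the assembly choices in tile_library
    "chromosome": 1,       # chr1
    "indexing": 0,         # 0-indexed coordinates (the default)
    "lower_base": 500000,  # lower locus to start retrieving at
    "upper_base": 500100,  # upper locus to stop retrieving at (exclusive)
}
resp = requests.get("http://localhost:8000/api/between_loci/", params=params)
resp.raise_for_status()    # fail loudly on a non-2xx response
print(resp.json())         # parsed response body (assumed JSON)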
    35 | {% endblock %} 36 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/templates/lightning/index.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% load staticfiles %} 3 | {% load bootstrap %} 4 | 5 | {% block title %}Lightning: Fast Interaction with Genomic Data{% endblock %} 6 | 7 | {% block content %} 8 |
    9 | 12 |

    This site is a work in progress and is open-source. 13 | Please contact us here with questions, 14 | unexpected behavior reports, and feature requests. 15 |

    16 |
    17 |

    Our goals include enabling fast analysis of population genetic data.

    18 |
    19 |
    20 |
    21 |

    Retrieve Population Sequences

    22 |
    23 |
    24 |

    25 |
    26 |
    Find k-bases around a specified locus
    27 |
    28 |
    29 | {{ query_around_form|bootstrap }} 30 | 31 |
    32 |
    33 |
    34 |
    35 |
    Find bases between a lower locus and an upper locus
    36 |
    37 |
    38 | {{ query_between_form|bootstrap }} 39 | 40 |
    41 |
    42 |
    43 |
    44 |
    45 |
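As a companion to the two form panels above, here is a minimal sketch of the "k bases around a locus" query issued directly against the API, under the same illustrative assumptions as the between_loci sketch earlier (local server at http://localhost:8000, and an around_locus endpoint that accepts the AroundLocusForm field names as GET parameters: assembly, chromosome, indexing, target_base, number_around). None of the host, port, or parameter handling details are guaranteed by this repository.

import requests

# Hypothetical direct "around a locus" query.
params = {
    "assembly": 19,         # GRCh37/hg19
    "chromosome": 1,        # chr1
    "indexing": 0,          # 0-indexed coordinates (the default)
    "target_base": 500050,  # locus to query
    "number_around": 25,    # number of bases around the query to retrieve
}
resp = requests.get("http://localhost:8000/api/around_locus/", params=params)
resp.raise_for_status()     # fail loudly on a non-2xx response
print(resp.json())          # parsed response body (assumed JSON)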
    46 | {% endblock %} 47 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/pylightweb/lightning/tile_library/__init__.py -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | 3 | # We only want the annotations to be modifiable by the admin, not the tiles 4 | from tile_library.models import Tile, TileVariant, TileLocusAnnotation, GenomeVariant 5 | 6 | class GenomeVariantAdmin(admin.ModelAdmin): 7 | list_display = ('__unicode__', 'chromosome_int', 'locus_start_int', 'created', 'last_modified') 8 | list_filter = ['last_modified'] 9 | search_fields = ['names', 'info'] 10 | extra = 0 11 | 12 | 13 | class TileLocusAdmin(admin.ModelAdmin): 14 | list_display = ('__unicode__', 'assembly_int', 'chromosome_int') 15 | list_filter = ['assembly_int', 'chromosome_int'] 16 | extra = 0 17 | can_delete=False 18 | 19 | class TileVarAdmin(admin.ModelAdmin): 20 | list_display = ('__unicode__', 'length', 'md5sum') 21 | extra = 0 22 | #It would be nice to customize whether the tags are collapsed if the tags are actually different 23 | fieldsets = [ 24 | (None, {'fields':['length', 'variant_value', 'md5sum']}), 25 | ('Sequence', {'fields':['sequence']}), 26 | ('Tags', {'fields':['start_tag', 'end_tag']}), 27 | ] 28 | can_delete=False 29 | 30 | class TileAdmin(admin.ModelAdmin): 31 | list_display = ('get_string', 'created') 32 | can_delete=False 33 | 34 | admin.site.register(Tile, TileAdmin) 35 | admin.site.register(TileVariant, TileVarAdmin) 36 | admin.site.register(TileLocusAnnotation, TileLocusAdmin) 37 | admin.site.register(GenomeVariant, GenomeVariantAdmin) 38 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/human_readable_functions.py: -------------------------------------------------------------------------------- 1 | from tile_library.constants import CHR_CHOICES, CHR_OTHER, SUPPORTED_ASSEMBLY_CHOICES, STATISTICS_TYPE_CHOICES, PATH 2 | 3 | chrom_index = [i for i,j in CHR_CHOICES] 4 | assembly_index = [i for i,j in SUPPORTED_ASSEMBLY_CHOICES] 5 | stat_index = [i for i,j in STATISTICS_TYPE_CHOICES] 6 | 7 | def get_readable_genome_statistics_name(statistic_type, path=-1): 8 | if type(statistics_type) != int: 9 | raise TypeError("Expects statistics_type to be of type int") 10 | if statistics_type == PATH: 11 | return "%s %s" % (SUPPORTED_STATISTICS_TYPE_CHOICES[stat_index.index(statistic_type)][1], hex(path).lstrip('0x').zfill(1)) 12 | return SUPPORTED_STATISTICS_TYPE_CHOICES[stat_index.index(statistic_type)][1] 13 | 14 | def get_readable_chr_name(chromosome_int, alt_chromosome_name): 15 | if type(chromosome_int) != int: 16 | raise TypeError("Expects chromosome int to be of type int") 17 | if chromosome_int == CHR_OTHER: 18 | return str(alternate_chromosome_name) 19 | else: 20 | return CHR_CHOICES[chrom_index.index(chromosome_int)][1] 21 | 22 | def get_readable_assembly_name(assembly_int): 23 | if type(assembly_int) != int: 24 | raise TypeError("Expects assembly int to be of type int") 25 | return 
SUPPORTED_ASSEMBLY_CHOICES[assembly_index.index(assembly_int)][1] 26 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0002_auto_20141002_1101.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0001_initial'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterModelOptions( 15 | name='tilelocusannotation', 16 | options={'ordering': ['tile']}, 17 | ), 18 | migrations.AlterModelOptions( 19 | name='tilevariant', 20 | options={'ordering': ['tile_variant_name']}, 21 | ), 22 | migrations.AlterModelOptions( 23 | name='varannotation', 24 | options={'ordering': ['tile_variant']}, 25 | ), 26 | ] 27 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0003_auto_20141009_1345.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0002_auto_20141002_1101'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='tile', 16 | name='tilename', 17 | field=models.BigIntegerField(serialize=False, editable=False, primary_key=True, db_index=True), 18 | ), 19 | migrations.AlterField( 20 | model_name='tilelocusannotation', 21 | name='assembly', 22 | field=models.PositiveSmallIntegerField(db_index=True, choices=[(16, b'NCBI34/hg16'), (17, b'NCBI35/hg17'), (18, b'NCBI36/hg18'), (19, b'GRCh37/hg19'), (38, b'GRCh38/hg38')]), 23 | ), 24 | migrations.AlterField( 25 | model_name='tilelocusannotation', 26 | name='begin_int', 27 | field=models.PositiveIntegerField(db_index=True), 28 | ), 29 | migrations.AlterField( 30 | model_name='tilelocusannotation', 31 | name='chromosome', 32 | field=models.PositiveSmallIntegerField(db_index=True, choices=[(1, b'chr1'), (2, b'chr2'), (3, b'chr3'), (4, b'chr4'), (5, b'chr5'), (6, b'chr6'), (7, b'chr7'), (8, b'chr8'), (9, b'chr9'), (10, b'chr10'), (11, b'chr11'), (12, b'chr12'), (13, b'chr13'), (14, b'chr14'), (15, b'chr15'), (16, b'chr16'), (17, b'chr17'), (18, b'chr18'), (19, b'chr19'), (20, b'chr20'), (21, b'chr21'), (22, b'chr22'), (23, b'chrX'), (24, b'chrY'), (25, b'chrM'), (26, b'Other')]), 33 | ), 34 | migrations.AlterField( 35 | model_name='tilelocusannotation', 36 | name='end_int', 37 | field=models.PositiveIntegerField(db_index=True), 38 | ), 39 | migrations.AlterField( 40 | model_name='tilevariant', 41 | name='length', 42 | field=models.PositiveIntegerField(db_index=True), 43 | ), 44 | migrations.AlterField( 45 | model_name='tilevariant', 46 | name='tile_variant_name', 47 | field=models.BigIntegerField(serialize=False, editable=False, primary_key=True, db_index=True), 48 | ), 49 | migrations.AlterField( 50 | model_name='tilevariant', 51 | name='variant_value', 52 | field=models.PositiveIntegerField(db_index=True), 53 | ), 54 | ] 55 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0004_genomestatistic.py: -------------------------------------------------------------------------------- 1 | 
# -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0003_auto_20141009_1345'), 11 | ] 12 | 13 | operations = [ 14 | migrations.CreateModel( 15 | name='GenomeStatistic', 16 | fields=[ 17 | ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), 18 | ('statistics_type', models.PositiveSmallIntegerField()), 19 | ('position_num', models.BigIntegerField()), 20 | ('tile_num', models.BigIntegerField()), 21 | ('avg_variant_val', models.PositiveIntegerField()), 22 | ('max_variant_val', models.PositiveIntegerField()), 23 | ('min_length', models.PositiveIntegerField()), 24 | ('avg_length', models.PositiveIntegerField()), 25 | ('max_length', models.PositiveIntegerField()), 26 | ('avg_annotations_per_position', models.PositiveIntegerField(null=True, blank=True)), 27 | ('max_annotations_per_position', models.PositiveIntegerField(null=True, blank=True)), 28 | ('avg_annotations_per_tile', models.PositiveIntegerField(null=True, blank=True)), 29 | ('max_annotations_per_tile', models.PositiveIntegerField(null=True, blank=True)), 30 | ], 31 | options={ 32 | }, 33 | bases=(models.Model,), 34 | ), 35 | ] 36 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0005_auto_20141010_0933.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0004_genomestatistic'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='genomestatistic', 16 | name='avg_length', 17 | field=models.PositiveIntegerField(null=True), 18 | ), 19 | migrations.AlterField( 20 | model_name='genomestatistic', 21 | name='avg_variant_val', 22 | field=models.PositiveIntegerField(null=True), 23 | ), 24 | migrations.AlterField( 25 | model_name='genomestatistic', 26 | name='max_length', 27 | field=models.PositiveIntegerField(null=True), 28 | ), 29 | migrations.AlterField( 30 | model_name='genomestatistic', 31 | name='max_variant_val', 32 | field=models.PositiveIntegerField(null=True), 33 | ), 34 | migrations.AlterField( 35 | model_name='genomestatistic', 36 | name='min_length', 37 | field=models.PositiveIntegerField(null=True), 38 | ), 39 | migrations.AlterField( 40 | model_name='genomestatistic', 41 | name='statistics_type', 42 | field=models.PositiveSmallIntegerField(db_index=True), 43 | ), 44 | ] 45 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0006_auto_20141010_1100.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0005_auto_20141010_0933'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='genomestatistic', 16 | name='avg_annotations_per_position', 17 | field=models.DecimalField(null=True, max_digits=15, decimal_places=3, blank=True), 18 | ), 19 | migrations.AlterField( 20 | model_name='genomestatistic', 21 | 
name='avg_annotations_per_tile', 22 | field=models.DecimalField(null=True, max_digits=15, decimal_places=3, blank=True), 23 | ), 24 | migrations.AlterField( 25 | model_name='genomestatistic', 26 | name='avg_length', 27 | field=models.DecimalField(null=True, max_digits=15, decimal_places=3), 28 | ), 29 | migrations.AlterField( 30 | model_name='genomestatistic', 31 | name='avg_variant_val', 32 | field=models.DecimalField(null=True, max_digits=15, decimal_places=3), 33 | ), 34 | ] 35 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0007_genomestatistic_path_name.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0006_auto_20141010_1100'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AddField( 15 | model_name='genomestatistic', 16 | name='path_name', 17 | field=models.PositiveIntegerField(db_index=True, null=True, blank=True), 18 | preserve_default=True, 19 | ), 20 | ] 21 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0008_auto_20141017_1402.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0007_genomestatistic_path_name'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterUniqueTogether( 15 | name='tilelocusannotation', 16 | unique_together=set([('tile', 'assembly')]), 17 | ), 18 | ] 19 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0009_auto_20141020_1808.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0008_auto_20141017_1402'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterUniqueTogether( 15 | name='genomestatistic', 16 | unique_together=set([('statistics_type', 'path_name')]), 17 | ), 18 | ] 19 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0011_auto_20141103_1546.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import django.core.validators 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ('tile_library', '0010_auto_20141103_1533'), 12 | ] 13 | 14 | operations = [ 15 | migrations.AlterField( 16 | model_name='genomevariant', 17 | name='names', 18 | field=models.TextField(help_text=b'Tab-separated aliases for this variant (rsID tags, RefSNP id, etc.', blank=True, validators=[django.core.validators.RegexValidator(regex=b'[\\S+\t]*', message=b'Not tab-separated')]), 19 | ), 20 | ] 21 | -------------------------------------------------------------------------------- 
/experimental/pylightweb/lightning/tile_library/migrations/0012_auto_20141103_1548.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0011_auto_20141103_1546'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='genomevariant', 16 | name='names', 17 | field=models.TextField(help_text=b'Tab-separated aliases for this variant (rsID tags, RefSNP id, etc.', blank=True), 18 | ), 19 | ] 20 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0013_auto_20141104_1715.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0012_auto_20141103_1548'), 11 | ] 12 | 13 | operations = [ 14 | migrations.RemoveField( 15 | model_name='genomevariant', 16 | name='tile_variants', 17 | ), 18 | migrations.DeleteModel( 19 | name='GenomeVariant', 20 | ), 21 | ] 22 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0015_auto_20141104_1726.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0014_auto_20141104_1716'), 11 | ] 12 | 13 | operations = [ 14 | migrations.RemoveField( 15 | model_name='genomevariant', 16 | name='tile_variants', 17 | ), 18 | migrations.RemoveField( 19 | model_name='translation', 20 | name='genome_variant', 21 | ), 22 | migrations.DeleteModel( 23 | name='GenomeVariant', 24 | ), 25 | migrations.RemoveField( 26 | model_name='translation', 27 | name='tile_variant', 28 | ), 29 | migrations.DeleteModel( 30 | name='Translation', 31 | ), 32 | ] 33 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0017_auto_20141106_1306.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0016_auto_20141104_1727'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='genomevariant', 16 | name='id', 17 | field=models.BigIntegerField(serialize=False, editable=False, primary_key=True), 18 | ), 19 | ] 20 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0019_auto_20141107_1843.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0018_auto_20141107_1550'), 11 | ] 12 | 13 | 
operations = [ 14 | migrations.AlterModelOptions( 15 | name='genomevariant', 16 | options={'ordering': ['start_tile_position', 'start_increment']}, 17 | ), 18 | migrations.AlterField( 19 | model_name='genomevariant', 20 | name='end_tile_position', 21 | field=models.ForeignKey(related_name=b'genome_variants_ending', to='tile_library.Tile'), 22 | ), 23 | migrations.AlterField( 24 | model_name='genomevariant', 25 | name='start_tile_position', 26 | field=models.ForeignKey(related_name=b'genome_variants_starting', to='tile_library.Tile'), 27 | ), 28 | ] 29 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0020_auto_20141107_2040.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import tile_library.models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ('tile_library', '0019_auto_20141107_1843'), 12 | ] 13 | 14 | operations = [ 15 | migrations.AlterField( 16 | model_name='genomevariant', 17 | name='info', 18 | field=models.TextField(help_text=b"Json-formatted. Known keys are 'source': [what generated the variant], 'phenotype': [phenotypes associated with this annotation], 'amino_acid': [predicted amino-acid changes], 'ucsc_trans': [UCSC translation (picked up from GFF files), and 'other': [Other GFF-file related annotations]", validators=[tile_library.models.validate_json]), 19 | ), 20 | ] 21 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0021_auto_20141110_1327.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0020_auto_20141107_2040'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='tilevariant', 16 | name='tile', 17 | field=models.ForeignKey(related_name=b'tile_variants', to='tile_library.Tile'), 18 | ), 19 | ] 20 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0022_auto_20141110_1750.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0021_auto_20141110_1327'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='genomevariant', 16 | name='end_tile_position', 17 | field=models.BigIntegerField(db_index=True), 18 | ), 19 | migrations.AlterField( 20 | model_name='genomevariant', 21 | name='start_tile_position', 22 | field=models.BigIntegerField(db_index=True), 23 | ), 24 | ] 25 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0023_auto_20141110_1807.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class 
Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0022_auto_20141110_1750'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='genomevariant', 16 | name='start_tile_position', 17 | field=models.ForeignKey(to='tile_library.Tile'), 18 | ), 19 | migrations.AlterField( 20 | model_name='genomevarianttranslation', 21 | name='genome_variant', 22 | field=models.ForeignKey(related_name=b'translation_to_tilevariant', to='tile_library.GenomeVariant'), 23 | ), 24 | ] 25 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0024_auto_20141110_1808.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0023_auto_20141110_1807'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='genomevariant', 16 | name='start_tile_position', 17 | field=models.ForeignKey(related_name=b'genome_variants', to='tile_library.Tile'), 18 | ), 19 | ] 20 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0025_auto_20141110_1815.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0024_auto_20141110_1808'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='genomevariant', 16 | name='start_tile_position', 17 | field=models.ForeignKey(related_name=b'genome_variants_per_tile', to='tile_library.Tile'), 18 | ), 19 | ] 20 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0026_auto_20141110_1923.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0025_auto_20141110_1815'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='genomevariant', 16 | name='end_tile_position', 17 | field=models.ForeignKey(related_name=b'ending_genome_variants', to='tile_library.Tile'), 18 | ), 19 | migrations.AlterField( 20 | model_name='genomevariant', 21 | name='start_tile_position', 22 | field=models.ForeignKey(related_name=b'starting_genome_variants', to='tile_library.Tile'), 23 | ), 24 | ] 25 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0027_tilevariant_conversion_to_cgf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0026_auto_20141110_1923'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AddField( 15 | model_name='tilevariant', 16 | name='conversion_to_cgf', 17 | 
field=models.TextField(default=b''), 18 | preserve_default=True, 19 | ), 20 | ] 21 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0028_auto_20141210_1126.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import tile_library.models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ('tile_library', '0027_tilevariant_conversion_to_cgf'), 12 | ] 13 | 14 | operations = [ 15 | migrations.AlterField( 16 | model_name='genomevariant', 17 | name='info', 18 | field=models.TextField(help_text=b"Json-formatted. Known keys are 'source': [what generated the variant], 'phenotype': [phenotypes associated with this annotation], 'amino_acid': [predicted amino-acid changes], 'ucsc_trans': [UCSC translation (picked up from GFF files), and 'other': [Other GFF-file related annotations]", db_index=True, validators=[tile_library.models.validate_json]), 19 | preserve_default=True, 20 | ), 21 | migrations.AlterUniqueTogether( 22 | name='genomevarianttranslation', 23 | unique_together=set([('tile_variant', 'genome_variant')]), 24 | ), 25 | ] 26 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0029_auto_20150129_1416.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0028_auto_20141210_1126'), 11 | ] 12 | 13 | operations = [ 14 | migrations.RemoveField( 15 | model_name='genomestatistic', 16 | name='avg_annotations_per_position', 17 | ), 18 | migrations.RemoveField( 19 | model_name='genomestatistic', 20 | name='avg_annotations_per_tile', 21 | ), 22 | migrations.RemoveField( 23 | model_name='genomestatistic', 24 | name='avg_length', 25 | ), 26 | migrations.RemoveField( 27 | model_name='genomestatistic', 28 | name='avg_num_positions_spanned', 29 | ), 30 | migrations.RemoveField( 31 | model_name='genomestatistic', 32 | name='avg_variant_val', 33 | ), 34 | migrations.RemoveField( 35 | model_name='genomestatistic', 36 | name='max_annotations_per_position', 37 | ), 38 | migrations.RemoveField( 39 | model_name='genomestatistic', 40 | name='max_annotations_per_tile', 41 | ), 42 | migrations.RemoveField( 43 | model_name='genomestatistic', 44 | name='max_length', 45 | ), 46 | migrations.RemoveField( 47 | model_name='genomestatistic', 48 | name='max_variant_val', 49 | ), 50 | migrations.RemoveField( 51 | model_name='genomestatistic', 52 | name='min_length', 53 | ), 54 | ] 55 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0030_auto_20150130_1750.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import tile_library.models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ('tile_library', '0029_auto_20150129_1416'), 12 | ] 13 | 14 | operations = [ 15 | migrations.AlterField( 16 | model_name='tile', 17 | name='end_tag', 18 | 
field=models.CharField(max_length=24, validators=[tile_library.models.validate_tag]), 19 | preserve_default=True, 20 | ), 21 | migrations.AlterField( 22 | model_name='tile', 23 | name='start_tag', 24 | field=models.CharField(max_length=24, validators=[tile_library.models.validate_tag]), 25 | preserve_default=True, 26 | ), 27 | migrations.AlterField( 28 | model_name='tile', 29 | name='tilename', 30 | field=models.BigIntegerField(db_index=True, serialize=False, editable=False, primary_key=True, validators=[tile_library.models.validate_tile_position_int]), 31 | preserve_default=True, 32 | ), 33 | migrations.AlterField( 34 | model_name='tilevariant', 35 | name='tile_variant_name', 36 | field=models.BigIntegerField(db_index=True, serialize=False, editable=False, primary_key=True, validators=[tile_library.models.validate_tile_variant_int]), 37 | preserve_default=True, 38 | ), 39 | ] 40 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0031_auto_20150202_1042.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0030_auto_20150130_1750'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='tilevariant', 16 | name='conversion_to_cgf', 17 | field=models.TextField(default=b'', blank=True), 18 | preserve_default=True, 19 | ), 20 | ] 21 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0032_auto_20150202_1227.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import tile_library.models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ('tile_library', '0031_auto_20150202_1042'), 12 | ] 13 | 14 | operations = [ 15 | migrations.AlterField( 16 | model_name='tilevariant', 17 | name='end_tag', 18 | field=models.CharField(default=b'', max_length=24, blank=True), 19 | preserve_default=True, 20 | ), 21 | migrations.AlterField( 22 | model_name='tilevariant', 23 | name='num_positions_spanned', 24 | field=models.PositiveSmallIntegerField(validators=[tile_library.models.validate_num_spanning_tiles]), 25 | preserve_default=True, 26 | ), 27 | migrations.AlterField( 28 | model_name='tilevariant', 29 | name='start_tag', 30 | field=models.CharField(default=b'', max_length=24, blank=True), 31 | preserve_default=True, 32 | ), 33 | ] 34 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0033_auto_20150202_1339.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0032_auto_20150202_1227'), 11 | ] 12 | 13 | operations = [ 14 | migrations.RenameField( 15 | model_name='genomestatistic', 16 | old_name='position_num', 17 | new_name='num_of_positions', 18 | ), 19 | migrations.RenameField( 20 | model_name='genomestatistic', 21 | old_name='tile_num', 22 | 
new_name='num_of_tiles', 23 | ), 24 | ] 25 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0034_auto_20150202_1349.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0033_auto_20150202_1339'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='genomestatistic', 16 | name='max_num_positions_spanned', 17 | field=models.PositiveIntegerField(null=True, blank=True), 18 | preserve_default=True, 19 | ), 20 | ] 21 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0035_auto_20150202_1616.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import tile_library.models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ('tile_library', '0034_auto_20150202_1349'), 12 | ] 13 | 14 | operations = [ 15 | migrations.AlterField( 16 | model_name='genomestatistic', 17 | name='max_num_positions_spanned', 18 | field=models.PositiveIntegerField(blank=True, null=True, validators=[tile_library.models.validate_num_spanning_tiles]), 19 | preserve_default=True, 20 | ), 21 | migrations.AlterField( 22 | model_name='genomestatistic', 23 | name='num_of_positions', 24 | field=models.BigIntegerField(validators=[tile_library.models.validate_positive]), 25 | preserve_default=True, 26 | ), 27 | migrations.AlterField( 28 | model_name='genomestatistic', 29 | name='num_of_tiles', 30 | field=models.BigIntegerField(validators=[tile_library.models.validate_positive]), 31 | preserve_default=True, 32 | ), 33 | ] 34 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0036_auto_20150202_1653.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import tile_library.models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ('tile_library', '0035_auto_20150202_1616'), 12 | ] 13 | 14 | operations = [ 15 | migrations.AlterField( 16 | model_name='genomestatistic', 17 | name='path_name', 18 | field=models.IntegerField(default=-1, db_index=True), 19 | preserve_default=True, 20 | ), 21 | ] 22 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0037_auto_20150202_1749.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0036_auto_20150202_1653'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='tilelocusannotation', 16 | name='chromosome_name', 17 | field=models.CharField(max_length=100, blank=True), 18 | preserve_default=True, 19 | ), 20 | ] 21 | 
-------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0040_auto_20150223_1333.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import tile_library.models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ('tile_library', '0039_auto_20150220_1417'), 12 | ] 13 | 14 | operations = [ 15 | migrations.AlterField( 16 | model_name='genomestatistic', 17 | name='path_name', 18 | field=models.IntegerField(default=-1, db_index=True), 19 | preserve_default=True, 20 | ), 21 | migrations.AlterField( 22 | model_name='genomevariant', 23 | name='info', 24 | field=models.TextField(help_text=b"Json-formatted. Known keys are 'source': [what generated the variant], 'phenotype': [phenotypes associated with this annotation], 'amino_acid': [predicted amino-acid changes], 'ucsc_trans': [UCSC translation (picked up from GFF files), and 'other': [Other GFF-file related annotations]", blank=True, db_index=True, validators=[tile_library.models.validate_json]), 25 | preserve_default=True, 26 | ), 27 | ] 28 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0041_auto_20150224_1826.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0040_auto_20150223_1333'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterField( 15 | model_name='genomevariant', 16 | name='names', 17 | field=models.TextField(help_text=b'Tab-separated aliases for this variant (rsID tags, RefSNP id, etc.', db_index=True, blank=True), 18 | preserve_default=True, 19 | ), 20 | migrations.AlterField( 21 | model_name='genomevarianttranslation', 22 | name='genome_variant', 23 | field=models.ForeignKey(related_name='translations_to_tile_variant', to='tile_library.GenomeVariant'), 24 | preserve_default=True, 25 | ), 26 | migrations.AlterField( 27 | model_name='genomevarianttranslation', 28 | name='tile_variant', 29 | field=models.ForeignKey(related_name='translations_to_genome_variant', to='tile_library.TileVariant'), 30 | preserve_default=True, 31 | ), 32 | ] 33 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0042_auto_20150227_1931.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0041_auto_20150224_1826'), 11 | ] 12 | 13 | operations = [ 14 | migrations.AlterUniqueTogether( 15 | name='genomevariant', 16 | unique_together=set([('assembly_int', 'chromosome_int', 'alternate_chromosome_name', 'locus_start_int', 'locus_end_int', 'alternate_bases')]), 17 | ), 18 | migrations.AlterUniqueTogether( 19 | name='tilevariant', 20 | unique_together=set([('tile', 'md5sum')]), 21 | ), 22 | ] 23 | -------------------------------------------------------------------------------- 
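Migration 0042 just above adds no columns; it only declares composite uniqueness over fields that already exist, which at the model level surfaces as `Meta.unique_together`. Below is a minimal, hypothetical sketch for the `TileVariant` half of the constraint: the `tile` foreign key with its `tile_variants` related name and the 32-character `md5sum` are taken from migrations 0021 and 0043 elsewhere in this directory, while everything else about the real model is omitted. The `GenomeVariant` constraint works the same way over the six columns named in the migration.

    # Hypothetical sketch of how 0042's unique_together surfaces in a model
    # definition (syntax matching the era of these migrations); not the
    # project's actual models.py.
    from django.db import models

    class TileVariant(models.Model):
        tile = models.ForeignKey('tile_library.Tile', related_name='tile_variants')
        md5sum = models.CharField(max_length=32)
        # ... remaining fields (sequence, tags, etc.) omitted ...

        class Meta:
            unique_together = (('tile', 'md5sum'),)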
/experimental/pylightweb/lightning/tile_library/migrations/0043_auto_20150304_1149.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import tile_library.models 6 | import django.core.validators 7 | 8 | 9 | class Migration(migrations.Migration): 10 | 11 | dependencies = [ 12 | ('tile_library', '0042_auto_20150227_1931'), 13 | ] 14 | 15 | operations = [ 16 | migrations.CreateModel( 17 | name='LanternTranslator', 18 | fields=[ 19 | ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), 20 | ('lantern_name', models.TextField(validators=[django.core.validators.RegexValidator(regex=b'^([0-9a-f]{3}\\.[0-9a-f]{2}\\.[0-9a-f]{4})\\.[0-9a-f]{4}(?:\\+([0-9a-f]+)$|$)', message=b'Not a valid lantern name format (specified in tile_library.constants.LANTERN_NAME_FORMAT_STRING)')])), 21 | ('tile_library_access_point', models.TextField()), 22 | ('tile_variant_int', models.BigIntegerField(db_index=True, validators=[tile_library.models.validate_tile_variant_int])), 23 | ('created', models.DateTimeField(auto_now_add=True)), 24 | ('last_modified', models.DateTimeField(auto_now=True)), 25 | ], 26 | options={ 27 | }, 28 | bases=(models.Model,), 29 | ), 30 | migrations.AlterField( 31 | model_name='tilevariant', 32 | name='md5sum', 33 | field=models.CharField(max_length=32), 34 | preserve_default=True, 35 | ), 36 | migrations.AlterField( 37 | model_name='tilevariant', 38 | name='sequence', 39 | field=models.TextField(validators=[django.core.validators.RegexValidator(regex=b'[ACGTN]+', message=b'Not a valid sequence, must be uppercase, and can only include A,C,G,T, or N.')]), 40 | preserve_default=True, 41 | ), 42 | ] 43 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0044_auto_20150304_1415.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | 6 | 7 | class Migration(migrations.Migration): 8 | 9 | dependencies = [ 10 | ('tile_library', '0043_auto_20150304_1149'), 11 | ] 12 | 13 | operations = [ 14 | migrations.RemoveField( 15 | model_name='lanterntranslator', 16 | name='tile_library_access_point', 17 | ), 18 | migrations.AddField( 19 | model_name='lanterntranslator', 20 | name='tile_library_host', 21 | field=models.TextField(default=b'', blank=True), 22 | preserve_default=True, 23 | ), 24 | ] 25 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0045_auto_20150304_1716.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import django.core.validators 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ('tile_library', '0044_auto_20150304_1415'), 12 | ] 13 | 14 | operations = [ 15 | migrations.AlterModelOptions( 16 | name='lanterntranslator', 17 | options={'ordering': ['lantern_name']}, 18 | ), 19 | migrations.AlterField( 20 | model_name='lanterntranslator', 21 | name='lantern_name', 22 | field=models.TextField(unique=True, 
validators=[django.core.validators.RegexValidator(regex=b'^([0-9a-f]{3}\\.[0-9a-f]{2}\\.[0-9a-f]{4})\\.[0-9a-f]{4}(?:\\+([0-9a-f]+)$|$)', message=b'Not a valid lantern name format (specified in tile_library.constants.LANTERN_NAME_FORMAT_STRING)')]), 23 | preserve_default=True, 24 | ), 25 | migrations.AlterUniqueTogether( 26 | name='lanterntranslator', 27 | unique_together=set([('tile_variant_int', 'tile_library_host')]), 28 | ), 29 | ] 30 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0046_auto_20150311_1637.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import tile_library.models 6 | import django.core.validators 7 | 8 | 9 | class Migration(migrations.Migration): 10 | 11 | dependencies = [ 12 | ('tile_library', '0045_auto_20150304_1716'), 13 | ] 14 | 15 | operations = [ 16 | migrations.AddField( 17 | model_name='tile', 18 | name='is_end_of_path', 19 | field=models.BooleanField(default=False, editable=False), 20 | preserve_default=True, 21 | ), 22 | migrations.AddField( 23 | model_name='tile', 24 | name='is_start_of_path', 25 | field=models.BooleanField(default=False, editable=False), 26 | preserve_default=True, 27 | ), 28 | migrations.AlterField( 29 | model_name='tilevariant', 30 | name='end_tag', 31 | field=models.CharField(default=b'', max_length=24, blank=True, validators=[tile_library.models.validate_tag]), 32 | preserve_default=True, 33 | ), 34 | migrations.AlterField( 35 | model_name='tilevariant', 36 | name='sequence', 37 | field=models.TextField(validators=[django.core.validators.RegexValidator(regex=b'[acgtn]+', message=b'Not a valid sequence, must be lowercase, and can only include a,c,g,t, or n.')]), 38 | preserve_default=True, 39 | ), 40 | migrations.AlterField( 41 | model_name='tilevariant', 42 | name='start_tag', 43 | field=models.CharField(default=b'', max_length=24, blank=True, validators=[tile_library.models.validate_tag]), 44 | preserve_default=True, 45 | ), 46 | ] 47 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/0047_auto_20150311_1653.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import unicode_literals 3 | 4 | from django.db import models, migrations 5 | import tile_library.models 6 | 7 | 8 | class Migration(migrations.Migration): 9 | 10 | dependencies = [ 11 | ('tile_library', '0046_auto_20150311_1637'), 12 | ] 13 | 14 | operations = [ 15 | migrations.AlterField( 16 | model_name='tile', 17 | name='end_tag', 18 | field=models.CharField(blank=True, max_length=24, validators=[tile_library.models.validate_tag]), 19 | preserve_default=True, 20 | ), 21 | migrations.AlterField( 22 | model_name='tile', 23 | name='start_tag', 24 | field=models.CharField(blank=True, max_length=24, validators=[tile_library.models.validate_tag]), 25 | preserve_default=True, 26 | ), 27 | ] 28 | -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/migrations/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/pylightweb/lightning/tile_library/migrations/__init__.py -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library/test_scripts/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/pylightweb/lightning/tile_library/test_scripts/__init__.py -------------------------------------------------------------------------------- /experimental/pylightweb/lightning/tile_library_generation/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/pylightweb/lightning/tile_library_generation/__init__.py -------------------------------------------------------------------------------- /experimental/recache/recache.go: -------------------------------------------------------------------------------- 1 | package recache 2 | 3 | import "regexp" 4 | import "io" 5 | 6 | //import "fmt" 7 | 8 | var Cache map[string]*regexp.Regexp 9 | 10 | func Compile( expr string ) (re *regexp.Regexp, err error) { 11 | if Cache == nil { 12 | Cache = make( map[string]*regexp.Regexp ) 13 | } 14 | if Cache[expr] != nil { 15 | return Cache[expr] , nil 16 | } 17 | Cache[expr],err = regexp.Compile( expr ) 18 | return Cache[expr], err 19 | } 20 | 21 | //-- 22 | 23 | func MatchString( pattern string, s string) (matched bool, err error) { 24 | r,err := Compile(pattern) 25 | if err != nil { return false, err } 26 | return r.MatchString(s), err 27 | } 28 | 29 | func Match( pattern string, b []byte) (matched bool, err error) { 30 | r,err := Compile(pattern) 31 | if err != nil { return false, err } 32 | return r.Match(b), err 33 | } 34 | 35 | func MatchReader( pattern string, ior io.RuneReader) (matched bool, err error) { 36 | r,err := Compile(pattern) 37 | if err != nil { return false, err } 38 | return r.MatchReader(ior), err 39 | } 40 | 41 | //-- 42 | 43 | 44 | func FindAllStringSubmatch( pattern string, s string, n int ) ( res [][]string, err error ) { 45 | r,err := Compile(pattern) 46 | if err != nil { return nil, err } 47 | return r.FindAllStringSubmatch( s, n ), err 48 | } 49 | 50 | //-- 51 | 52 | func ReplaceAllString( pattern string, src string, repl string) ( res string, err error ) { 53 | r,err := Compile(pattern) 54 | if err != nil { return "", err } 55 | return r.ReplaceAllString( src, repl ),err; 56 | } 57 | 58 | //-- 59 | 60 | func Split( pattern string, s string, n int ) ( res []string, err error ) { 61 | r,err := Compile(pattern) 62 | if err != nil { return nil, err } 63 | return r.Split( s, n ),err; 64 | } 65 | -------------------------------------------------------------------------------- /experimental/regexTileSetFromGff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arvados/lightning-server-legacy/1046a252ac9968c6c8080dda339213a6339ef745/experimental/regexTileSetFromGff -------------------------------------------------------------------------------- /experimental/sloppyjson/README.md: -------------------------------------------------------------------------------- 1 | SloppyJSON 2 | ========== 3 | 4 | A no-frills JSON parser for Go. 
5 | 6 | JSON is parsed and loaded into a SloppyJSON structure: 7 | 8 | 9 | // S - (S)tring 10 | // L - (L)ist 11 | // O - (O)bject 12 | // P - (P)recision value (float) 13 | // Y - t(Y)pe : (S|L|O|P|true|false|null) 14 | // 15 | type SloppyJSON struct { 16 | S string 17 | L []*SloppyJSON 18 | O map[string]*SloppyJSON 19 | P float64 20 | Y string 21 | } 22 | 23 | 24 | The `Y` variable holds the type ("S" for string, "L" for array, "O" for object, "P" for float64 or 25 | one of "true", "false" or "null" for boolean or null types). 26 | 27 | The appropriate element in the SloppyJSON structure will be populated depending on the value 28 | indicated by the `Y` type. 29 | 30 | SloppyJSON is mildly faster than using `encoding/json` (around ~30%). 31 | 32 | Usage 33 | ===== 34 | 35 | package main 36 | 37 | import "fmt" 38 | import "github.com/curoverse/lightning/experimental/sloppyjson" 39 | 40 | func main() { 41 | sj,err := sloppyjson.Loads(`{ "obj" : { "test" : "a test object value" }, 42 | "str" : "a string!", 43 | "arr" : [ "a", "b" ], 44 | "flo" : 0.12, 45 | "f" : false, 46 | "t": true, 47 | "n": null }`) 48 | if err!=nil { panic(err) } 49 | 50 | fmt.Printf(" obj.test : %s\n", sj.O["obj"].O["test"].S ) 51 | fmt.Printf(" str : %s\n", sj.O["str"].S ) 52 | fmt.Printf(" arr[0] : %s, arr[1] : %s\n", sj.O["arr"].L[0].S, sj.O["arr"].L[1].S ) 53 | fmt.Printf(" flo : %f\n", sj.O["flo"].P ) 54 | fmt.Printf(" t : %s\n", sj.O["t"].Y ) 55 | fmt.Printf(" f : %s\n", sj.O["f"].Y ) 56 | fmt.Printf(" n : %s\n", sj.O["n"].Y ) 57 | 58 | // Produces: 59 | // 60 | // obj.test : a test object value 61 | // str : a string! 62 | // arr[0] : a, arr[1] : b 63 | // flo : 0.120000 64 | // t : true 65 | // f : false 66 | // n : null 67 | // 68 | 69 | } 70 | 71 | -------------------------------------------------------------------------------- /experimental/sloppyjson/sloppyjson_test.go: -------------------------------------------------------------------------------- 1 | package sloppyjson 2 | 3 | import "testing" 4 | 5 | var json_tests []string = []string{ 6 | "{}", 7 | "\n\n\n{}", 8 | "\n\n\n{\n\n}", 9 | "\n\n\n{ }", 10 | "\n\n\n{}\n", 11 | "\n\n\n{} ", 12 | "[]", 13 | " []", 14 | "[] ", 15 | "[ ]", 16 | "\n\n[ ]\n\n ", 17 | "[ \"str\", \"ing\" ] ", 18 | "\n[ \"str\", \"in\", \"g\" ] ", 19 | "[ \"str\" ] ", 20 | " { \"str\" : \"ing\", \"gni\" : \"rts\" } ", 21 | " { \n\n \"str\" : \"ing\" }" } 22 | 23 | func TestLoads( t *testing.T ) { 24 | 25 | for k:=0 ; k= maxVarIdx && i2 >= maxVarIdx) || 71 | // (c1 == '#' || c2 == '#') { 72 | // continue 73 | // } 74 | log.Info("In index %d, c1='%s' but c2='%s'\n", idx-1, string(c1), string(c2)) 75 | return 76 | } 77 | } 78 | 79 | log.Info("Two abv files are prefect match!") 80 | } 81 | -------------------------------------------------------------------------------- /experimental/tileruler/cmd/gen_test.go: -------------------------------------------------------------------------------- 1 | package cmd 2 | 3 | import ( 4 | "testing" 5 | 6 | . 
"github.com/smartystreets/goconvey/convey" 7 | 8 | "github.com/curoverse/lightning/experimental/tileruler/modules/base" 9 | ) 10 | 11 | func Test_calInitImg(t *testing.T) { 12 | type Val struct { 13 | endBandIdx, endPosIdx int 14 | slotPixel int 15 | boxNum, border int 16 | x, y int 17 | } 18 | vals := []Val{ 19 | {9, 9, 1, 13, 1, 139, 139}, 20 | {9, 99, 1, 13, 1, 1399, 139}, 21 | {99, 99, 1, 13, 1, 1399, 1399}, 22 | {99, 999, 1, 13, 1, 13999, 1399}, 23 | {862, 999, 1, 13, 1, 13999, 12081}, 24 | {862, 9999, 1, 13, 1, 139999, 12081}, 25 | {862, 19999, 1, 13, 1, 279999, 12081}, 26 | {862, 29999, 1, 13, 1, 419999, 12081}, 27 | {862, 39999, 1, 13, 1, 559999, 12081}, 28 | {862, 49999, 1, 13, 1, 699999, 12081}, 29 | {862, 59999, 1, 13, 1, 839999, 12081}, 30 | {862, 59999, 2, 13, 1, 1619999, 23300}, 31 | {862, 59999, 2, 13, 2, 1679998, 24162}, 32 | {862, 59999, 2, 14, 2, 1799998, 25888}, 33 | {862, 59999, 2, 15, 2, 1919998, 27614}, 34 | } 35 | Convey("Calculate init image x and y", t, func() { 36 | for _, v := range vals { 37 | opt := base.Option{ 38 | Range: &base.Range{ 39 | EndBandIdx: v.endBandIdx, 40 | EndPosIdx: v.endPosIdx, 41 | }, 42 | SlotPixel: v.slotPixel, 43 | } 44 | So(calInitImgX(opt, v.boxNum, v.border), ShouldEqual, v.x) 45 | So(calInitImgY(opt, opt.EndBandIdx+1, v.boxNum, v.border), ShouldEqual, v.y) 46 | } 47 | }) 48 | } 49 | -------------------------------------------------------------------------------- /experimental/tileruler/cmd/plot.go: -------------------------------------------------------------------------------- 1 | package cmd 2 | 3 | import ( 4 | "github.com/curoverse/lightning/experimental/tileruler/modules/cli" 5 | "github.com/curoverse/lightning/experimental/tileruler/modules/plot" 6 | ) 7 | 8 | var CmdPlot = cli.Command{ 9 | Name: "plot", 10 | Usage: "run plot", 11 | Action: runPlot, 12 | Flags: []cli.Flag{ 13 | cli.StringFlag{"http-port", "8000", "HTTP port"}, 14 | }, 15 | } 16 | 17 | func runPlot(ctx *cli.Context) { 18 | opt := setup(ctx) 19 | 20 | plot.Start(opt) 21 | } 22 | -------------------------------------------------------------------------------- /experimental/tileruler/modules/base/cmd.go: -------------------------------------------------------------------------------- 1 | // +build go1.2 2 | 3 | // Copyright 2013 com authors 4 | // 5 | // Licensed under the Apache License, Version 2.0 (the "License"): you may 6 | // not use this file except in compliance with the License. You may obtain 7 | // a copy of the License at 8 | // 9 | // http://www.apache.org/licenses/LICENSE-2.0 10 | // 11 | // Unless required by applicable law or agreed to in writing, software 12 | // distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 13 | // WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 14 | // License for the specific language governing permissions and limitations 15 | // under the License. 16 | 17 | package base 18 | 19 | import ( 20 | "bytes" 21 | "os/exec" 22 | ) 23 | 24 | // ExecCmdDirBytes executes system command in given directory 25 | // and return stdout, stderr in bytes type, along with possible error. 26 | func ExecCmdDirBytes(dir, cmdName string, args ...string) ([]byte, []byte, error) { 27 | bufOut := new(bytes.Buffer) 28 | bufErr := new(bytes.Buffer) 29 | 30 | cmd := exec.Command(cmdName, args...) 
31 | cmd.Dir = dir 32 | cmd.Stdout = bufOut 33 | cmd.Stderr = bufErr 34 | 35 | err := cmd.Run() 36 | return bufOut.Bytes(), bufErr.Bytes(), err 37 | } 38 | 39 | // ExecCmdBytes executes system command 40 | // and return stdout, stderr in bytes type, along with possible error. 41 | func ExecCmdBytes(cmdName string, args ...string) ([]byte, []byte, error) { 42 | return ExecCmdDirBytes("", cmdName, args...) 43 | } 44 | 45 | // ExecCmdDir executes system command in given directory 46 | // and return stdout, stderr in string type, along with possible error. 47 | func ExecCmdDir(dir, cmdName string, args ...string) (string, string, error) { 48 | bufOut, bufErr, err := ExecCmdDirBytes(dir, cmdName, args...) 49 | return string(bufOut), string(bufErr), err 50 | } 51 | 52 | // ExecCmd executes system command 53 | // and return stdout, stderr in string type, along with possible error. 54 | func ExecCmd(cmdName string, args ...string) (string, string, error) { 55 | return ExecCmdDir("", cmdName, args...) 56 | } 57 | -------------------------------------------------------------------------------- /experimental/tileruler/modules/cli/cli.go: -------------------------------------------------------------------------------- 1 | // Package cli provides a minimal framework for creating and organizing command line 2 | // Go applications. cli is designed to be easy to understand and write, the most simple 3 | // cli application can be written as follows: 4 | // func main() { 5 | // cli.NewApp().Run(os.Args) 6 | // } 7 | // 8 | // Of course this application does not do much, so let's make this an actual application: 9 | // func main() { 10 | // app := cli.NewApp() 11 | // app.Name = "greet" 12 | // app.Usage = "say a greeting" 13 | // app.Action = func(c *cli.Context) { 14 | // println("Greetings") 15 | // } 16 | // 17 | // app.Run(os.Args) 18 | // } 19 | package cli 20 | -------------------------------------------------------------------------------- /experimental/tileruler/modules/log/log.go: -------------------------------------------------------------------------------- 1 | package log 2 | 3 | import ( 4 | "fmt" 5 | "os" 6 | "runtime" 7 | "time" 8 | ) 9 | 10 | const ( 11 | PREFIX = "[TR]" 12 | TIME_FORMAT = "06-01-02 15:04:05" 13 | ) 14 | 15 | var ( 16 | NonColor bool 17 | LEVEL_FLAGS = [...]string{"DEBUG", " INFO", " WARN", "ERROR", "FATAL"} 18 | ) 19 | 20 | func init() { 21 | if runtime.GOOS == "windows" { 22 | NonColor = true 23 | } 24 | } 25 | 26 | const ( 27 | DEBUG = iota 28 | INFO 29 | WARNING 30 | ERROR 31 | FATAL 32 | ) 33 | 34 | func Print(level int, format string, args ...interface{}) { 35 | if NonColor { 36 | fmt.Printf("%s %s [%s] %s\n", 37 | PREFIX, time.Now().Format(TIME_FORMAT), LEVEL_FLAGS[level], 38 | fmt.Sprintf(format, args...)) 39 | if level == FATAL { 40 | os.Exit(1) 41 | } 42 | return 43 | } 44 | 45 | switch level { 46 | case DEBUG: 47 | fmt.Printf("%s \033[36m%s\033[0m [\033[34m%s\033[0m] %s\n", 48 | PREFIX, time.Now().Format(TIME_FORMAT), LEVEL_FLAGS[level], 49 | fmt.Sprintf(format, args...)) 50 | case INFO: 51 | fmt.Printf("%s \033[36m%s\033[0m [\033[32m%s\033[0m] %s\n", 52 | PREFIX, time.Now().Format(TIME_FORMAT), LEVEL_FLAGS[level], 53 | fmt.Sprintf(format, args...)) 54 | case WARNING: 55 | fmt.Printf("%s \033[36m%s\033[0m [\033[33m%s\033[0m] %s\n", 56 | PREFIX, time.Now().Format(TIME_FORMAT), LEVEL_FLAGS[level], 57 | fmt.Sprintf(format, args...)) 58 | case ERROR: 59 | fmt.Printf("%s \033[36m%s\033[0m [\033[31m%s\033[0m] %s\n", 60 | PREFIX, time.Now().Format(TIME_FORMAT), 
LEVEL_FLAGS[level], 61 | fmt.Sprintf(format, args...)) 62 | case FATAL: 63 | fmt.Printf("%s \033[36m%s\033[0m [\033[35m%s\033[0m] %s\n", 64 | PREFIX, time.Now().Format(TIME_FORMAT), LEVEL_FLAGS[level], 65 | fmt.Sprintf(format, args...)) 66 | os.Exit(1) 67 | default: 68 | fmt.Printf("%s %s [%s] %s\n", 69 | PREFIX, time.Now().Format(TIME_FORMAT), LEVEL_FLAGS[level], 70 | fmt.Sprintf(format, args...)) 71 | } 72 | } 73 | 74 | func Debug(format string, args ...interface{}) { 75 | Print(DEBUG, format, args...) 76 | } 77 | 78 | func Warn(format string, args ...interface{}) { 79 | Print(WARNING, format, args...) 80 | } 81 | 82 | func Info(format string, args ...interface{}) { 83 | Print(INFO, format, args...) 84 | } 85 | 86 | func Error(format string, args ...interface{}) { 87 | Print(ERROR, format, args...) 88 | } 89 | 90 | func Fatal(format string, args ...interface{}) { 91 | Print(FATAL, format, args...) 92 | } 93 | -------------------------------------------------------------------------------- /experimental/tileruler/modules/plot/bar.go: -------------------------------------------------------------------------------- 1 | package plot 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | ) 7 | 8 | type barDataSetsType struct { 9 | FillColor string `json:"fillColor"` 10 | StrokeColor string `json:"strokeColor"` 11 | Data []int `json:"data"` 12 | } 13 | 14 | type barDataType struct { 15 | Labels []string `json:"labels"` 16 | Datasets []*barDataSetsType `json:"datasets"` 17 | } 18 | 19 | type BarChart struct { 20 | name string 21 | } 22 | 23 | func (b *BarChart) Canvas(name string, height int, width int) string { 24 | if height == 0 { 25 | height = 300 26 | } 27 | if width == 0 { 28 | width = 400 29 | } 30 | return fmt.Sprintf("<canvas id=\"%s\" height=\"%d\" width=\"%d\"></canvas>", name, height, width) 31 | } 32 | 33 | func (l *BarChart) JsonCode(c *ChartDataType) (string, error) { 34 | bars := new(barDataType) 35 | 36 | barNum := c.ValueNum() 37 | 38 | bars.Labels = c.ItemName() 39 | bars.Datasets = make([]*barDataSetsType, 0, barNum) 40 | 41 | for i := 0; i < barNum; i++ { 42 | bar := &barDataSetsType{} 43 | bar.FillColor = GetColorValue(i) 44 | bar.StrokeColor = GetColorValue(i) 45 | 46 | bar.Data = c.ItemValue(i) 47 | bars.Datasets = append(bars.Datasets, bar) 48 | } 49 | 50 | b, err := json.Marshal(bars) 51 | if err != nil { 52 | return "", err 53 | } 54 | 55 | return fmt.Sprintf("var barJsonStr = '%s';", string(b)), nil 56 | } 57 | 58 | func (l *BarChart) NewChart(name string) string { 59 | return fmt.Sprintf("new Chart(document.getElementById(\"%s\").getContext(\"2d\")).Bar(eval('('+barJsonStr+')'));", name) 60 | } 61 | 62 | func init() { 63 | bar := new(BarChart) 64 | bar.name = "bar" 65 | 66 | ChartHandlers["bar"] = bar 67 | } 68 | -------------------------------------------------------------------------------- /experimental/tileruler/modules/plot/chart.go: -------------------------------------------------------------------------------- 1 | package plot 2 | 3 | var colorSets = [...]string{ 4 | "#2980B9", // blue 5 | "#C0392B", // red 6 | "#F39C12", // yellow 7 | "#8E44AD", // WISTERIA 8 | "#16A085", // green 9 | "#2C3E50", // black 10 | } 11 | 12 | func GetColorValue(i int) string { 13 | if i >= len(colorSets) { 14 | return colorSets[0] 15 | } 16 | return colorSets[i] 17 | } 18 | -------------------------------------------------------------------------------- /experimental/tileruler/modules/plot/line.go: -------------------------------------------------------------------------------- 1 | package plot 2 | 3 | import ( 4 | "encoding/json" 5 | 
"fmt" 6 | ) 7 | 8 | type lineDataSetsType struct { 9 | FillColor string `json:"fillColor"` 10 | StrokeColor string `json:"strokeColor"` 11 | PointColor string `json:"pointColor"` 12 | PointStrokeColor string `json:"pointStrokeColor"` 13 | Data []int `json:"data"` 14 | } 15 | 16 | type lineDataType struct { 17 | Labels []string `json:"labels"` 18 | Datasets []*lineDataSetsType `json:"datasets"` 19 | } 20 | 21 | type LineChart struct { 22 | name string 23 | } 24 | 25 | func (l *LineChart) Canvas(name string, height int, width int) string { 26 | if height == 0 { 27 | height = 300 28 | } 29 | if width == 0 { 30 | width = 400 31 | } 32 | return fmt.Sprintf("<canvas id=\"%s\" height=\"%d\" width=\"%d\"></canvas>", name, height, width) 33 | } 34 | 35 | func (l *LineChart) JsonCode(c *ChartDataType) (string, error) { 36 | lines := new(lineDataType) 37 | 38 | lineNum := c.ValueNum() 39 | 40 | lines.Labels = c.ItemName() 41 | lines.Datasets = make([]*lineDataSetsType, 0, lineNum) 42 | 43 | for i := 0; i < lineNum; i++ { 44 | line := &lineDataSetsType{} 45 | line.FillColor = "rgba(220,220,220,0)" 46 | line.StrokeColor = GetColorValue(i) 47 | line.PointColor = GetColorValue(i) 48 | line.PointStrokeColor = "#fff" 49 | 50 | line.Data = c.ItemValue(i) 51 | lines.Datasets = append(lines.Datasets, line) 52 | } 53 | 54 | b, err := json.Marshal(lines) 55 | if err != nil { 56 | return "", err 57 | } 58 | 59 | return fmt.Sprintf("var lineJsonStr = '%s';", string(b)), nil 60 | } 61 | 62 | func (l *LineChart) NewChart(name string) string { 63 | return fmt.Sprintf("new Chart(document.getElementById(\"%s\").getContext(\"2d\")).Line(eval('('+lineJsonStr+')'));", name) 64 | } 65 | 66 | func init() { 67 | line := new(LineChart) 68 | line.name = "line" 69 | 70 | ChartHandlers["line"] = line 71 | } 72 | -------------------------------------------------------------------------------- /experimental/tileruler/modules/plot/line_no_curve.go: -------------------------------------------------------------------------------- 1 | package plot 2 | 3 | import ( 4 | "fmt" 5 | ) 6 | 7 | type LineNoCurveChart struct { 8 | LineChart 9 | } 10 | 11 | func (l *LineNoCurveChart) NewChart(name string) string { 12 | return fmt.Sprintf("new Chart(document.getElementById(\"%s\").getContext(\"2d\")).Line(eval('('+lineJsonStr+')'), {pointDot:false, bezierCurve:false});", name) 13 | } 14 | 15 | func init() { 16 | line := new(LineNoCurveChart) 17 | line.name = "line_no_curve" 18 | 19 | ChartHandlers["line_no_curve"] = line 20 | } 21 | -------------------------------------------------------------------------------- /experimental/tileruler/modules/plot/line_no_dot.go: -------------------------------------------------------------------------------- 1 | package plot 2 | 3 | import ( 4 | "fmt" 5 | ) 6 | 7 | type LineNoDotChart struct { 8 | LineChart 9 | } 10 | 11 | func (l *LineNoDotChart) NewChart(name string) string { 12 | return fmt.Sprintf("new Chart(document.getElementById(\"%s\").getContext(\"2d\")).Line(eval('('+lineJsonStr+')'), {pointDot : false});", name) 13 | } 14 | 15 | func init() { 16 | line := new(LineNoDotChart) 17 | line.name = "line_no_dot" 18 | 19 | ChartHandlers["line_no_dot"] = line 20 | } 21 | -------------------------------------------------------------------------------- /experimental/tileruler/modules/plot/main.go: -------------------------------------------------------------------------------- 1 | package plot 2 | 3 | import ( 4 | "github.com/curoverse/lightning/experimental/tileruler/modules/base" 5 | "github.com/curoverse/lightning/experimental/tileruler/modules/log" 6 | ) 7 | 
8 | func Start(opt base.Option) { 9 | log.Info("Start listening on :%s", opt.HttpPort) 10 | log.Fatal("%v", ListenAndServe("0.0.0.0:"+opt.HttpPort)) 11 | } 12 | -------------------------------------------------------------------------------- /experimental/tileruler/modules/plot/pie.go: -------------------------------------------------------------------------------- 1 | package plot 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | ) 7 | 8 | type pieDataSetsType struct { 9 | Color string `json:"color"` 10 | Value int `json:"value"` 11 | } 12 | 13 | type PieChart struct { 14 | name string 15 | } 16 | 17 | func (b *PieChart) Canvas(name string, height int, width int) string { 18 | if height == 0 { 19 | height = 300 20 | } 21 | if width == 0 { 22 | width = 400 23 | } 24 | return fmt.Sprintf("<canvas id=\"%s\" height=\"%d\" width=\"%d\"></canvas>", name, height, width) 25 | } 26 | 27 | func (l *PieChart) JsonCode(c *ChartDataType) (string, error) { 28 | pieNum := c.ItemNum() 29 | items := c.ItemValue(0) 30 | 31 | datasets := make([]*pieDataSetsType, 0, pieNum) 32 | 33 | for i := 0; i < pieNum; i++ { 34 | pie := &pieDataSetsType{} 35 | pie.Color = GetColorValue(i) 36 | pie.Value = items[i] 37 | 38 | datasets = append(datasets, pie) 39 | } 40 | 41 | b, err := json.Marshal(datasets) 42 | if err != nil { 43 | return "", err 44 | } 45 | 46 | return fmt.Sprintf("var pieJsonStr = '%s';", string(b)), nil 47 | } 48 | 49 | func (l *PieChart) NewChart(name string) string { 50 | return fmt.Sprintf("new Chart(document.getElementById(\"%s\").getContext(\"2d\")).Pie(eval('('+pieJsonStr+')'));", name) 51 | } 52 | 53 | func init() { 54 | pie := new(PieChart) 55 | pie.name = "pie" 56 | 57 | ChartHandlers["pie"] = pie 58 | } 59 | -------------------------------------------------------------------------------- /experimental/tileruler/tileruler.go: -------------------------------------------------------------------------------- 1 | // Tile Ruler is a command line tool for generating PNGs based on given abv files. 2 | package main 3 | 4 | import ( 5 | "os" 6 | "runtime" 7 | 8 | "github.com/curoverse/lightning/experimental/tileruler/cmd" 9 | "github.com/curoverse/lightning/experimental/tileruler/modules/cli" 10 | ) 11 | 12 | const ( 13 | APP_VER = "0.3.3.0920" 14 | ) 15 | 16 | func init() { 17 | runtime.GOMAXPROCS(runtime.NumCPU()) 18 | cmd.AppVer = APP_VER 19 | } 20 | 21 | func main() { 22 | app := cli.NewApp() 23 | app.Name = "Tile Ruler" 24 | app.Usage = "Generate, reverse and compare genome tiles" 25 | app.Version = APP_VER 26 | app.Commands = []cli.Command{ 27 | cmd.CmdGen, 28 | cmd.CmdReverse, 29 | cmd.CmdCompare, 30 | cmd.CmdStat, 31 | cmd.CmdPlot, 32 | cmd.CmdAbv, 33 | } 34 | app.Flags = append(app.Flags, []cli.Flag{ 35 | cli.BoolFlag{"noterm, n", "disable color output"}, 36 | cli.BoolFlag{"crunch, c", "crunch job mode"}, 37 | }...) 38 | app.Run(os.Args) 39 | } 40 | --------------------------------------------------------------------------------