├── chefignore
├── 01bioc.cnf
├── test
├── shared
│ └── spec_helper.rb
├── integration
│ ├── roles
│ │ ├── bbs_devel_linux.json
│ │ └── bbs_release_linux.json
│ └── default
│ │ └── serverspec
│ │ └── default_spec.rb
└── data_bags
│ └── BBS
│ ├── google_api_key.json
│ ├── github_chef_key.json
│ ├── incoming_private_key.json
│ ├── incoming_public_key.json
│ ├── outgoing_private_key.json
│ └── isr_credentials.json
├── config
├── ssh
├── Berksfile
├── .gitignore
├── metadata.rb
├── spec
└── unit
│ └── recipes
│ └── default_spec.rb
├── secret_for_testing_only
├── config.yml.example
├── .kitchen.yml
├── recipes
├── crontab.rb
└── default.rb
├── attributes
└── default.rb
├── Vagrantfile
└── README.md
/chefignore:
--------------------------------------------------------------------------------
1 | .kitchen
2 |
--------------------------------------------------------------------------------
/01bioc.cnf:
--------------------------------------------------------------------------------
1 | shell_escape=t
2 |
--------------------------------------------------------------------------------
/test/shared/spec_helper.rb:
--------------------------------------------------------------------------------
1 | require 'serverspec'
2 |
3 | set :backend, :exec
4 |
--------------------------------------------------------------------------------
/config:
--------------------------------------------------------------------------------
1 | Host bbsvm
2 | StrictHostKeyChecking=no
3 | UserKnownHostsFile=/dev/null
4 |
--------------------------------------------------------------------------------
/ssh:
--------------------------------------------------------------------------------
1 | # for use by git
2 | ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no $*
--------------------------------------------------------------------------------
/Berksfile:
--------------------------------------------------------------------------------
1 | source 'https://supermarket.chef.io'
2 |
3 | metadata
4 |
5 |
6 |
7 | cookbook 'apt', '~> 2.9.2'
8 | cookbook 'cron', '~> 1.7.6'
9 |
--------------------------------------------------------------------------------
/test/integration/roles/bbs_devel_linux.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "bbs_devel_linux",
3 | "default_attributes": {
4 | "reldev": "devel"
5 | },
6 | "run_list": [
7 | "recipe[BBS-provision-cookbook::default]"
8 | ]
9 | }
10 |
--------------------------------------------------------------------------------
/test/integration/roles/bbs_release_linux.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "bbs_release_linux",
3 | "default_attributes": {
4 | "reldev": "release"
5 | },
6 | "run_list": [
7 | "recipe[BBS-provision-cookbook::default]"
8 | ]
9 | }
10 |
--------------------------------------------------------------------------------
/test/data_bags/BBS/google_api_key.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "google_api_key",
3 | "value": {
4 | "encrypted_data": "8pYqEQMCz8wzMBjpj63652rbauQNEPEY+kljgcBNMktrBDBz9zir/rhzwxLz\n1WcU\n",
5 | "iv": "3HvsInM8OkPQ4VV3z3MR1w==\n",
6 | "version": 1,
7 | "cipher": "aes-256-cbc"
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | config.yml
2 | svnauth.tar.gz
3 | id_rsa
4 | id_rsa.pub
5 | .vagrant/
6 | report.tgz.kitchen/
7 | .kitchen.local.yml
8 | .kitchen/
9 | .vagrant
10 | Berksfile.lock
11 | *~
12 | *#
13 | .#*
14 | \#*#
15 | .*.sw[a-z]
16 | *.un~
17 | # Bundler
18 | Gemfile.lock
19 | bin/*
20 | .bundle/*
21 | knife.rb
22 |
--------------------------------------------------------------------------------
/metadata.rb:
--------------------------------------------------------------------------------
1 | name 'BBS-provision-cookbook'
2 | maintainer 'Andrzej Oles'
3 | maintainer_email 'aoles@embl.de'
4 | license 'Artistic 2.0'
5 | description 'Installs Bioconductor System Dependencies'
6 | long_description 'Installs Bioconductor System Dependencies'
7 | version '0.1.40'
8 | depends 'apt'
9 | depends 'cron'
10 |
--------------------------------------------------------------------------------
/test/data_bags/BBS/github_chef_key.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "github_chef_key",
3 | "value": {
4 | "encrypted_data": "PBOHGbUarxIbn2ARqUnDdONLAn9ecfCcZdeCvcomjD6Ws7nsX4ptqkoengeG\n5XvkCahYrKRJm0ZHxne/Ri1N0A==\n",
5 | "iv": "7RSXgfYZxBRFmlDEv42lhQ==\n",
6 | "version": 1,
7 | "cipher": "aes-256-cbc"
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/test/data_bags/BBS/incoming_private_key.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "incoming_private_key",
3 | "value": {
4 | "encrypted_data": "/gtOW4nIyoUZwhtfH3FRgwwk9Anlro5KIq7gHaauInnShbowWCdeOVuz4sd7\nv17Vc0Wox02M/9x4oHLQEwxS3Q==\n",
5 | "iv": "z0EsvKz17AQRFggByevuRg==\n",
6 | "version": 1,
7 | "cipher": "aes-256-cbc"
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/test/data_bags/BBS/incoming_public_key.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "incoming_public_key",
3 | "value": {
4 | "encrypted_data": "V/ZLE2a+Has9e9CQHXBDV8b3kByKYC05UbEb1pLYsNmxE/2epgiHoRIjkc3E\nGFFN41le8pYH2h0UsX/iwczDqQ==\n",
5 | "iv": "LD7SVkS0X3+lHZ/qd9V2Pw==\n",
6 | "version": 1,
7 | "cipher": "aes-256-cbc"
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/test/data_bags/BBS/outgoing_private_key.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "outgoing_private_key",
3 | "value": {
4 | "encrypted_data": "7wcft+UN58vl5GZerk4pRmi8HMC5Aom1p48KdD+Zg2x91ScCrfokUm0yZ1bp\nwBqR+h2zKFyUE5Qz+orb0RSD4g==\n",
5 | "iv": "X47O3eTUGLNcN59PqnfEeg==\n",
6 | "version": 1,
7 | "cipher": "aes-256-cbc"
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/test/data_bags/BBS/isr_credentials.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "isr_credentials",
3 | "username": {
4 | "encrypted_data": "NH/nXpKnfJq5JcXMhjceakEofhppVSUsuGsqReIU1zKrfvlWbzusiiT3ub3i\nHKne\n",
5 | "iv": "PqfCTZ/pfSIczefwAuM52w==\n",
6 | "version": 1,
7 | "cipher": "aes-256-cbc"
8 | },
9 | "password": {
10 | "encrypted_data": "R4ZQZFZbSlTBO+li2ru7+OdL1/3BkDZxpJMrjRfjGNjN48bdxCYMDZ1rSakv\nPchj\n",
11 | "iv": "ftHYECJLgWPe8bWM1C/UPA==\n",
12 | "version": 1,
13 | "cipher": "aes-256-cbc"
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/spec/unit/recipes/default_spec.rb:
--------------------------------------------------------------------------------
1 | #
2 | # Cookbook Name:: BBS-provision-cookbook
3 | # Spec:: default
4 | #
5 | # Copyright (c) 2016 The Authors, All Rights Reserved.
6 |
7 | require 'spec_helper'
8 |
9 | describe 'BBS-provision-cookbook::default' do
10 | context 'When all attributes are default, on an unspecified platform' do
11 | let(:chef_run) do
12 | runner = ChefSpec::ServerRunner.new
13 | runner.converge(described_recipe)
14 | end
15 |
16 | it 'converges successfully' do
17 | expect { chef_run }.to_not raise_error
18 | end
19 | end
20 | end
21 |
--------------------------------------------------------------------------------
/secret_for_testing_only:
--------------------------------------------------------------------------------
1 | yvzByWLruige5l+dfR/jvWEx+xutPypgjjGWTZ5RP3L6zOicvbryBs7LMxNDVBNLy8qV+gjKBAmvnztqNaJlJGtcFCkor8XHfeMNjKG4A9a6LiQyTneKL8C8SRJIUHH3ZSpMHIfsX1M+OuvxmkOTV3KPoBNRAGrsllUQQm6Af3KHujxeszPLzA2OmJP9UupW9gk0Xqd6wE4u0Hi2ZxSfg399rVcG9eM5MWfiFm+WnqzfIQOG1UNSt5Ia9YU46di2QvB8BGqT9+aHEmkFuM02AN0xGv7P2bshlVRFC1SHJRgyDcf07Y/YU8PTQxL3JNGEY/3n/axTmQs8QFqqQcDqrEwWFHv+5Qzt46cO96auFH4bTUyFhBnFmibehQ/i8iYc3xJW/mxe75BUVRA7qJmhFsj+7wZxoArt9K5UIiPovmfGGrQNOEA8s7KdGXivoQuvI3DJO907h2IZqfS1jn0TSnU/JCHwnC6sEwKoAJ7VteAxti73QXB3hmC3dOWFBtndi+3nHFlX1JV1f4pVKUsxAjh5PP56V+6AQ+XmN0a3uhkgoYBfQ2vWMg7bYs3ZKYZbDDwXW4f9A+xvmnaO4404wLCGaVKVSGebm9LRvZDNfNsrRDn+TQENJ4JyCyx+++A30JZzXYxFqegXa1bFk1uUkryZTiv8BrHmQrhkocKec58=
--------------------------------------------------------------------------------
/test/integration/default/serverspec/default_spec.rb:
--------------------------------------------------------------------------------
1 | #require 'spec_helper'
2 | #require_relative '../../../kitchen/data/spec_helper'
3 | # require_relative '/tmp/verifier/suites/serverspec/spec_helper'
4 | # require_relative './spec_helper'
5 | require_relative '/tmp/kitchen/data/spec_helper'
6 |
7 |
8 | ## IMPORTANT: We are not using these tests anymore. instead
9 | ## we are using control groups in the recipe.
10 |
11 | describe "BBS-provision-cookbook::default" do
12 | # describe package('git') do
13 | # it {should be_installed}
14 | # end
15 |
16 |
17 | describe file('/etc/timezone') do
18 | its(:content) {should_not match /UTC|GMT/}
19 | end
20 |
21 | describe file('/etc/passwd') do
22 | its(:content) {should match /biocbuild/}
23 | end
24 |
25 |
26 | end
27 |
--------------------------------------------------------------------------------
/config.yml.example:
--------------------------------------------------------------------------------
1 | # copy config.yml.example to config.yml and edit that to match your settings
2 | # don't overwrite config.yml!
3 | ---
4 | # r_version should be a string like R-x.y.z (example: R-3.1.0), or one of the following:
5 | # R-devel R-alpha R-beta R-patched R-rc
6 | r_version: R-3.1.1
7 | # use_devel should be TRUE or FALSE, also indicates whether to pull from trunk
8 | use_devel: TRUE
9 | # set to e.g. 3.0
10 | bioc_version: 3.0
11 | svn_password: XXXXXXXX
12 | hostname: bbsvm
13 | timezone: America/Los_Angeles
14 |
15 | ## AWS section:
16 | # see https://github.com/mitchellh/vagrant-aws/#quick-start
17 | access_key_id: "XXXXX"
18 | secret_access_key: "YYYYY"
19 | keypair_name: "bioc-default"
20 | # if you are using a vpc security group you must
21 | # specify the group below by its id, NOT its name
22 | security_groups: "bioc_default"
23 | ami: "ami-ZZZZZ"
24 | private_key_path: "/path/to/bioc-default.pem"
25 | instance_type: "m1.large"
26 | tags: {Name: some name}
27 | # these are optional (subnet_id is required for instance
28 | # types that start in a vpc)
29 | #availability_zone: us-east-1a
30 | #subnet_id: subnet-d66a05ec # us-east-1a
31 |
32 |
33 | # don't overwrite config.yml!
34 |
--------------------------------------------------------------------------------
/.kitchen.yml:
--------------------------------------------------------------------------------
1 | # ec2 host/instance name is stored in .kitchen/default-ubuntu-1604.yml
2 | # last_action should probably be set to create in that file.
3 | # here is what the whole file looks like:
4 | # ---
5 | # server_id: i-xxxxxxx
6 | # hostname: 12.34.56.78
7 | # last_action: create
8 | ---
9 | driver:
10 | # name: vagrant
11 | name: ec2
12 | aws_ssh_key_id: bioc-default
13 | region: us-east-1
14 | require_chef_omnibus: true
15 | instance_type: t2.large
16 | security_group_ids: ["sg-308ba149"]
17 | subnet_id: subnet-d66a05ec
18 | availability_zone: us-east-1a
19 |
20 | transport:
21 | ssh_key: /Users/dtenenba/.ec2/bioc-default.pem
22 | username: ubuntu
23 |
24 | provisioner:
25 | name: chef_zero
26 | client_rb:
27 | audit_mode: :enabled
28 | data_path: test/shared
29 | data_bags_path: ./test/data_bags
30 | encrypted_data_bag_secret_key_path: ./secret_for_testing_only
31 |
32 |
33 | # Uncomment the following verifier to leverage Inspec instead of Busser (the
34 | # default verifier)
35 | # verifier:
36 | # name: inspec
37 |
38 | platforms:
39 | # - name: ubuntu-14.04
40 | - name: ubuntu-16.04
41 | driver:
42 | image_id: ami-c8c6d9a2
43 |
44 | suites:
45 | - name: default
46 | run_list:
47 | - role[bbs_devel_linux]
48 | # - recipe[BBS-provision-cookbook::default]
49 |
--------------------------------------------------------------------------------
/recipes/crontab.rb:
--------------------------------------------------------------------------------
1 | #
2 | # Cookbook Name:: BBS-provision-cookbook
3 | # Recipe:: crontab
4 | #
5 | # Copyright (c) 2016 Andrzej Oles, All Rights Reserved.
6 | include_recipe 'cron'
7 |
8 | if node["reldev"] == "devel"
9 | reldev = :dev
10 | elsif node["reldev"] == "release"
11 | reldev = :rel
12 | else
13 | raise "are the bbs_devel and bbs_release roles defined?"
14 | end
15 |
16 | bioc_version = node['bioc_version'][reldev]
17 | cron = node['cron'] # NOTE: this local shadows the Chef `cron` resource DSL name; the resource calls below (e.g. cron "prerun bioc" do ... end) still resolve to the resource method because they pass arguments
18 |
19 |
20 | ## biocbuild
21 |
22 | %w(bioc data-experiment).each do |type|
23 |
24 | %w(prerun run postrun).each do |action|
25 |
26 | cron "#{action} #{type}" do
27 | time = cron[action][type][reldev]
28 | user 'biocbuild'
29 | command %W{
30 | /bin/bash --login -c
31 | 'cd /home/biocbuild/BBS/#{bioc_version}/#{type}/`hostname` &&
32 | ./#{action}.sh >>/home/biocbuild/bbs-#{bioc_version}-#{type}/log/`hostname`-`date +\\%Y\\%m\\%d`-#{action}.log 2>&1'
33 | }.join(' ')
34 | minute time['minute']
35 | hour time['hour']
36 | day time['day']
37 | month time['month']
38 | weekday time['weekday']
39 | end
40 |
41 | end
42 |
43 | end
44 |
45 | ## biocadmin
46 |
47 | %w(bioc data-experiment data-annotation).each do |type|
48 |
49 | cron "propagate #{type}" do
50 | time = cron['propagate'][type][reldev]
51 | user 'biocadmin'
52 | command %W{
53 | cd /home/biocadmin/manage-BioC-repos/#{bioc_version} &&
54 | (#{"./updateReposPkgs-#{type}.sh && " unless type=="data-annotation"}./prepareRepos-#{type}.sh && ./pushRepos-#{type}.sh)
55 | >>/home/biocadmin/cron.log/#{bioc_version}/updateRepos-#{type}-`date +\\%Y\\%m\\%d`.log 2>&1
56 | }.join(' ')
57 | minute time['minute']
58 | hour time['hour']
59 | day time['day']
60 | month time['month']
61 | weekday time['weekday']
62 | end
63 |
64 | end
65 |
--------------------------------------------------------------------------------
/attributes/default.rb:
--------------------------------------------------------------------------------
1 | ## At release time these 4 need to be modified:
2 | ## 'bioc_version', 'r_version', 'r_url' and 'r_src_dir'
3 | default['bioc_version'] = {rel: '3.7', dev: '3.8'}
4 | default['r_version'] = {rel: '3.4', dev: '3.5'}
5 | default['r_url'] = {rel: 'https://cran.rstudio.com/src/base/R-3/R-3.4.3.tar.gz',
6 | dev: 'https://stat.ethz.ch/R/daily/R-devel.tar.gz'}
7 | default['r_src_dir'] = {rel: 'R-3.4.3', dev: 'R-devel'}
8 |
9 | default['desired_hostname'] = {rel: "malbec2", dev: "malbec1"}
10 | default['time_zone'] = "America/New_York"
11 | default['bbs_repos'] = 'https://github.com/Bioconductor/BBS'
12 | default['bbs_branch'] = 'master'
13 | default['root_url'] = {dev: "https://root.cern.ch/download/root_v5.34.36.source.tar.gz",
14 | rel: "https://root.cern.ch/download/root_v5.34.36.source.tar.gz"}
15 | default['jags_url'] = {dev: "https://sourceforge.net/projects/mcmc-jags/files/JAGS/4.x/Source/JAGS-4.2.0.tar.gz/download",
16 | rel: "https://sourceforge.net/projects/mcmc-jags/files/JAGS/4.x/Source/JAGS-4.2.0.tar.gz/download"}
17 | default['jags_dir'] = {dev: "JAGS-4.2.0", rel: "JAGS-4.2.0"}
18 | default['libsbml_url'] = "https://s3.amazonaws.com/linux-provisioning/libSBML-5.10.2-core-src.tar.gz"
19 | default['libsbml_dir'] = "libsbml-5.10.2"
20 | default['vienna_rna_url'] = "https://www.tbi.univie.ac.at/RNA/download/sourcecode/2_2_x/ViennaRNA-2.2.7.tar.gz"
21 | default['vienna_rna_dir'] = "ViennaRNA-2.2.7"
22 | default['vep_url'] = {dev: "https://github.com/Ensembl/ensembl-vep/archive/release/90.zip",
23 | rel: "https://github.com/Ensembl/ensembl-vep/archive/release/90.zip"}
24 | default['vep_dir'] = {dev: "ensembl-vep-release-90", rel: "ensembl-vep-release-90"}
25 | default['argtable_url'] = "http://prdownloads.sourceforge.net/argtable/argtable2-13.tar.gz"
26 | default['clustalo_url'] = "http://www.clustal.org/omega/clustal-omega-1.2.1.tar.gz"
27 | default['pandoc_url'] = "https://github.com/jgm/pandoc/releases/download/1.19.1/pandoc-1.19.1-1-amd64.deb"
28 | default['git-lfs_url'] = "https://github.com/git-lfs/git-lfs/releases/download/v1.5.5/git-lfs-linux-amd64-1.5.5.tar.gz"
29 | default['git-lfs_dir'] = "git-lfs-1.5.5"
30 |
31 | # cron info
32 |
33 | def starhash(minute: '*', hour: '*', day: '*', month: '*', weekday: '*')
34 | {minute: minute.to_s, hour: hour.to_s, day: day.to_s,
35 | month: month.to_s, weekday: weekday.to_s}
36 | end
37 |
38 | ## biocbuild
39 |
40 | default['cron']['prerun']['bioc'] = {
41 | rel: starhash(hour: 17, minute: 20),
42 | dev: starhash(hour: 17, minute: 15)
43 | }
44 |
45 | default['cron']['run']['bioc'] = {
46 | rel: starhash(hour: 17, minute: 55),
47 | dev: starhash(hour: 17, minute: 55)
48 | }
49 |
50 | default['cron']['postrun']['bioc']= {
51 | rel: starhash(hour: 15, minute: 55),
52 | dev: starhash(hour: 15, minute: 55)
53 | }
54 |
55 | default['cron']['prerun']['data-experiment'] = {
56 | rel: starhash(hour: 9, minute: 20),
57 | dev: starhash(hour: 9, minute: 20)
58 | }
59 |
60 | default['cron']['run']['data-experiment'] = {
61 | rel: starhash(hour: 9, minute: 55),
62 | dev: starhash(hour: 9, minute: 55)
63 | }
64 |
65 | default['cron']['postrun']['data-experiment'] = {
66 | rel: starhash(hour: 16, minute: 55),
67 | dev: starhash(hour: 16, minute: 55)
68 | }
69 |
70 | ## biocadmin
71 |
72 | default['cron']['propagate']['bioc'] = {
73 | rel: starhash(hour: 16, minute: 25),
74 | dev: starhash(hour: 16, minute: 25)
75 | }
76 |
77 | default['cron']['propagate']['data-experiment'] = {
78 | rel: starhash(hour: 17, minute: 35),
79 | dev: starhash(hour: 17, minute: 35)
80 | }
81 |
82 | default['cron']['propagate']['data-annotation'] = {
83 | rel: starhash(hour: 5, minute: 20),
84 | dev: starhash(hour: 5, minute: 20)
85 | }
86 |
--------------------------------------------------------------------------------
/Vagrantfile:
--------------------------------------------------------------------------------
1 | # -*- mode: ruby -*-
2 | # vi: set ft=ruby :
3 |
4 | require 'yaml'
5 | yamlconfig = YAML.load_file "config.yml"
6 |
7 |
8 | # Vagrantfile API/syntax version. Don't touch unless you know what you're doing!
9 | VAGRANTFILE_API_VERSION = "2"
10 |
11 | # (duplicate YAML.load_file call removed -- yamlconfig is already loaded near the top of this file)
12 |
13 |
14 | Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
15 | # All Vagrant configuration is done here. The most common configuration
16 | # options are documented and commented below. For a complete reference,
17 | # please see the online documentation at vagrantup.com.
18 |
19 | # Every Vagrant virtual environment requires a box to build off of.
20 | config.vm.box = "joscarsson/ubuntu-trusty64-chef"
21 |
22 | require 'time'
23 | timezone = 'Etc/GMT' + ((Time.zone_offset(Time.now.zone)/60)/60).to_s
24 | config.vm.provision :shell, :inline => "if [ $(grep -c UTC /etc/timezone) -gt 0 ]; then echo \"#{timezone}\" | sudo tee /etc/timezone && dpkg-reconfigure --frontend noninteractive tzdata; fi"
25 |
26 |
27 | config.vm.hostname = "bbsvm" # not sure if this will work on AWS
28 |
29 | config.vm.provider :virtualbox do |vb, override|
30 | override.vm.network "forwarded_port", guest: 80, host: 8066
31 | vb.memory = 4096
32 |
33 | #vb.customize ["modifyvm", :id, "--memory", "2048"]
34 | vb.customize ["modifyvm", :id, "--cpus", "4"]
35 | vb.customize ["modifyvm", :id, "--ioapic", "on"]
36 | end
37 |
38 | # start me with
39 | # vagrant up --provider=aws
40 | # if you want to run on aws
41 | # see https://groups.google.com/forum/#!topic/vagrant-up/3ABXsxm53Go
42 | config.vm.provider :aws do |aws, override|
43 | override.vm.box = "dummy"
44 | aws.access_key_id = yamlconfig['access_key_id']
45 | aws.secret_access_key = yamlconfig['secret_access_key']
46 | aws.keypair_name = yamlconfig['keypair_name']
47 | if yamlconfig.has_key? 'subnet_id'
48 | aws.subnet_id = yamlconfig['subnet_id']
49 | # aws.security_group_ids = [yamlconfig['security_groups']]
50 | else
51 | end
52 | aws.security_groups = yamlconfig['security_groups']
53 | aws.ami = yamlconfig['ami']
54 | aws.tags = yamlconfig['tags']
55 | aws.instance_type = yamlconfig['instance_type']
56 | aws.instance_ready_timeout = 240
57 | if yamlconfig.has_key? "availability_zone"
58 | aws.availability_zone = yamlconfig['availability_zone']
59 | end
60 |
61 | override.ssh.username = "ubuntu"
62 | override.ssh.private_key_path = yamlconfig['private_key_path']
63 | end
64 |
65 | config.vm.provision "chef_solo" do |chef|
66 | #chef.log_level = :debug
67 | chef.add_recipe "setup_BBS"
68 | end
69 |
70 |
71 | # Disable automatic box update checking. If you disable this, then
72 | # boxes will only be checked for updates when the user runs
73 | # `vagrant box outdated`. This is not recommended.
74 | # config.vm.box_check_update = false
75 |
76 | # Create a forwarded port mapping which allows access to a specific port
77 | # within the machine from a port on the host machine. In the example below,
78 | # accessing "localhost:8080" will access port 80 on the guest machine.
79 | # config.vm.network "forwarded_port", guest: 80, host: 8080
80 |
81 | # Create a private network, which allows host-only access to the machine
82 | # using a specific IP.
83 | # config.vm.network "private_network", ip: "192.168.33.10"
84 |
85 | # Create a public network, which is generally matched to a bridged network.
86 | # Bridged networks make the machine appear as another physical device on
87 | # your network.
88 | # config.vm.network "public_network"
89 |
90 | # If true, then any SSH connections made will enable agent forwarding.
91 | # Default value: false
92 | # config.ssh.forward_agent = true
93 |
94 | # Share an additional folder to the guest VM. The first argument is
95 | # the path on the host to the actual folder. The second argument is
96 | # the path on the guest to mount the folder. And the optional third
97 | # argument is a set of non-required options.
98 | # config.vm.synced_folder "../data", "/vagrant_data"
99 |
100 | # Provider-specific configuration so you can fine-tune various
101 | # backing providers for Vagrant. These expose provider-specific options.
102 | # Example for VirtualBox:
103 | #
104 | # config.vm.provider "virtualbox" do |vb|
105 | # # Don't boot with headless mode
106 | # vb.gui = true
107 | #
108 | # # Use VBoxManage to customize the VM. For example to change memory:
109 | # vb.customize ["modifyvm", :id, "--memory", "1024"]
110 | # end
111 | #
112 | # View the documentation for the provider you're using for more
113 | # information on available options.
114 |
115 | # Enable provisioning with CFEngine. CFEngine Community packages are
116 | # automatically installed. For example, configure the host as a
117 | # policy server and optionally a policy file to run:
118 | #
119 | # config.vm.provision "cfengine" do |cf|
120 | # cf.am_policy_hub = true
121 | # # cf.run_file = "motd.cf"
122 | # end
123 | #
124 | # You can also configure and bootstrap a client to an existing
125 | # policy server:
126 | #
127 | # config.vm.provision "cfengine" do |cf|
128 | # cf.policy_server_address = "10.0.2.15"
129 | # end
130 |
131 | # Enable provisioning with Puppet stand alone. Puppet manifests
132 | # are contained in a directory path relative to this Vagrantfile.
133 | # You will need to create the manifests directory and a manifest in
134 | # the file default.pp in the manifests_path directory.
135 | #
136 | # config.vm.provision "puppet" do |puppet|
137 | # puppet.manifests_path = "manifests"
138 | # puppet.manifest_file = "site.pp"
139 | # end
140 |
141 | # Enable provisioning with chef solo, specifying a cookbooks path, roles
142 | # path, and data_bags path (all relative to this Vagrantfile), and adding
143 | # some recipes and/or roles.
144 | #
145 | # config.vm.provision "chef_solo" do |chef|
146 | # chef.cookbooks_path = "../my-recipes/cookbooks"
147 | # chef.roles_path = "../my-recipes/roles"
148 | # chef.data_bags_path = "../my-recipes/data_bags"
149 | # chef.add_recipe "mysql"
150 | # chef.add_role "web"
151 | #
152 | # # You may also specify custom JSON attributes:
153 | # chef.json = { mysql_password: "foo" }
154 | # end
155 |
156 | # Enable provisioning with chef server, specifying the chef server URL,
157 | # and the path to the validation key (relative to this Vagrantfile).
158 | #
159 | # The Opscode Platform uses HTTPS. Substitute your organization for
160 | # ORGNAME in the URL and validation key.
161 | #
162 | # If you have your own Chef Server, use the appropriate URL, which may be
163 | # HTTP instead of HTTPS depending on your configuration. Also change the
164 | # validation key to validation.pem.
165 | #
166 | # config.vm.provision "chef_client" do |chef|
167 | # chef.chef_server_url = "https://api.opscode.com/organizations/ORGNAME"
168 | # chef.validation_key_path = "ORGNAME-validator.pem"
169 | # end
170 | #
171 | # If you're using the Opscode platform, your validator client is
172 | # ORGNAME-validator, replacing ORGNAME with your organization name.
173 | #
174 | # If you have your own Chef Server, the default validation client name is
175 | # chef-validator, unless you changed the configuration.
176 | #
177 | # chef.validation_client_name = "ORGNAME-validator"
178 | end
179 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # BBS-provision-cookbook
2 |
3 | ## Table of Contents
4 |
5 | - [Background](#background)
6 | - [Setup Chef](#setup-chef)
7 | - [The Chef server](#the-chef-server)
8 | - [Upload cookbook](#upload-cookbook)
9 | - [Resolve dependencies](#resolve-dependencies)
10 | - [Configure the node](#configure-the-node)
11 | - [Bootstrapping](#bootstrapping)
12 | - [Data bags](#data-bags)
13 | - [Run the recipe](#run-the-recipe)
14 | - [Redefine the central build node](#redefine-the-central-build-node)
15 | - [Crontabs](#crontabs)
16 | - [Further development](#further-development)
17 | - [Updating node configuration](#updating-node-configuration)
18 | - [Adding a new recipe](#adding-a-new-recipe)
19 |
20 |
21 |
22 | ## Background
23 |
24 | The BBS-provision-cookbook is used to configure test machines
25 | when rolling out new Bioconductor Build System (BBS) features.
26 | It is not currently run on the primary build machines.
27 |
28 | As of July 2018 the default recipe takes about 1 hour and 10 minutes
29 | to complete.
30 |
31 | Some terminology:
32 |
33 | * workstation:
34 |
35 | The computer from which you author your cookbooks and
36 | administer your network.
37 |
38 | * Chef server:
39 |
40 | Acts as a central repository for cookbooks as well as for
41 | information about nodes it manages.
42 |
43 | * node:
44 |
45 | Any computer managed by a Chef server. Every node has the Chef
46 | client installed on it.
47 |
48 |
49 | ## Setup Chef
50 |
51 | The normal Chef workflow involves managing servers remotely from a local
52 | workstation. Logging into the node to manage it directly is important when
53 | troubleshooting. The Chef Development Kit provides tools that enable node
54 | management both remotely and from a local workstation.
55 |
56 | If you are not set up with a local Chef installation, follow one of the
57 | tutorials at
58 |
59 | https://learn.chef.io/#/modules
60 | https://learn.chef.io/modules/learn-the-basics/ubuntu/aws/set-up-a-machine-to-manage#/
61 |
62 |
63 | ## The Chef server
64 |
65 | We use a hosted Chef server at https://manage.chef.io/. You should have
66 | an account and be able to log in to see the nodes and cookbooks.
67 |
68 | The Chef server acts as a configuration hub. It stores cookbooks, polices
69 | applied to nodes and other metadata. Nodes use the `chef-client` executable
70 | to query the Chef server for configuration details. Configuration work
71 | is then done on the nodes (vs the server).
72 |
73 | All cookbooks, data and dependencies needed by a Chef recipe must be present
74 | on the Chef server so they are accessible by the node.
75 |
76 |
77 | ### Upload cookbook
78 |
79 | The BBS-provision-cookbook should already be uploaded to the server. To see a
80 | list of all cookbooks from the command line:
81 |
82 | knife cookbook list
83 |
84 | After making any necessary local changes to the cookbook, bump the version in
85 | metadata.rb and upload:
86 |
87 | knife cookbook upload BBS-provision-cookbook
88 |
89 | Confirm the new version is on the server:
90 |
91 | knife cookbook list
92 |
93 |
94 | ### Resolve dependencies
95 |
96 | Chef itself does not resolve cookbook dependencies. When a recipe is run
97 | on a node, all dependencies are assumed to either be installed on the Chef
98 | server or available from the official Chef Supermarket.
99 |
100 | Berkshelf is a dependency manager for Chef cookbooks and is included in
101 | the Chef DK. It (or something similar) is needed to get cookbook dependencies
102 | from locations other than the Chef Supermarket such as GitHub or a local path
103 | and upload them to the Chef server.
104 |
105 | If you don't have Chef DK installed you can get Berkshelf with
106 |
107 | gem install berkshelf
108 |
109 | The Berksfile is the most critical component of Berkshelf and is modeled
110 | after Bundler's Gemfile. The file contains
111 | 3 primary settings:
112 |
113 | source : Location of cookbooks and dependencies if not available locally with Berkshelf.
114 | metadata : Directive to read metadata.rb.
115 | cookbook : List of all the cookbooks/dependencies required.
116 |
117 | Calling `berks install` downloads all cookbook dependencies to the local
118 | workstation and `berks upload` uploads them to the Chef server. The `berks`
119 | command must be run at the same level as the Berksfile.
120 |
121 | Resolve dependencies on other cookbooks with Berkshelf:
122 |
123 | cd BBS-provision-cookbook/
124 | berks install
125 | berks upload --no-freeze
126 |
127 |
128 | ## Configure the node
129 |
130 | This example uses an AWS EC2 instance as the node.
131 |
132 | Launch an AWS EC2 instance with at least 4 cpus and 16 GB of memory. Increase
133 | the disk storage to 20 or 50 GB depending on what type of testing will be done.
134 | Open ports 22, 80 and 443.
135 |
136 | Once the instance is running it can be configured manually by logging
137 | into the EC2 or remotely from your local workstation. These instructions
138 | describe a remote configuration. For a manual approach, see this page:
139 |
140 | https://learn.chef.io/modules/learn-the-basics/ubuntu/aws/set-up-a-machine-to-manage#/
141 |
142 |
143 | ### Bootstrapping
144 |
145 | Configure Chef on the node:
146 |
147 | knife bootstrap 34.207.158.122 --ssh-user ubuntu --sudo --identity-file /home/vobencha/.ssh/vobencha-keypair.pem --node-name val-test-malbec
148 |
149 | Confirm the node was associated with the server:
150 |
151 | knife node list
152 | knife node show val-test-malbec
153 |
154 | At this point the run list on the node is empty. Run list options are in the
155 | BBS-provision-cookbook/test/integration/roles/ directory.
156 |
157 | Add the run list:
158 |
159 | knife node run_list add val-test-malbec 'recipe[BBS-provision-cookbook::default],role[bbs_devel_linux]'
160 |
161 | Confirm the run list was added:
162 |
163 | knife node show val-test-malbec
164 |
165 | The run list involves a "role". There are several ways to handle 'special
166 | cases' and using roles is one of them. There are roles for release and devel -
167 | each set a variable value on the node which is accessed by the cookbook recipe
168 | during run time. Use 'role list' and 'role show' to see the roles defined on
169 | the server.
170 |
171 | knife role list
172 | knife role show bbs_devel_linux
173 |
174 | The above steps can be combined into one. Running them separately (as above)
175 | has the advantage of confirming each step as you go and makes troubleshooting
176 | easier. To combine them, these would be the one-liners:
177 |
178 | Key authentication:
179 |
180 | knife bootstrap 34.207.158.122 --ssh-user ubuntu --identity-file ~/.ssh/vobencha-keypair.pem --sudo --use-sudo-password --node-name val-test-malbec --run-list 'role[bbs_devel_linux],recipe[BBS-provision-cookbook]'
181 |
182 | Or when password authentication is used:
183 |
184 | knife bootstrap 34.207.158.122 --ssh-user ubuntu --ssh-password 'PASS' --sudo --use-sudo-password --node-name val-test-malbec --run-list 'role[bbs_devel_linux],recipe[BBS-provision-cookbook]'
185 |
186 |
187 | ### Data bags
188 |
189 | A data bag is a global variable that is stored as JSON and is accessible
190 | from a Chef server. The bags are indexed for searching and can be loaded by a
191 | recipe or accessed during a search. We use these to store keys.
192 |
193 | Inside a data bag are data bag items. Each item has been encrypted with a
194 | secret key. To use these data in a recipe the items must be on the
195 | Chef server and the encryption key must be on the Chef client.
196 |
197 | * Data bags on the Chef server:
198 |
199 | The data bags in BBS-provision-cookbook were uploaded when we invoked
200 | `knife upload ...`. Confirm the BBS data bag is on the server:
201 |
202 | knife data bag list
203 | knife data bag show BBS
204 |
205 | * Encryption key to Chef client:
206 |
207 | The encryption key is in the Google Doc "Credentials for Bioconductor
208 | Resources". Copy the key to /etc/chef/encrypted_data_bag_secret file on the
209 | client node. Permissions on the encrypted_data_bag_secret file should be 600.
210 |
211 |
212 | ## Run the recipe
213 |
214 | The recipe can be run on the node by invoking the `chef-client` executable.
215 | This was installed on the node during the bootstrap stage and invoking it
216 | forces execution of the run list.
217 |
218 | knife ssh 'name:val-test-malbec' 'sudo chef-client' --ssh-user ubuntu --ssh-identity-file ~/.ssh/vobencha-keypair.pem --attribute cloud.public_ipv4
219 |
220 |
221 | ## Redefine the central build node
222 |
223 | This Chef recipe configures a Linux build node and pulls in the BBS code base
224 | from GitHub. When we configure a node with this recipe we (usually) want the
225 | newly configured node to play the role of the central builder.
226 |
227 | To accomplish this, the `BBS_CENTRAL_RHOST` and `BBS_MEAT0_RHOST` variables in
228 | the BBS code on the node need to reference the node's hostname instead of the
229 | canonical central builder.
230 |
231 | In the context of the regular build system, the master builder
232 | and all machines participating in the builds have `BBS_CENTRAL_RHOST`
233 | and `BBS_MEAT0_RHOST` set to either malbec1.bioconductor.org or
234 | malbec2.bioconductor.org.
235 |
236 | The Chef recipe configures a node with hostname malbec1 or malbec2, without
237 | the `.bioconductor.org` extension.
238 |
239 | Confirm the hostname of the newly configured node. For example, we'll say
240 | this returns malbec1:
241 |
242 | echo $HOSTNAME
243 |
244 | Set `BBS_CENTRAL_RHOST` to "127.0.0.1" instead of malbec1.bioconductor.org
245 | in these config.sh files:
246 |
247 | /home/biocbuild/BBS/3.8/config.sh
248 | /home/biocadmin/BBS/3.8/config.sh
249 |
250 | Make sure the following works:
251 |
252 | cd ~/BBS/3.8/bioc/malbec1
253 | . config.sh
254 | /usr/bin/ssh -qi /home/biocbuild/.BBS/id_rsa $BBS_CENTRAL_RHOST
255 |
256 | Set `BBS_MEAT0_RHOST` to malbec1 instead of malbec1.bioconductor.org
257 | in the appropriate sub-build config file:
258 |
259 | /home/biocbuild/BBS/3.8/bioc/config.sh
260 | /home/biocbuild/BBS/3.8/bioc-longtests/config.sh
261 | /home/biocbuild/BBS/3.8/data-experiment/config.sh
262 | /home/biocbuild/BBS/3.8/workflows/config.sh
263 |
264 | To prevent posting the build report to master.bioconductor.org comment out
265 | the rsync line in the postrun script:
266 |
267 | #/usr/bin/rsync -ave 'ssh -o StrictHostKeyChecking=no' "$BBS_REPORT_PATH/" "$BBS_PUBLISHED_REPORT_DEST_DIR/"
268 |
269 | For extra safety comment out BBS_PUBLISHED_REPORT_DEST_DIR in
270 |
271 | /home/biocbuild/BBS/3.8/config.sh
272 |
273 | After running postrun, the build report will be available at:
274 |
275 | http://publicIP/BBS/3.8/report/ # must be http (not https)
276 |
277 |
278 |
279 | ## Crontabs
280 |
281 | The BBS-provision-cookbook has a separate recipe for generating crontabs for
282 | the `biocbuild` and `biocadmin` users. This is not run as part of the default
283 | recipe.
284 |
285 | The cron tasks details are specified in
286 | BBS-provision-cookbook/attributes/default.rb. If the
287 | BBS-provision-cookbook::crontab recipe is run, be sure to modify the BBS config
288 | files on the test nodes to avoid unwanted communication with the primary build
289 | machines.
290 |
291 |
292 | ## Further Development
293 |
294 |
295 | ### Updating node configuration
296 |
297 | When developing, make changes to the local cookbook, then upload them to the
298 | Chef server:
299 |
300 | knife cookbook upload BBS-provision-cookbook
301 |
302 | Re-run the cookbook on the node:
303 |
304 | knife ssh 'name:val-test-malbec' 'sudo chef-client' --ssh-user ubuntu --ssh-identity-file ~/.ssh/vobencha-keypair.pem --attribute cloud.public_ipv4
305 |
306 | Good practice is to bump the version in metadata.rb for each substantial
307 | change and commit to GitHub.
308 |
309 |
310 | ### Adding a new recipe
311 |
312 | New recipes can be generated with `chef generate`, e.g., to create a
313 | recipe "crontab":
314 |
315 | chef generate recipe crontab
316 |
317 | Add a reference to the new recipe in `recipes/default.rb`:
318 |
319 | include_recipe 'BBS-provision-cookbook::crontab'
320 |
321 | Alternatively, add it to the node's runlist on the Chef server:
322 |
323 | knife node run_list add val-test-malbec 'recipe[BBS-provision-cookbook::crontab]'
324 |
--------------------------------------------------------------------------------
/recipes/default.rb:
--------------------------------------------------------------------------------
# __END__
# comment out the above but don't remove it
# include_recipe 'cron'
# Bring in the community apt cookbook so the apt cache is managed.
include_recipe 'apt'
# Force 'apt-get update' to run at COMPILE time (before any package
# resources converge) so subsequent installs see a fresh package index.
resources(execute: 'apt-get update').run_action(:run)


# Needed so locale-sensitive tools (R, perl) don't warn about missing locales.
package "language-pack-en"
9 |
# Map the role-provided node["reldev"] attribute (set by the
# bbs_devel_linux / bbs_release_linux roles) onto the symbol used to
# index the per-track attribute hashes.
reldev =
  case node["reldev"]
  when "devel"
    :dev
  when "release"
    :rel
  else
    raise "are the bbs_devel and bbs_release roles defined?"
  end

# Version strings for the selected track, from attributes/default.rb.
bioc_version = node['bioc_version'][reldev]
r_version = node['r_version'][reldev]
# Point /etc/localtime at the zone named by node['time_zone'];
# guarded so it only runs when the link is not already correct.
execute "change time zone" do
  user "root"
  command "rm -f /etc/localtime && ln -sf /usr/share/zoneinfo/#{node['time_zone']} /etc/localtime"
  not_if "file /etc/localtime | grep -q #{node['time_zone']}"
end

# In-run audit: fail the chef-client run if the zone still looks like UTC/GMT.
control_group 'time zone' do
  control 'should be set properly' do
    describe command("file /etc/localtime") do
      its(:stdout) { should_not match /UTC|GMT/}
    end
  end
end



# Persist the desired hostname (release- or devel-specific) ...
file "/etc/hostname" do
  content node['desired_hostname'][reldev]
  mode "0644"
end

# ... and apply it to the running system if it differs.
execute "set hostname" do
  command "hostname $(cat /etc/hostname)"
  not_if "hostname | grep -q $(cat /etc/hostname)"
end



# EC2 instances don't resolve their own hostname by default; map it to
# loopback so tools that reverse-resolve the local host don't hang.
execute "fix ec2 hostname bs" do
  command %Q(echo "127.0.0.1 $(hostname)" >> /etc/hosts)
  not_if "grep -q $(hostname) /etc/hosts"
end
53 |
54 |
# The biocbuild account owns the build tree and runs the builds.
user "biocbuild" do
  manage_home true
  home "/home/biocbuild"
  shell "/bin/bash"
  action :create
end


# Audit: verify the account and its home directory exist and are owned
# correctly.
control_group 'biocbuild' do
  control 'biocbuild user' do
    it 'should exist' do
      expect(file('/etc/passwd')).to contain(/biocbuild/)
      expect(file('/home/biocbuild')).to exist
      expect(file('/home/biocbuild')).to be_directory
      expect(file('/home/biocbuild')).to be_owned_by('biocbuild')
    end
  end
end


# Root of the per-release build tree, e.g. /home/biocbuild/bbs-3.8-bioc.
bbsdir = "/home/biocbuild/bbs-#{node['bioc_version'][reldev]}-bioc"

directory bbsdir do
  owner "biocbuild"
  group "biocbuild"
  mode "0755"
  action :create
end



# Audit: verify the build tree root exists with the right ownership.
control_group "bbsdir group" do
  control bbsdir do
    it 'should exist' do
      expect(file(bbsdir)).to exist
      expect(file(bbsdir)).to be_directory
      expect(file(bbsdir)).to be_owned_by('biocbuild')
    end
  end
end
95 |
96 |
# Per-user configuration directories: .ssh for authorized keys, .BBS for
# the build system's dedicated ssh identity.
%w(.ssh .BBS).each do |dotdir|
  directory "/home/biocbuild/#{dotdir}" do
    owner "biocbuild"
    group "biocbuild"
    mode "0755"
    action :create
  end
end

# Working directories inside the per-release build tree.
%w(log NodeInfo meat R).each do |sub|
  directory "#{bbsdir}/#{sub}" do
    owner "biocbuild"
    group "biocbuild"
    mode "0755"
    action :create
  end
end

# Source area plus the public report tree that apache serves.
%W(src public_html public_html/BBS public_html/BBS/#{node['bioc_version'][reldev]}).each do |sub|
  directory "/home/biocbuild/#{sub}" do
    owner "biocbuild"
    group "biocbuild"
    mode "0755"
    action :create
  end
end
128 |
129 |
130 |
# The data-experiment and workflows sub-builds each get a tree mirroring
# the main bioc layout, derived from bbsdir by swapping the suffix.
dataexpdir = bbsdir.sub(/bioc$/, "data-experiment")
workflowdir = bbsdir.sub(/bioc$/, "workflows")

[dataexpdir, workflowdir].each do |base|
  directory base do
    action :create
    owner "biocbuild"
    group "biocbuild"
  end

  # Working subdirectories for each sub-build.
  %w(log NodeInfo meat STAGE2_tmp).each do |sub|
    directory "#{base}/#{sub}" do
      owner "biocbuild"
      group "biocbuild"
      mode "0755"
      action :create
    end
  end
end
169 |
170 |
# svn client, needed for the (legacy) hedgehog checkouts below.
package "subversion"

control_group 'package subversion group' do
  control 'package subversion' do
    it 'should be installed' do
      expect(package('subversion')).to be_installed
    end
  end
end



# Legacy hedgehog svn locations. NOTE(review): base_url, base_data_url
# and branch are not referenced anywhere else in this recipe as visible
# here — possibly dead code kept for reference; confirm before removing.
base_url = "https://hedgehog.fhcrc.org/bioconductor"
base_data_url = "https://hedgehog.fhcrc.org/bioc-data"
if reldev == :dev
  branch = 'trunk'
else
  # e.g. branches/RELEASE_3_8 for bioc_version "3.8"
  branch = "branches/RELEASE_#{node['bioc_version'][reldev].sub(".", "_")}"
end
190 |
191 |
# System-level build dependencies for R itself and for CRAN/Bioconductor
# package compilation (compilers, -dev headers, TeX, MPI, Java, etc.).
# Installed one resource per package so a single failure is attributable.
%w(ack-grep libnetcdf-dev libhdf5-serial-dev sqlite libfftw3-dev libfftw3-doc
   libopenbabel-dev fftw3 fftw3-dev pkg-config xfonts-100dpi xfonts-75dpi
   libopenmpi-dev openmpi-bin mpi-default-bin openmpi-common
   libexempi3 openmpi-doc texlive-science python-mpi4py
   texlive-bibtex-extra texlive-fonts-extra fortran77-compiler gfortran
   libreadline-dev libx11-dev libxt-dev texinfo apache2 libxml2-dev
   libcurl4-openssl-dev libcurl4-nss-dev xvfb libpng12-dev
   libjpeg62-dev libcairo2-dev libcurl4-gnutls-dev libtiff5-dev
   tcl8.5-dev tk8.5-dev libicu-dev libgsl2 libgsl0-dev
   libgtk2.0-dev gcj-4.8 openjdk-8-jdk texlive-latex-extra
   texlive-fonts-recommended libgl1-mesa-dev libglu1-mesa-dev
   htop libgmp3-dev imagemagick unzip libhdf5-dev libncurses-dev libbz2-dev
   libxpm-dev liblapack-dev libv8-3.14-dev libperl-dev
   libarchive-extract-perl libfile-copy-recursive-perl libcgi-pm-perl tabix
   libdbi-perl libdbd-mysql-perl ggobi libgtkmm-2.4-dev libssl-dev byacc
   automake libmysqlclient-dev postgresql-server-dev-all
   firefox graphviz python-pip libxml-simple-perl texlive-lang-european
   libmpfr-dev libudunits2-dev tree python-yaml libmodule-build-perl gdb biber
   python-numpy python-pandas python-h5py
   libprotoc-dev libprotobuf-dev protobuf-compiler libapparmor-dev libgeos-dev
   librdf0-dev libmagick++-dev libsasl2-dev libpoppler-cpp-devel
   texlive-pstricks texlive-pstricks-doc texlive-luatex
   libglpk-dev libgdal-dev ocl-icd-opencl-dev
).each do |pkg|
  package pkg do
    action :install
  end
end
220 |
# Re-declared on purpose — see the workaround note below.
package 'libnetcdf-dev'

# Some packages are not installed by the above, even though the output
# suggests they are. See
# https://discourse.chef.io/t/package-not-installed-by-package-resource-on-ubuntu/8456
# So explicitly install using apt-get:

# comment this out for now for testing
# execute "install libnetcdf-dev" do
#   command "apt-get install -y libnetcdf-dev"
#   not_if "dpkg --get-selections libnetcdf-dev|grep -q libnetcdf-dev"
# end

package 'git'

# install a newer version of pandoc than available from the Ubuntu package repo
# (the .deb filename is the last path component of the download URL)
pandoc_deb = node['pandoc_url'].split("/").last

remote_file "/tmp/#{pandoc_deb}" do
  source node['pandoc_url']
end

# Install the downloaded .deb directly via dpkg.
dpkg_package "pandoc" do
  source "/tmp/#{pandoc_deb}"
end
246 |
# jupyter is guarded by its executable rather than `pip freeze`.
execute "install jupyter" do
  command "pip install jupyter"
  not_if "which jupyter | grep -q jupyter"
end

# Remaining python tooling, each guarded by its presence in `pip freeze`.
# Keys are the grep targets / resource names; values are the pip specs
# (pinned where the builds require a specific version).
{
  "ipython"      => "ipython==4.1.2",
  "nbconvert"    => "nbconvert==4.1.0",
  "h5pyd"        => "h5pyd",
  "scikit-learn" => "scikit-learn",
  "tensorflow"   => "tensorflow",
  "cwltool"      => "cwltool",
  "mofapy"       => "mofapy",
}.each do |name, spec|
  execute "install #{name}" do
    command "pip install #{spec}"
    not_if "pip freeze | grep -q #{name}"
  end
end
286 |
# Build argtable (clustal-omega dependency) from source.
# argtable_tarball is already a bare filename (last URL path component).
argtable_tarball = node['argtable_url'].split('/').last
argtable_dir = argtable_tarball.sub(".tar.gz", "")

remote_file "/tmp/#{argtable_tarball}" do
  source node['argtable_url']
end

execute "build argtable" do
  # Was "#{argtable_tarball.split('/').last}": the redundant split is
  # dropped since the variable is already a basename (see above).
  command "tar zxf #{argtable_tarball} && cd #{argtable_dir} && ./configure && make && make install"
  cwd "/tmp"
  # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
  not_if { File.exist? "/tmp/#{argtable_dir}/config.log" }
end
299 |
# Build Clustal Omega from source (needs argtable, built above).
clustalo_tarball = node['clustalo_url'].split('/').last
clustalo_dir = clustalo_tarball.sub(".tar.gz", "")

remote_file "/tmp/#{clustalo_tarball}" do
  source node['clustalo_url']
end

# Guarded by the installed executable, so a rebuild only happens when
# clustalo is absent from PATH.
execute "build clustalo" do
  command "tar zxf #{clustalo_tarball} && cd #{clustalo_dir} && ./configure && make && make install"
  not_if "which clustalo | grep -q clustalo"
  cwd "/tmp"
end
312 |
# Install git-lfs from its release tarball (ships an install.sh).
gitlfs_dir = node['git-lfs_dir']
gitlfs_tarball = "#{gitlfs_dir}.tar.gz"

remote_file "/tmp/#{gitlfs_tarball}" do
  source node['git-lfs_url']
end

execute "install git-lfs" do
  command "tar zxf #{gitlfs_tarball} && cd #{gitlfs_dir} && ./install.sh"
  not_if "which git-lfs | grep -q git-lfs"
  cwd "/tmp"
end

# Check out the BBS code base itself into biocbuild's home.
git "/home/biocbuild/BBS" do
  repository node['bbs_repos']
  revision node['bbs_branch']
  user 'biocbuild'
  group 'biocbuild'
end
332 |
# Download area for R source tarballs inside the build tree.
directory "#{bbsdir}/rdownloads" do
  action :create
  owner 'biocbuild'
  group 'biocbuild'
end

# Fetch the R source tarball for the selected track.
remote_file "#{bbsdir}/rdownloads/#{node['r_url'][reldev].split("/").last}" do
  source node['r_url'][reldev]
  owner 'biocbuild'
  group 'biocbuild'
end

# Unpack it next to the tarball; skipped once the source dir exists.
execute "untar R" do
  command "tar zxf #{bbsdir}/rdownloads/#{node['r_url'][reldev].split("/").last}"
  user "biocbuild"
  group "biocbuild"
  cwd "#{bbsdir}/rdownloads"
  # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
  not_if { File.exist? "#{bbsdir}/rdownloads/#{node['r_src_dir'][reldev]}" }
end
352 |
353 |
# Out-of-tree build: configure is invoked from the unpacked source dir
# while cwd is #{bbsdir}/R, so build artifacts land under R/.
execute "build R" do
  command "#{bbsdir}/rdownloads/#{node['r_src_dir'][reldev]}/configure --enable-R-shlib && make"
  user "biocbuild"
  group "biocbuild"
  cwd "#{bbsdir}/R"
  # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
  not_if { File.exist? "#{bbsdir}/R/Makefile" }
end

# Apply BBS-specific compiler flags; the script saves the original
# Makeconf as Makeconf.original, which doubles as the idempotence guard.
execute "set R flags" do
  command "/home/biocbuild/BBS/utils/R-fix-flags.sh"
  user "biocbuild"
  group "biocbuild"
  cwd "#{bbsdir}/R/etc"
  not_if { File.exist? "#{bbsdir}/R/etc/Makeconf.original" }
end
369 |
# Convenience alias "arp" ("add R to path") in biocbuild's bash profile.
execute "set up arp alias" do
  command %Q(echo 'alias arp="export PATH=$PATH:$HOME/bbs-#{node['bioc_version'][reldev]}-bioc/R/bin"' >> /home/biocbuild/.bash_profile)
  cwd "/home/biocbuild"
  user "biocbuild"
  group "biocbuild"
  not_if "grep -q arp /home/biocbuild/.bash_profile"
end

# Bootstrap BiocInstaller into the freshly built R's library.
execute "install BiocInstaller" do
  command %Q(#{bbsdir}/R/bin/R -e "source('https://bioconductor.org/biocLite.R')")
  user "biocbuild"
  group "biocbuild"
  # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
  not_if { File.exist? "#{bbsdir}/R/library/BiocInstaller" }
end
384 |
# Devel track only: switch BiocInstaller to the devel repositories.
# The guard checks BiocInstaller's internal IS_USER flag.
if reldev == :dev
  execute "run useDevel()" do
    command %Q(#{bbsdir}/R/bin/R -e "BiocInstaller::useDevel()")
    user "biocbuild"
    group "biocbuild"
    not_if %Q(#{bbsdir}/R/bin/R --slave -q -e "BiocInstaller:::IS_USER" | grep -q FALSE)
  end
end

# Expose the build reports through apache's docroot.
link "/var/www/html/BBS" do
  to "/home/biocbuild/public_html/BBS"
end
397 |
398 |
399 |
400 | # biocadmin
401 |
# The biocadmin account runs the package propagation pipeline.
user "biocadmin" do
  manage_home true
  home "/home/biocadmin"
  shell "/bin/bash"
  action :create
end


# Full repository layout under ~/biocadmin: per-platform contrib trees
# for every package type (bioc, data, extra, workflows) plus admin dirs.
dirs = %W(
  bin InstalledPkgs tmp rdownloads
  PACKAGES/#{bioc_version}/biocViews
  PACKAGES/#{bioc_version}/bioc/src/contrib
  PACKAGES/#{bioc_version}/bioc/bin/windows/contrib/#{r_version}
  PACKAGES/#{bioc_version}/bioc/bin/macosx/contrib/#{r_version}
  PACKAGES/#{bioc_version}/bioc/bin/macosx/mavericks/contrib/#{r_version}
  PACKAGES/#{bioc_version}/data/experiment/src/contrib
  PACKAGES/#{bioc_version}/data/experiment/bin/windows/contrib/#{r_version}
  PACKAGES/#{bioc_version}/data/experiment/bin/macosx/contrib/#{r_version}
  PACKAGES/#{bioc_version}/data/experiment/bin/macosx/mavericks/contrib/#{r_version}
  PACKAGES/#{bioc_version}/data/annotation/src/contrib
  PACKAGES/#{bioc_version}/data/annotation/bin/windows/contrib/#{r_version}
  PACKAGES/#{bioc_version}/data/annotation/bin/macosx/contrib/#{r_version}
  PACKAGES/#{bioc_version}/data/annotation/bin/macosx/mavericks/contrib/#{r_version}
  PACKAGES/#{bioc_version}/extra/src/contrib
  PACKAGES/#{bioc_version}/extra/bin/windows/contrib/#{r_version}
  PACKAGES/#{bioc_version}/extra/bin/macosx/contrib/#{r_version}
  PACKAGES/#{bioc_version}/extra/bin/macosx/mavericks/contrib/#{r_version}
  PACKAGES/#{bioc_version}/workflows/src/contrib
  PACKAGES/#{bioc_version}/workflows/bin/windows/contrib/#{r_version}
  PACKAGES/#{bioc_version}/workflows/bin/macosx/contrib/#{r_version}
  PACKAGES/#{bioc_version}/workflows/bin/macosx/mavericks/contrib/#{r_version}
  cron.log/#{bioc_version}
)
435 |
# Expand a relative path into the list of all its ancestor paths plus
# itself, e.g. "a/b/c" => ["a", "a/b", "a/b/c"]. Used so each level of a
# deep directory tree can be created with explicit ownership.
def parent_dirs(dir)
  paths = []
  current = ""
  dir.split("/").each do |segment|
    current = current.empty? ? segment : "#{current}/#{segment}"
    paths << current
  end
  paths
end
440 |
## Explicitly create every parent directory as well, so intermediate
## path components get biocadmin ownership (not left root-owned).
dirs.flat_map { |d| parent_dirs(d) }.uniq.each do |path|
  directory "/home/biocadmin/#{path}" do
    action :create
    owner "biocadmin"
    group "biocadmin"
  end
end
449 |
# biocadmin gets its own checkout of the BBS code base.
git "/home/biocadmin/BBS" do
  user "biocadmin"
  group "biocadmin"
  repository node['bbs_repos']
  revision node['bbs_branch']
end

# Stable path to the propagation pipeline inside the checkout.
link "/home/biocadmin/propagation-pipe" do
  to "/home/biocadmin/BBS/propagation-pipe"
  owner "biocadmin"
  group "biocadmin"
end

# windows64 is served as an alias of the windows contrib tree for each
# package type (relative symlink, resolved within each bin/ directory).
%W(bioc data/annotation data/experiment extra).each do |dir|
  link "/home/biocadmin/PACKAGES/#{bioc_version}/#{dir}/bin/windows64" do
    to "windows"
    owner "biocadmin"
    group "biocadmin"
  end
end
470 |
# install R
# install knitcitations
# install all pkgs in ~/InstalledPkgs



# biocadmin gets its own R build (separate from biocbuild's).
remote_file "/home/biocadmin/rdownloads/#{node['r_url'][reldev].split("/").last}" do
  source node['r_url'][reldev]
  owner "biocadmin"
  group "biocadmin"
end

# NOTE(review): this resource shares the name "untar R" with the
# biocbuild one above; Chef warns about duplicate resource names.
execute "untar R" do
  command "tar zxf /home/biocadmin/rdownloads/#{node['r_url'][reldev].split("/").last} && mv #{node['r_src_dir'][reldev]} /home/biocadmin/R-#{r_version}"
  user "biocadmin"
  group "biocadmin"
  cwd "/home/biocadmin/rdownloads"
  # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
  not_if { File.exist? "/home/biocadmin/R-#{r_version}" }
end
491 |
492 |
493 |
# In-tree build of biocadmin's R; config.log marks a completed configure.
execute "build R" do
  command "./configure --enable-R-shlib && make"
  user "biocadmin"
  group "biocadmin"
  cwd "/home/biocadmin/R-#{r_version}/"
  # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
  not_if { File.exist? "/home/biocadmin/R-#{r_version}/config.log" }
end

# should really install these from ~/InstalledPkgs but this is easier.
# knitcitations is installed last by biocLite, so its presence implies
# the whole set succeeded — hence the guard below.
execute "install pkgs needed by biocadmin" do
  user "biocadmin"
  group "biocadmin"
  command %Q(/home/biocadmin/R-#{r_version}/bin/R -e "source('https://bioconductor.org/biocLite.R');biocLite(c('biocViews','DynDoc','graph','knitr','knitcitations'))")
  not_if { File.exist? "/home/biocadmin/R-#{r_version}/library/knitcitations" }
end

# Versioned R launcher in biocadmin's ~/bin.
link "/home/biocadmin/bin/R-#{r_version}" do
  owner "biocadmin"
  group "biocadmin"
  to "/home/biocadmin/R-#{r_version}/bin/R"
end
515 |
516 |
# ROOT (CERN data-analysis framework, needed by some Bioc packages)

remote_file "/tmp/#{node['root_url'][reldev].split("/").last}" do
  source node['root_url'][reldev]
end

directory "/tmp/rootbuild" do
  action :create
end

# The tarball unpacks into a "root" directory inside /tmp/rootbuild;
# its existence doubles as the idempotence guard.
execute "build root" do
  cwd "/tmp/rootbuild"
  command "tar zxf /tmp/#{node['root_url'][reldev].split("/").last} && cd root && ./configure --prefix=/usr/local/root && make && make install"
  # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
  not_if { File.exist? "/tmp/rootbuild/root" }
end
532 |
533 |
# Let the dynamic linker find ROOT's shared libraries.
file "/etc/ld.so.conf.d/ROOT.conf" do
  content "/usr/local/root/lib/root"
end

# Refresh the linker cache. NOTE(review): unguarded, so this runs on
# every converge — harmless since ldconfig is idempotent.
execute "ldconfig" do
  command "ldconfig"
end

execute "add root to path" do
  command "echo 'export PATH=$PATH:/usr/local/root/bin' >> /etc/profile"
  not_if "grep -q /usr/local/root/bin /etc/profile"
end

# ROOTSYS is required by ROOT's own scripts and by packages linking it.
execute "add rootsys" do
  command "echo 'export ROOTSYS=/usr/local/root' >> /etc/profile"
  not_if "grep -q ROOTSYS /etc/profile"
end
551 |
# jags (Gibbs sampler, dependency of rjags)

remote_file "/tmp/#{node['jags_url'][reldev].split('/').last}" do
  source node['jags_url'][reldev]
end

execute "build jags" do
  command "tar zxf #{node['jags_url'][reldev].split('/').last} && cd #{node['jags_dir'][reldev]} && ./configure && make && make install"
  cwd "/tmp"
  # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
  not_if { File.exist? "/tmp/#{node['jags_dir'][reldev]}/config.log" }
end
563 |
# libsbml (systems-biology markup, dependency of rsbml; needs --enable-layout)

remote_file "/tmp/#{node['libsbml_url'].split('/').last}" do
  source node['libsbml_url']
end

execute "build libsbml" do
  command "tar zxf #{node['libsbml_url'].split('/').last} && cd #{node['libsbml_dir']} && ./configure --enable-layout && make && make install"
  cwd "/tmp"
  # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
  not_if { File.exist? "/tmp/#{node['libsbml_dir']}/config.log" }
end
575 |
# Vienna RNA (secondary-structure prediction tools)

remote_file "/tmp/#{node['vienna_rna_dir']}.tar.gz" do
  source node["vienna_rna_url"]
end

execute "build ViennaRNA" do
  command "tar zxf #{node['vienna_rna_dir']}.tar.gz && cd #{node['vienna_rna_dir']}/ && ./configure && make && make install"
  cwd "/tmp"
  # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
  not_if { File.exist? "/tmp/#{node['vienna_rna_dir']}/config.log" }
end
587 |
588 | # ensemblVEP
589 |
590 | remote_file "/tmp/#{node['vep_dir'][reldev]}.zip" do
591 | source node['vep_url'][reldev]
592 | end
593 |
594 | execute "install VEP" do
595 | command "unzip #{node['vep_dir'][reldev]} && mv #{node['vep_dir'][reldev]} /usr/local/ && cd /usr/local/#{node['vep_dir'][reldev]} && perl INSTALL.pl --NO_HTSLIB -a a"
596 | cwd "/tmp"
597 | not_if {File.exists? "/usr/local/#{node['vep_dir'][reldev]}"}
598 | end
599 |
600 | # add /usr/local/vep to path
601 |
602 | execute "add vep to path" do
603 | command "echo 'export PATH=$PATH:/usr/local/vep' >> /etc/profile"
604 | not_if "grep -q vep /etc/profile"
605 | end
606 |
607 | # TODO s:
608 | # cron - pointer in crontab to crond
609 | # ssh keys
610 | # latex - enablewrite18 and changes below
611 | # rgtk2? gtkmm?
612 | # in encrypted data bags:
613 | # isr_login
614 | # google login
615 | # etc
616 | # the above go in cron envs as well
617 |
618 |
# latex settings

# Enable \write18 (shell escape) system-wide for TeX, required by some
# vignettes; mirrors the 01bioc.cnf file shipped at the repo root.
file "/etc/texmf/texmf.d/01bioc.cnf" do
  content "shell_escape=t"
  owner "root"
  group "root"
  mode "0644"
end

# Regenerate texmf.cnf from the snippets in /etc/texmf/texmf.d.
# NOTE(review): unguarded, runs every converge — idempotent in effect.
execute "update-texmf" do
  action :run
  user "root"
  command "update-texmf"
end
633 |
# get stuff from encrypted data bags
# (requires /etc/chef/encrypted_data_bag_secret on the client — see README)

# Private half of the "incoming" key pair, used by BBS for ssh to the
# central builder; mode 0400 as ssh requires.
file "/home/biocbuild/.BBS/id_rsa" do
  owner "biocbuild"
  group "biocbuild"
  mode "0400"
  content Chef::EncryptedDataBagItem.load('BBS',
    'incoming_private_key')['value']
end

# Public half appended to authorized_keys so the build machines can ssh
# in as biocbuild; guard greps for the exact key to stay idempotent.
execute "add public key to authorized_keys" do
  user "biocbuild"
  group "biocbuild"
  command "echo #{Chef::EncryptedDataBagItem.load('BBS',
    'incoming_public_key')['value']} >> /home/biocbuild/.ssh/authorized_keys"
  not_if %Q(grep -q "#{Chef::EncryptedDataBagItem.load('BBS',
    'incoming_public_key')['value']}" /home/biocbuild/.ssh/authorized_keys)
end
652 |
# Export credentials from the encrypted BBS data bag into /etc/profile.
# Each entry: env var name => [resource label, data bag item, item field].
# The guard greps for the variable name, so each line is appended once.
{
  "GOOGLE_API_KEY" => ["google api key", 'google_api_key', 'value'],
  "ISR_login"      => ["ISR_login", 'isr_credentials', 'username'],
  "ISR_pwd"        => ["ISR_pwd", 'isr_credentials', 'password'],
}.each do |var, (label, bag_item, field)|
  secret = Chef::EncryptedDataBagItem.load('BBS', bag_item)[field]
  execute "add #{label} to /etc/profile" do
    user "root"
    command %Q(echo "export #{var}=#{secret}" >> /etc/profile)
    not_if %Q(grep -q #{var} /etc/profile)
  end
end
673 |
# Private half of the "outgoing" key pair: biocbuild's default ssh
# identity for connecting out to other machines; mode 0400 per ssh.
file "/home/biocbuild/.ssh/id_rsa" do
  owner "biocbuild"
  group "biocbuild"
  mode "0400"
  content Chef::EncryptedDataBagItem.load('BBS',
    'outgoing_private_key')['value']
end
681 |
682 | # FIXME more stuff that needs to be in data bags:
683 | # * github oauth token for codecov
684 | # * codecov token
685 | # * aws credentials for archiving build reports to s3
686 |
687 |
688 | # set up cron.d entries for biocbuild
689 |
690 | # first, indicate in crontab to look elsewhere:
# Replace biocbuild's crontab with a single comment pointing readers at
# /etc/cron.d (where the crontab recipe places the real schedules).
# WARNING: `crontab -` replaces the entire crontab; the guard prevents
# clobbering once the marker comment is present.
execute "tell viewers of crontab to look in /etc/cron.d" do
  command %Q(echo "# scheduled tasks are defined in /etc/cron.d, not here" | crontab -)
  user "biocbuild"
  not_if %Q(crontab -l |grep -q "# scheduled tasks are defined in /etc/cron.d")
end
696 |
697 | # cron_d "pre-build-script" do
698 | #
699 | # end
700 |
701 |
702 |
703 | # FIXME - set up pkgbuild stuff (e.g., logrotate) if this is a devel builder
704 | # github_chef_key (from data bag)
705 |
--------------------------------------------------------------------------------