├── .babelrc
├── .circleci
└── config.yml
├── .dockerignore
├── .editorconfig
├── .eslintrc
├── .gitignore
├── .prettierrc
├── Dockerfile
├── README.md
├── config
└── swarm-sync.yml
├── env_secrets_expand.sh
├── known_hosts
├── nodemon.json
├── package-lock.json
├── package.json
├── src
├── config.js
├── configRepo
│ ├── index.js
│ ├── pack.js
│ └── stack.js
├── docker
│ └── index.js
├── index.js
├── registry
│ ├── client.js
│ ├── index.js
│ └── patterns.js
├── state
│ └── index.js
├── sync
│ ├── configRepo.js
│ └── serviceImages.js
└── utils
│ ├── git.js
│ ├── index.js
│ └── logger.js
└── start.sh
/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 | "presets": [
3 | ["@babel/preset-env", {"exclude": ["transform-regenerator"]}]
4 | ]
5 | }
--------------------------------------------------------------------------------
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | jobs:
3 | build_publish:
4 | machine: true
5 | steps:
6 | - checkout
7 | - run: docker login -u $DOCKER_USER -p $DOCKER_PASS
8 | - run: docker build -t swarmpack/swarm-sync:$CIRCLE_TAG .
9 | - run: docker push swarmpack/swarm-sync:$CIRCLE_TAG
10 |
11 | workflows:
12 | version: 2
13 | build_and_release:
14 | jobs:
15 | - build_publish:
16 | filters:
17 | branches:
18 | ignore: /.*/
19 | tags:
20 | only: /^v[0-9]+(\.[0-9]+)*$/
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | .git
2 | .drone.yml
3 | .editorconfig
4 | .circleci
5 | .idea
6 | node_modules
7 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | indent_style = space
5 | indent_size = 2
6 | end_of_line = lf
7 | charset = utf-8
8 | trim_trailing_whitespace = false
9 | insert_final_newline = true
10 |
--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "rules": {
3 | "prettier/prettier": ["error"],
4 | "function-paren-newline": "off",
5 | "no-underscore-dangle": "off",
6 | "import/extensions": [
7 | "error",
8 | "always",
9 | {
10 | "js": "never",
11 | "mjs": "never"
12 | }
13 | ],
14 | "no-restricted-syntax": [0, "ForInStatement", "ForOfStatement"],
15 | "no-await-in-loop": [0, "ForInStatement", "ForOfStatement"],
16 | "max-len": [1, 120, 2],
17 | "no-console": "off"
18 | },
19 | "globals": {
20 | "describe": true,
21 | "beforeEach": true,
22 | "before": true,
23 | "afterEach": true,
24 | "after": true,
25 | "it": true
26 | },
27 | "env": {
28 | "node": true,
29 | "es6": true
30 | },
31 |
32 | "extends": ["airbnb-base", "prettier"],
33 | "plugins": ["prettier"],
34 | "settings": {
35 | "import/resolver": {
36 | "node": {
37 | "extensions": [".js", ".mjs"]
38 | }
39 | }
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | .idea
3 |
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "printWidth": 90,
3 | "singleQuote": true
4 | }
5 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:12-slim
2 | # set app port
3 | ENV PORT 80
4 | # Location of Swarm sync configuration file
5 | ENV SWARM_SYNC_CONFIG_FILE /etc/swarm-sync.yml
6 | # Location where we save state of swarm-sync
7 | ENV SWARM_SYNC_STATE_FILE /run/swarm-sync/state
8 | # Private SSH key used to generate /root/.ssh/id_rsa for private git repos
9 | ENV SSH_PRIVATE_KEY false
10 |
11 | # Log level - trace: 0, debug: 1, info: 2, warn: 3, error: 4, silent: 5
12 | ENV SWARM_SYNC_LOGLEVEL 2
13 |
14 | WORKDIR /opt
15 |
16 | RUN curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - && \
17 | apt-get update && \
18 | apt-get install -y apt-transport-https && \
19 | echo "deb https://download.docker.com/linux/debian stretch stable" | \
20 | tee /etc/apt/sources.list.d/docker.list && \
21 | curl https://download.docker.com/linux/debian/gpg | apt-key add - && \
22 | apt-get update && \
23 | apt-get install -y \
24 | docker-ce-cli ssh libssl-dev make g++ gnupg git && \
25 | curl -L https://github.com/AGWA/git-crypt/archive/0.6.0.tar.gz | tar zxv -C /var/tmp && \
26 | cd /var/tmp/git-crypt-0.6.0 && make && make install PREFIX=/usr/local && rm -rf /var/tmp/git-crypt-0.6.0 && \
27 | mkdir -p /root/.ssh
28 |
29 | COPY known_hosts /root/.ssh/known_hosts
30 | COPY . .
31 |
32 | RUN npm install --production && \
33 | chmod +x ./start.sh && \
34 | chmod +x ./env_secrets_expand.sh
35 |
36 | # expose port
37 | EXPOSE 80
38 |
39 | ENTRYPOINT [ "./start.sh"]
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Swarm Sync
2 |
3 | GitOps, a term coined by [WeaveWorks](https://www.weave.works/blog/gitops-operations-by-pull-request), is a way to do continuous delivery and manage your environments through git using pull requests.
4 |
5 | Swarm-sync runs as a service in your Docker Swarm. It watches a Config Repo (a git repository managed by you) for changes and deploys them to your Swarm. Additionally it can watch Docker Registries for new images for your services and update your Swarm services accordingly.
6 |
7 | The Config Repo contains a set of [Swarm Packs](https://github.com/swarm-pack/swarm-pack) and configuration values which will be used to set up your Swarm Services.
8 |
9 | The best way to understand the Config Repo is by looking at the [**example Config Repo**](https://github.com/swarm-pack/swarm-sync-example).
10 |
11 | ### Diagram
12 |
13 |
14 | +---------+ +---------+ +-----------+
15 | @ | Code | | | | Container |
16 | -|- +---------> | Repo +----------->+ CI +----------->+ Registry |
17 | / \ commit | | build | | push | |
18 | +---------+ +---------+ +-----------+
19 | Developer |
20 | |
21 | |
22 | +--------------------+
23 | |
24 | |
25 | v
26 | +---------+ +-----------+ +-----------+
27 | @ | Config | | Swarm | | Docker |
28 | -|- +---------> | Repo <----------->+ Sync +-----------> Swarm |
29 | / \ commit | | sync | | apply | |
30 | +---------+ +-----------+ +-----------+
31 | Developer
32 |
33 |
34 |
35 | ### Components of gitops
36 |
37 | There are 3 main components needed to achieve our GitOps pipeline:
38 |
39 | 1. **Configuration Repository** - a Git repository containing all the configuration for your Docker Swarm stacks. Example repository: [swarm-pack/swarm-sync-example](https://github.com/swarm-pack/swarm-sync-example)
40 |
41 | 2. **Container Registry** - A Docker registry to push our application images to (hopefully from a CI somewhere else)
42 |
43 | 3. **SwarmSync** - SwarmSync will be running inside your Swarm as a service and will be configured to point to the other 2 components.
44 |
45 | ### Swarm-Pack
46 |
47 | Swarm-Sync relies on [Swarm Pack](https://github.com/swarm-pack/swarm-pack) to compile and deploy services from templates. An understanding of swarm-pack is required to use swarm-sync, so if you haven't already take a look there.
48 |
49 | ### Quick-start guide
50 |
51 | 1. Fork the repo https://github.com/swarm-pack/swarm-sync-example - this guide will use this URL, but you should replace with your own fork and re-configure your own desired config.
52 |
53 | 2. Create a config file for swarm-sync, similar to this one:
54 |
55 | ```yaml
56 | swarm-sync:
57 |
58 | # Stacks in target for this swarm-sync instance
59 | stacks:
60 | - nonprod
61 |
62 | # Update frequency for polling repo and image registry for changes (ms)
63 | # Below 1 minute not recommended
64 | updateInterval: 60000
65 |
66 | # Git details for your Swarm Configuration Repository
67 | git:
68 | url: https://github.com/swarm-pack/swarm-sync-example
69 | branch: master
70 |
71 | # Common config with swarm-pack
72 | docker:
73 | socketPath: /var/run/docker.sock
74 | repositories:
75 | - name: official
76 | url: https://github.com/swarm-pack/repository
77 | ```
78 |
79 | Upload this file to a manager node in your Swarm
80 |
81 | 3. On the manager node, run
82 |
83 | ```
84 | docker run -it \
85 | -v /var/run/docker.sock:/var/run/docker.sock \
86 | -v /path/to/swarm-sync.yml:/etc/swarm-sync.yml \
87 | swarmpack/swarm-sync --bootstrap
88 | ```
89 |
90 | This uses the "--bootstrap" flag for swarm-sync, meaning it will not run as a daemon. That's because in our example config repo we have a swarm-pack configured for swarm-sync, so it will be deployed as a service. Make sure the swarm-sync config is the same inside your Config Repo values.
91 |
92 | 4. Check your desired services are now running on your Swarm
93 |
94 | ```bash
95 | docker service ls
96 | ```
97 |
98 | 5. Push a change to your config repo, and check that the services update themselves within 5 minutes!
99 |
100 | ### Stacks
101 |
102 | Stacks are a way to namespace things, and identify what is in scope for a particular instance of Swarm Sync.
103 |
104 | For example, you might have 2 Swarms: **prod** and **nonprod**. One approach is to create corresponding Stacks in your config repo at `stacks/prod` and `stacks/nonprod`. Each Swarm has an instance of swarm-sync with different config files. In your **prod** Stack instance, you would have:
105 | ```yaml
106 | stacks:
107 | - prod
108 | ```
109 |
110 | In your **nonprod** instance you'd have
111 | ```yaml
112 | stacks:
113 | - nonprod
114 | ```
115 |
116 | With this configuration, the stack defined in `stacks/nonprod/stack.yml` will be synced to your **nonprod** Swarm and the stack defined in `stacks/prod/stack.yml` will be synced to your **prod** Swarm.
117 |
118 | ### Configuration
119 |
120 | Config file should be mounted at /etc/swarm-sync.yml and Docker Config or Docker Secret is recommended for this.
121 |
122 | ### Registry authentication
123 |
124 | If watching for image updates in a private docker registry, you will need to define authentication credentials.
125 |
126 | This is done by mounting a docker secret matching the hostname of the registry at `/run/secrets/registries/[hostname]`.
127 |
128 | The secret should be a yml file in the following format:
129 |
130 | ```
131 | username: xxx
132 | password: xxx
133 | ```
134 |
135 | For example, to authenticate docker.example.com, we mount the following secret to `/run/secrets/registries/docker.example.com`:
136 |
137 | ```
138 | username: example
139 | password: changeme
140 | ```
141 |
142 | ## Development
143 |
144 | - Node v11.9.0
145 | - Yarn v1.7.0
146 |
147 | Install dependencies
148 |
149 | ```bash
150 | yarn install
151 | ```
152 |
153 | Start server development
154 |
155 | ```bash
156 | yarn dev
157 | ```
158 |
--------------------------------------------------------------------------------
/config/swarm-sync.yml:
--------------------------------------------------------------------------------
1 | swarm-sync:
2 |
3 | # Cache dir
4 |
5 |
6 | # Stacks in target for this swarm-sync instance
7 | stacks:
8 | - nonprod
9 |
10 | # Update frequency for polling repo and image registry for changes (ms)
11 | # Below 1 minute not recommended
12 | updateInterval: 60000
13 |
14 | # Git details for your Swarm Configuration Repository
15 | git:
16 | url: https://github.com/kevb/swarm-sync-example
17 | branch: master
18 |
19 | # Swarm-sync can run git-crypt to decrypt your repo with a symmetric key
20 | #git_crypt:
21 | # keyFile: /run/secrets/git-crypt-key
22 |
23 | # Common config with swarm-pack
24 | docker:
25 | socketPath: /var/run/docker.sock
26 | # used for throttling time between Docker registry requests
27 | minRegistryReqInterval: 2000
28 |
29 | # swarm-pack repos
30 | repositories:
31 | - name: official
32 | url: https://github.com/swarm-pack/repository
--------------------------------------------------------------------------------
/env_secrets_expand.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | echo "Pulling environment secrets..."
4 |
5 | : ${ENV_SECRETS_DIR:=/run/secrets}
6 |
7 | function env_secret_debug() {
8 | if [ ! -z "$ENV_SECRETS_DEBUG" ]; then
9 | echo -e "\033[1m$@\033[0m"
10 | fi
11 | }
12 |
13 | # usage: env_secret_expand VAR
14 | # ie: env_secret_expand 'XYZ_DB_PASSWORD'
15 | # (will check for "$XYZ_DB_PASSWORD" variable value for a placeholder that defines the
16 | # name of the docker secret to use instead of the original value. For example:
17 | # XYZ_DB_PASSWORD=DOCKER-SECRET->my-db.secret
18 | env_secret_expand() {
19 | var="$1"
20 | eval val=\$$var
21 | if secret_name=$(expr match "$val" "DOCKER-SECRET->\([^}]\+\)$"); then
22 | secret="${ENV_SECRETS_DIR}/${secret_name}"
23 | env_secret_debug "Secret file for $var: $secret"
24 | if [ -f "$secret" ]; then
25 | val=$(cat "${secret}")
26 | export "$var"="$val"
27 | env_secret_debug "Expanded variable: $var=$val"
28 | else
29 | env_secret_debug "Secret file does not exist! $secret"
30 | fi
31 | fi
32 | }
33 |
34 | env_secrets_expand() {
35 | for env_var in $(printenv | cut -f1 -d"=")
36 | do
37 | env_secret_expand $env_var
38 | done
39 |
40 | if [ ! -z "$ENV_SECRETS_DEBUG" ]; then
41 | echo -e "\n\033[1mExpanded environment variables\033[0m"
42 | printenv
43 | fi
44 | }
45 |
46 | env_secrets_expand
47 |
48 | echo "Finished pulling environment secrets"
49 |
--------------------------------------------------------------------------------
/known_hosts:
--------------------------------------------------------------------------------
1 | bitbucket.org ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAubiN81eDcafrgMeLzaFPsw2kNvEcqTKl/VqLat/MaB33pZy0y3rJZtnqwR2qOOvbwKZYKiEO1O6VqNEBxKvJJelCq0dTXWT5pbO2gDXC6h6QDXCaHo6pOHGPUy+YBaGQRGuSusMEASYiWunYN0vCAI8QaXnWMXNMdFP3jHAJH0eDsoiGnLPBlBp4TNm6rYI74nMzgz3B9IikW4WVK+dc8KZJZWYjAuORU3jc1c/NPskD2ASinf8v3xnfXeukU0sJ5N6m5E8VLjObPEO+mN2t/FZTMZLiFqPWc/ALSqnMnnhwrNi2rbfg/rd/IpL8Le3pSBne8+seeFVBoGqzHM9yXw==
2 | gitlab.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCsj2bNKTBSpIYDEGk9KxsGh3mySTRgMtXL583qmBpzeQ+jqCMRgBqB98u3z++J1sKlXHWfM9dyhSevkMwSbhoR8XIq/U0tCNyokEi/ueaBMCvbcTHhO7FcwzY92WK4Yt0aGROY5qX2UKSeOvuP4D6TPqKF1onrSzH9bx9XUf2lEdWT/ia1NEKjunUqu1xOB/StKDHMoX4/OKyIzuS0q/T1zOATthvasJFoPrAjkohTyaDUz2LN5JoH839hViyEG82yB+MjcFV5MU3N1l1QL3cVUCh93xSaua1N85qivl+siMkPGbO5xR/En4iEY6K2XPASUEMaieWVNTRCtJ4S8H+9
3 | gitlab.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBFSMqzJeV9rUzU4kWitGjeR4PWSa29SPqJ1fVkhtj3Hw9xjLVXVYrU9QlYWrOLXBpQ6KWjbjTDTdDkoohFzgbEY=
4 | gitlab.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAfuCHKVTjquxvt6CM6tdG4SLp1Btn/nOeHHE5UOzRdf
5 | github.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==
--------------------------------------------------------------------------------
/nodemon.json:
--------------------------------------------------------------------------------
1 | {
2 | "verbose": true,
3 | "ignore": ["*.test.js", "fixtures/*"]
4 | }
5 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "swarm-sync",
3 | "version": "0.1.66",
4 | "description": "GitOps for Docker Swarm",
5 | "main": "src/index.js",
6 | "repository": "https://github.com/swarm-pack/swarm-sync.git",
7 | "author": "Kevin Bennett ",
8 | "license": "MIT",
9 | "engines": {
10 | "node": ">=10.4"
11 | },
12 | "scripts": {
13 | "start": "node src/index.js",
14 | "dev": "nodemon --exec node src/index.js",
15 | "precommit": "yarn run lint",
16 | "lint": "eslint src/**",
17 | "prettier": "prettier --write"
18 | },
19 | "dependencies": {
20 | "apollo-errors": "^1.9.0",
21 | "apollo-server-express": "^1.3.6",
22 | "body-parser": "^1.18.3",
23 | "commander": "^2.19.0",
24 | "compare-versions": "^3.4.0",
25 | "config": "^3.0.1",
26 | "deep-extend": "^0.6.0",
27 | "docker-registry-client": "^3.3.0",
28 | "dockerode": "^2.5.8",
29 | "dotenv": "^6.0.0",
30 | "express": "^4.16.3",
31 | "fs-extra": "^7.0.1",
32 | "getenv": "^0.7.0",
33 | "graphql": "^0.13.2",
34 | "graphql-tools": "^3.0.2",
35 | "helmet": "^3.12.1",
36 | "js-yaml": "^3.13.0",
37 | "lodash": "^4.17.10",
38 | "lodash-id": "^0.14.0",
39 | "lowdb": "^1.0.0",
40 | "minimatch": "^3.0.4",
41 | "mongoose": "^5.1.5",
42 | "node-cache": "^4.2.0",
43 | "object-hash": "^1.3.1",
44 | "p-iteration": "^1.1.7",
45 | "project-version": "^1.0.0",
46 | "request": "^2.87.0",
47 | "request-promise": "^4.2.2",
48 | "request-promise-native": "^1.0.5",
49 | "semver": "^5.6.0",
50 | "shelljs": "^0.8.3",
51 | "simple-git": "^1.107.0",
52 | "swarm-pack": "https://github.com/swarm-pack/swarm-pack.git",
53 | "tmp": "^0.0.33",
54 | "utf8": "^3.0.0",
55 | "winston": "2.4.3",
56 | "winston-papertrail": "^1.0.5"
57 | },
58 | "devDependencies": {
59 | "@babel/cli": "^7.0.0-beta.51",
60 | "@babel/core": "^7.0.0-beta.51",
61 | "@babel/node": "^7.0.0-beta.51",
62 | "@babel/preset-env": "^7.0.0-beta.51",
63 | "@babel/register": "^7.0.0-beta.51",
64 | "chai": "^4.1.2",
65 | "cross-env": "^5.1.6",
66 | "eslint": "^4.19.1",
67 | "eslint-config-airbnb-base": "^12.1.0",
68 | "eslint-config-prettier": "^4.0.0",
69 | "eslint-plugin-import": "^2.16.0",
70 | "eslint-plugin-prettier": "^3.0.1",
71 | "mocha": "^5.2.0",
72 | "nodemon": "^1.17.5",
73 | "prettier": "^1.16.4",
74 | "rimraf": "^2.6.2",
75 | "husky": "^1.3.1",
76 | "lint-staged": "^8.1.5"
77 | },
78 | "lint-staged": {
79 | "src/**/*.{js,jsx}": [
80 | "eslint --fix",
81 | "git add"
82 | ],
83 | "src/**/*.{json,md,scss,yaml,yml}": [
84 | "prettier --write",
85 | "git add"
86 | ]
87 | },
88 | "husky": {
89 | "hooks": {
90 | "pre-commit": "lint-staged"
91 | }
92 | }
93 | }
94 |
--------------------------------------------------------------------------------
/src/config.js:
--------------------------------------------------------------------------------
1 | const yaml = require('js-yaml');
2 | const fs = require('fs');
3 |
4 | // Environment options
5 | const configFilePath = process.env.SWARM_SYNC_CONFIG_FILE || './config/swarm-sync.yml';
6 |
7 | // Load config file
8 | const config = yaml.safeLoad(fs.readFileSync(configFilePath, 'utf8'))['swarm-sync'] || {};
9 |
10 | // CLI options override
11 | // --bootstrap - run update/deploy only one time, do not start as daemon
12 | config.bootstrap = process.argv.includes('--bootstrap');
13 |
14 | // Validations, defaults, transformations etc
15 | if (config.docker.socketPath && config.docker.host) {
16 | throw new Error(
17 | 'Cannot specify both docker.socketPath & docker.host in configuration.'
18 | );
19 | }
20 |
21 | if (config.docker.host) {
22 | config.docker.url = `${config.docker.protocol || 'http'}://${
23 | config.docker.host
24 | }:${config.docker.port || '2375'}`;
25 | } else {
26 | config.docker.url = `unix://${config.docker.socketPath || '/var/run/docker.sock'}`;
27 | }
28 |
29 | // Create a swarm-pack config to easily pass into swarm-pack
30 | config.swarmpack = {
31 | docker: config.docker,
32 | repositories: config.repositories
33 | };
34 |
35 | module.exports = config;
36 |
--------------------------------------------------------------------------------
/src/configRepo/index.js:
--------------------------------------------------------------------------------
1 | const tmp = require('tmp');
2 | const sh = require('shelljs');
3 | const fs = require('fs-extra');
4 | const yaml = require('js-yaml');
5 | const path = require('path');
6 | const log = require('../utils/logger');
7 | const git = require('../utils/git');
8 | const config = require('../config');
9 | const Stack = require('./stack');
10 |
11 | const tmpDir = tmp.dirSync();
12 | const repoPath = tmpDir.name;
13 | const repo = git(repoPath);
14 |
15 | async function gitCryptUnlock() {
16 | return new Promise((resolve, reject) => {
17 | sh.cd(repoPath);
18 | sh.exec(`git-crypt unlock ${config.git_crypt.keyFile}`, (code, stdout, stderr) => {
19 | if (code === 0) {
20 | resolve();
21 | } else {
22 | reject(new Error(stderr));
23 | }
24 | });
25 | });
26 | }
27 |
28 | async function checkForUpdates() {
29 | repo.cwd(repoPath);
30 |
31 | if (await repo.checkIsRepo()) {
32 | await repo.pull();
33 | } else {
34 | await repo.clone(config.git.url, repoPath);
35 | }
36 |
37 | if (config.git_crypt && config.git_crypt.keyFile) {
38 | await gitCryptUnlock();
39 | }
40 |
41 | const stackDirs = await fs.readdir(path.resolve(repoPath, 'stacks'));
42 | // Stacks in target are defined in config.stacks, and have a corresponding stacks/[stack-name]/stack.yml in the repo
43 | const targetStacks = stackDirs.filter(s => config.stacks.includes(s));
44 | log.info(`Target stacks: ${targetStacks.join(',')}`);
45 |
46 | const stacks = targetStacks.map(
47 | stackName =>
48 | new Stack({
49 | name: stackName,
50 | stackDef: yaml.safeLoad(
51 | fs.readFileSync(
52 | path.resolve(repoPath, 'stacks', stackName, 'stack.yml'),
53 | 'utf8'
54 | )
55 | ),
56 | configRepoPath: repoPath
57 | })
58 | );
59 |
60 | const changedStacks = [];
61 |
62 | for (const stack of stacks) {
63 | const changedStackPacks = await stack.getChanges();
64 | if (changedStackPacks.length) {
65 | changedStacks.push({
66 | stack,
67 | packs: changedStackPacks
68 | });
69 | }
70 | }
71 |
72 | return changedStacks;
73 | }
74 |
75 | module.exports = {
76 | checkForUpdates
77 | };
78 |
--------------------------------------------------------------------------------
/src/configRepo/pack.js:
--------------------------------------------------------------------------------
1 | const { resolve, join } = require('path');
2 | const SwarmPack = require('swarm-pack');
3 | const yaml = require('js-yaml');
4 | const fs = require('fs-extra');
5 | const deepExtend = require('deep-extend');
6 | const oHash = require('object-hash');
7 | const log = require('../utils/logger');
8 | const { updateTagCache, getNewestTagFromCache } = require('../registry');
9 | const { findKeyInObject } = require('../utils');
10 | const config = require('../config');
11 |
12 | const swarmpack = SwarmPack({ config: config.swarmpack });
13 |
14 | class Pack {
15 | constructor({ packDef, stackName, configRepoPath }) {
16 | this.packDef = packDef;
17 | this.values = {};
18 |
19 | log.trace('');
20 | if (packDef.values_file) {
21 | this.values = yaml.safeLoad(
22 | fs.readFileSync(
23 | resolve(configRepoPath, 'stacks', stackName, packDef.values_file),
24 | 'utf8'
25 | )
26 | );
27 | }
28 |
29 | if (packDef.values) {
30 | this.values = deepExtend({}, this.values, packDef.values);
31 | }
32 |
33 | this.pack = packDef.pack;
34 | // Normalize path
35 | if (
36 | !packDef.pack.includes('/') &&
37 | !packDef.pack.includes(':') &&
38 | !packDef.pack.includes('\\')
39 | ) {
40 | // Pack refers to a pack inside the config repo /packs
41 | this.ref = join(configRepoPath, 'packs', packDef.pack);
42 | } else {
43 | this.ref = packDef.pack;
44 | }
45 | }
46 |
47 | async getLastCommit() {
48 | return this.inspect().then(i => i.commit_hash);
49 | }
50 |
51 | async inspect() {
52 | return swarmpack.inspectPack(this.ref);
53 | }
54 |
55 | /**
56 | * Like Helm/Flux we interpret some specific structures in values
57 | * and automatically process them in a specific way
58 | * https://github.com/stefanprodan/gitops-helm
59 | *
60 | * image:
61 | * repository: nginx
62 | * tag: 1.4.1
63 | * tag_pattern: 1.4.*
64 | *
65 | * In this instance tag_pattern will be matched in registry and the latest matching
66 | * tag will replace `tag` in values when passed into swarm-pack
67 | */
68 | async getPreparedValues() {
69 | if (!this.values) return {};
70 | for (const imageDef of findKeyInObject('image', this.values)) {
71 | if (imageDef.tag_pattern) {
72 | await updateTagCache(imageDef.repository, imageDef.tag_pattern);
73 | const newestTag = getNewestTagFromCache(
74 | imageDef.repository,
75 | imageDef.tag_pattern
76 | );
77 | if (newestTag) {
78 | imageDef.tag = newestTag;
79 | } else {
80 | log.warn(
81 | `Didn't find tag matching '${imageDef.tag_pattern}' for ${
82 | imageDef.repository
83 | }, using default '${imageDef.tag}'`
84 | );
85 | }
86 | }
87 | }
88 | return this.values;
89 | }
90 |
91 | async getValuesHash() {
92 | return oHash(await this.getPreparedValues());
93 | }
94 | }
95 |
96 | module.exports = Pack;
97 |
--------------------------------------------------------------------------------
/src/configRepo/stack.js:
--------------------------------------------------------------------------------
1 | const piteration = require('p-iteration');
2 | const log = require('../utils/logger');
3 | const git = require('../utils/git');
4 | const Pack = require('./pack');
5 | const { getDeployedStackPack, getDeployedStack, needsRetry } = require('../state');
6 | const config = require('../config');
7 |
8 | const { filter } = piteration;
9 |
10 | class Stack {
11 | constructor({ name, stackDef, configRepoPath }) {
12 | this.stackDef = stackDef;
13 | this.name = name;
14 | this.git = git(configRepoPath);
15 | let packs = [...stackDef.packs];
16 | // If bootstrap, we try to look for only swarm-sync pack and exclude others
17 | if (config.bootstrap) {
18 | packs = packs.filter(packDef => packDef.pack.includes('swarm-sync'));
19 | log.info('Bootstrap mode - only running the following packs:');
20 | log.info(packs);
21 | }
22 |
23 | // Instantiate packs
24 | this.packs = packs.map(
25 | packDef => new Pack({ packDef, stackName: this.name, configRepoPath })
26 | );
27 | }
28 |
29 | async getLastCommit() {
30 | return this.git
31 | .log({ file: `stacks/${this.name}` })
32 | .then(logs => logs.latest.hash)
33 | .catch(() => undefined);
34 | }
35 |
36 | async getChanges() {
37 | // If stack def changed, return all packs as changed
38 | if (getDeployedStack({ stack: this.name }).commit !== (await this.getLastCommit())) {
39 | log.debug(`Stack (${this.name}) definition changed (git commit)`);
40 | log.trace(`Deployed: ${getDeployedStack({ stack: this.name }).commit}`);
41 | log.trace(`Compared: ${await this.getLastCommit()}`);
42 | return this.packs;
43 | }
44 |
45 | // If stack def didn't change, return a list of individual packs that changed (if any)
46 | return filter(this.packs, async pack => {
47 | try {
48 | const deployedStackDetails = getDeployedStackPack({
49 | stack: this.name,
50 | pack: pack.pack
51 | });
52 |
53 | if ((await pack.getLastCommit()) !== deployedStackDetails.commit) {
54 | log.debug(
55 | `Pack '${pack.pack}' in '${this.name}' stack has changed (git commit)`
56 | );
57 | return true;
58 | }
59 |
60 | if ((await pack.getValuesHash()) !== deployedStackDetails.valuesHash) {
61 | log.debug(
62 | `Values for '${pack.pack}' in '${this.name}' stack have changed (values hash)`
63 | );
64 | return true;
65 | }
66 |
67 | if (needsRetry({ stack: this.name, pack: pack.pack })) {
68 | log.debug(
69 | `Will retry '${pack.pack}' in '${this.name}' due to previous failure`
70 | );
71 | return true;
72 | }
73 | } catch (error) {
74 | log.error(error);
75 | log.error(`Error processing pack ${pack.pack} for changes - skipping`);
76 | return false;
77 | }
78 |
79 | return false;
80 | });
81 | }
82 | }
83 |
84 | module.exports = Stack;
85 |
--------------------------------------------------------------------------------
/src/docker/index.js:
--------------------------------------------------------------------------------
1 | const utf8 = require('utf8');
2 | const Docker = require('dockerode');
3 | const log = require('../utils/logger');
4 | const config = require('../config');
5 |
6 | const client = new Docker({ socketPath: config.docker.socketPath });
7 |
8 | class DockerService {
9 | constructor(serviceDef) {
10 | const [currentImage] = serviceDef.Spec.TaskTemplate.ContainerSpec.Image.split('@');
11 | this.current_image = currentImage;
12 | const [imageRepo, imageTag] = this.current_image.split(':');
13 | this.current_image_repo = imageRepo;
14 | this.current_image_tag = imageTag || 'latest';
15 | this.id = serviceDef.ID;
16 | this.name = serviceDef.Spec.Name;
17 | this.pattern = serviceDef.Spec.Labels['swarm-sync.image-pattern'] || null;
18 |
19 | // dockerode giving us scalar unicode back for some chars.
20 | // so far only observed issue with labels
21 | // decoding here should resolve issue
22 | if (this.pattern) {
23 | this.pattern = utf8.decode(this.pattern);
24 | }
25 | }
26 | }
27 |
28 | async function getManagedServices() {
29 | const serviceData = await client.listServices();
30 | const managedServices = serviceData
31 | .filter(s => s.Spec.Labels['swarm-sync.managed'] === 'true')
32 | .map(s => new DockerService(s));
33 | return managedServices;
34 | }
35 |
36 | async function updateServiceImage(id, image) {
37 | const serviceData = await client.getService(id).inspect();
38 | const update = serviceData.Spec;
39 | update.version = parseInt(serviceData.Version.Index, 10);
40 | update.TaskTemplate.ContainerSpec.Image = image;
41 | update.TaskTemplate.ForceUpdate = 1;
42 | log.info(`Updating service ${id} to image ${image}`);
43 | await client.getService(id).update(update);
44 | }
45 |
46 | module.exports = {
47 | getManagedServices,
48 | updateServiceImage
49 | };
50 |
--------------------------------------------------------------------------------
/src/index.js:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env node
2 | const log = require('./utils/logger');
3 | // Init logging
4 | log.setLevel(process.env.SWARM_SYNC_LOGLEVEL || 2);
5 |
6 | const config = require('./config');
7 | const { checkAndDeployRepo } = require('./sync/configRepo');
8 | const { checkAndUpdateImages } = require('./sync/serviceImages');
9 |
10 | let exit = false; // exit flag for graceful exit
11 | let active = true; // Set to false when 'waiting' between updates, to indicate we can safely exit
12 |
13 | process.on('SIGTERM', () => {
14 | // Received signal to terminate, likely Docker updating the service
15 | log.warn(`Received SIGTERM signal`);
16 | if (active) {
17 | exit = true;
18 | log.warn('Waiting for operations to complete before exit...');
19 | } else {
20 | process.exit(0);
21 | }
22 | });
23 |
24 | async function startUpdates() {
25 | try {
26 | active = true;
27 | if (!exit) await checkAndDeployRepo();
28 | if (!exit) await checkAndUpdateImages();
29 | active = false;
30 | } catch (error) {
31 | log.error('Fatal unhandled exception');
32 | log.error(error);
33 | process.exit(1);
34 | }
35 |
36 | // Exit if we got a SIGTERM earlier
37 | if (exit) {
38 | log.warn('Operations complete, exiting');
39 | process.exit(0);
40 | }
41 |
42 | // Either loop again or exit if bootstrap mode
43 | if (!config.bootstrap) {
44 | log.info(
45 | `\n -- Waiting ${config.updateInterval / 1000} seconds for next scan. -- \n`
46 | );
47 | setTimeout(startUpdates, config.updateInterval);
48 | } else {
49 | log.info('Bootstrap complete');
50 | process.exit(0);
51 | }
52 | }
53 |
54 | startUpdates();
55 |
--------------------------------------------------------------------------------
/src/registry/client.js:
--------------------------------------------------------------------------------
1 | const drc = require('docker-registry-client');
2 | const fs = require('fs-extra');
3 | const yaml = require('js-yaml');
4 | const path = require('path');
5 | const sh = require('shelljs');
6 | const log = require('../utils/logger');
7 | const config = require('../config');
8 | const { retryAsync } = require('../utils');
9 |
10 | const registrySecretsPath = '/run/secrets/registries/';
11 |
/** Registry Client for a particular image reference (promisified docker-registry-client) * */
class RegistryClient {
  /**
   * @param {string} repo - image reference, e.g. 'registry.example.com/org/app'
   * Reads optional credentials from /run/secrets/registries/<registry> (YAML
   * with `username` and `password`), validates them via `docker login`, and
   * passes them to the underlying docker-registry-client.
   */
  constructor(repo) {
    this.repo = repo;
    const repoAndRef = drc.parseRepoAndRef(repo);
    const registry = repoAndRef.index.name;
    const clientConfig = { name: repo };

    // Look for matching secrets
    const secretPath = path.join(registrySecretsPath, registry);
    if (fs.existsSync(secretPath)) {
      log.debug(`Found registry credentials for ${registry}`);
      const auth = yaml.safeLoad(fs.readFileSync(secretPath, 'utf8'));
      if (auth && auth.username && auth.password) {
        // Use `docker login` to test credentials,
        // And to create entry in ~/.docker/config.json which will be used by SP
        // Single-quote both values so shell metacharacters in a password are
        // neither interpreted nor usable for command injection.
        const user = RegistryClient._shellQuote(auth.username);
        const pass = RegistryClient._shellQuote(auth.password);
        if (
          sh.exec(`docker login ${registry} -u ${user} -p ${pass}`, {
            silent: true
          }).code === 0
        ) {
          clientConfig.username = auth.username;
          clientConfig.password = auth.password;
        } else {
          log.warn(`Could not login to ${registry} with credentials`);
        }
      } else {
        log.warn(`Invalid format for ${secretPath}`);
      }
    }

    this.drc = drc.createClientV2(clientConfig);
  }

  /** Quote a value for safe interpolation into a POSIX shell command line. */
  static _shellQuote(value) {
    return `'${String(value).replace(/'/g, "'\\''")}'`;
  }

  /** @returns {Promise<string[]>} tag names published for this repo */
  async listTags() {
    return new Promise((resolve, reject) => {
      this.drc.listTags((err, response) => (err ? reject(err) : resolve(response.tags)));
    });
  }

  /** Fetch a manifest, retrying with a delay between attempts. */
  async getManifest(opts) {
    return retryAsync(
      this._getManifest.bind(this),
      { delay: config.docker.minRegistryReqInterval },
      opts
    );
  }

  async _getManifest(opts) {
    const that = this;
    return new Promise((resolve, reject) => {
      log.info(`fetching manifest for ${opts.ref}`);
      that.drc.getManifest(opts, (err, response) =>
        err ? reject(err) : resolve(response)
      );
    });
  }

  /**
   * @param {{ref: string}} param0 - tag or digest to inspect
   * @returns {Promise<Date|null>} image creation date from the v1 manifest
   *   history, or null when the manifest carries no usable history entry.
   */
  async getCreated({ ref }) {
    const manifest = await this.getManifest({
      ref,
      acceptManifestLists: true,
      maxSchemaVersion: 1
    });
    // Guard against an empty history array as well as a missing one; the
    // previous check dereferenced history[0] and threw on [].
    if (
      manifest &&
      Array.isArray(manifest.history) &&
      manifest.history.length &&
      manifest.history[0].v1Compatibility
    ) {
      const v1Manifest = JSON.parse(manifest.history[0].v1Compatibility);
      return new Date(v1Manifest.created);
    }
    return null;
  }
}
84 |
85 | module.exports = RegistryClient;
86 |
--------------------------------------------------------------------------------
/src/registry/index.js:
--------------------------------------------------------------------------------
1 | const NodeCache = require('node-cache');
2 | const log = require('../utils/logger');
3 | const RegistryClient = require('./client');
4 | const patterns = require('./patterns');
5 | const { wait } = require('../utils');
6 | const config = require('../config');
7 |
8 | const cache = new NodeCache();
9 |
// Namespace the cache key so other kinds of entries could share the cache.
function getCacheKey(repo) {
  return ['tags', repo].join('_');
}
13 |
/**
 * Refresh the cached tag list for `repo` from its registry.
 * New tags are recorded with a `firstSeen` timestamp and — unless the pattern
 * sorts semantically — the image's `created` date, throttling between
 * registry requests. Existing cache entries are kept as-is.
 *
 * @param {string} repo - image repo reference
 * @param {string} [pattern] - optional tag_pattern ('type:pattern') filter
 */
async function updateTagCache(repo, pattern) {
  // TODO - we might need to sanitize "repo" for cachekey as it probably contains illegal chars
  const cacheKey = getCacheKey(repo);
  const tagCache = cache.get(cacheKey) || [];

  const client = new RegistryClient(repo);
  let tagList = await client.listTags();
  const fetchedAt = new Date();

  if (pattern) {
    tagList = tagList.filter(patterns.getFilter(pattern));
  }

  // Set lookup avoids rescanning the whole cache for every fetched tag.
  const knownTags = new Set(tagCache.map(t => t.tag));

  for (const tag of tagList) {
    if (knownTags.has(tag)) continue;

    const tagEntry = {
      tag,
      firstSeen: fetchedAt
    };

    // Registry client frequently giving bad request error on get manifest
    // Adding try/catch to skip for now
    try {
      // Guard the missing-pattern case: isSemanticSort() throws when given
      // undefined, which previously sent every new tag into the catch below
      // and left it permanently uncached.
      if (!pattern || !patterns.isSemanticSort(pattern)) {
        await wait(config.docker.minRegistryReqInterval);
        tagEntry.created = await client.getCreated({ ref: tag });
      }

      tagCache.push(tagEntry);
    } catch (error) {
      log.warn(`Failed getting tag manifest for ${tag} - skipping`);
      log.trace(error);
    }
  }
  cache.set(cacheKey, tagCache);
}
51 |
// Return cached tag entries for a repo, optionally narrowed by tag_pattern.
function getCachedTags(repo, pattern) {
  const entries = cache.get(getCacheKey(repo)) || [];
  if (!pattern) {
    return entries;
  }
  return entries.filter(patterns.getFilter(pattern));
}
56 |
// Newest tag name matching `pattern` according to the pattern's own sort
// order, or null when nothing matches.
function getNewestTagFromCache(repo, pattern) {
  const matches = getCachedTags(repo, pattern);
  matches.sort(patterns.getSort(pattern));
  return matches.length ? matches[matches.length - 1].tag : null;
}
66 |
67 | module.exports = {
68 | updateTagCache,
69 | getNewestTagFromCache,
70 | getCachedTags
71 | };
72 |
--------------------------------------------------------------------------------
/src/registry/patterns.js:
--------------------------------------------------------------------------------
1 | const minimatch = require('minimatch');
2 | const compareVersions = require('compare-versions');
3 | const semver = require('semver');
4 |
5 | /**
6 | * TODO - this will probably need a good refactor eventually
7 | * for now, just consolidating the code around tag patterns into one place
8 | *
9 | * Types of pattern:
10 | * `glob` - "Unix style pathname expression, e.g. dev*"
11 | * `semver` - "Semantic versioning pattern, e.g. v1.0.*"
12 | * `literal` - "Exact match, e.g. latest"
13 | */
14 |
/**
 * Split a tag_pattern such as 'semver:v1.0.*' into { type, pattern }.
 * Splits on the FIRST colon only, so patterns that themselves contain a
 * colon (e.g. 'glob:foo:bar') keep everything after the type prefix —
 * String.split(':') used to truncate those.
 * @throws when there is no colon or the pattern part is empty.
 */
function splitTypeAndPattern(tagPattern) {
  const sep = tagPattern.indexOf(':');
  const type = sep === -1 ? tagPattern : tagPattern.slice(0, sep);
  const pattern = sep === -1 ? undefined : tagPattern.slice(sep + 1);
  if (!pattern) {
    throw Error(`tag_pattern '${tagPattern}' looks wrong`);
  }
  return { type, pattern };
}

// Does this particular pattern sort semantically
// i.e. doesn't care about created timestamp
function isSemanticSort(tagPattern) {
  return splitTypeAndPattern(tagPattern).type === 'semver';
}
28 |
/**
 * Filter factories keyed by pattern type. Each factory takes the pattern
 * string and returns a predicate accepting either a tag string or an object
 * with a .tag property.
 */
const filters = {
  semver: pattern => tag => {
    const cleaned = semver.clean(tag.tag || tag);
    // Unparseable tags are dropped; parseable ones must satisfy the range.
    if (cleaned === null) {
      return false;
    }
    return semver.satisfies(cleaned, pattern);
  },
  glob: pattern => tag => minimatch(tag.tag || tag, pattern),
  literal: pattern => tag => (tag.tag || tag) === pattern
};
39 |
/**
 * Build a predicate for a tag_pattern such as 'semver:v1.0.*' or 'glob:*'.
 * The predicate accepts a tag string or an object with a .tag property.
 * An unknown pattern type yields `false` instead of a function.
 */
function getFilter(tagPattern) {
  const { type, pattern } = splitTypeAndPattern(tagPattern);
  const factory = filters[type];
  if (!factory) {
    return false;
  }
  return factory(pattern);
}
48 |
// Comparator for a given tag_pattern. For consistent results the comparator
// never returns 0 — ties are broken lexically on the tag name.
function getSort(tagPattern) {
  const { type } = splitTypeAndPattern(tagPattern);
  if (type === 'semver') {
    // Order by semantic version, oldest first.
    return (a, b) => compareVersions(a.tag, b.tag) || (a.tag > b.tag ? 1 : -1);
  }
  // Order by image created timestamp, oldest first.
  return (a, b) => a.created - b.created || (a.tag > b.tag ? 1 : -1);
}
57 |
58 | module.exports = {
59 | getFilter,
60 | getSort,
61 | isSemanticSort
62 | };
63 |
--------------------------------------------------------------------------------
/src/state/index.js:
--------------------------------------------------------------------------------
1 | const low = require('lowdb');
2 | const FileSync = require('lowdb/adapters/FileSync');
3 | const Memory = require('lowdb/adapters/Memory');
4 | const log = require('../utils/logger');
5 |
// Path to the persistent state file; falsy means keep state in memory only.
const stateStoragePath = process.env.SWARM_SYNC_STATE_FILE || false;

// lowdb adapter: file-backed when a state path is configured, in-memory
// otherwise. Reuse stateStoragePath instead of re-reading the env var so the
// two reads can never disagree.
const opts = { defaultValue: { stacks: {} } };
const adapter = stateStoragePath
  ? new FileSync(stateStoragePath, opts)
  : new Memory(null, opts);

const db = low(adapter);
db.read();
18 |
// Create an empty record for `stack` on first use.
function _ensureStackInDB({ stack }) {
  const key = `stacks.${stack}`;
  if (!db.has(key).value()) {
    db.set(key, { commit: '', packs: {} }).write();
  }
}
24 |
// Create an empty record for `pack` under `stack` on first use.
function _ensureStackPackInDB({ stack, pack }) {
  _ensureStackInDB({ stack });
  const key = `stacks.${stack}.packs.${pack}`;
  if (db.has(key).value()) {
    return;
  }
  db.set(key, {
    commit: '',
    valuesHash: '',
    failures: 0
  }).write();
}
35 |
// Record the config-repo commit currently deployed for a stack.
function setDeployedStack({ stack, commit }) {
  _ensureStackInDB({ stack });
  const key = `stacks.${stack}.commit`;
  db.set(key, commit).write();
  log.trace('setDeployedStack', stack, commit);
}
41 |
// Fetch the persisted record for a stack, creating a blank one if absent.
function getDeployedStack({ stack }) {
  _ensureStackInDB({ stack });
  const record = db.get(`stacks.${stack}`);
  return record.value();
}
46 |
// Persist a successful pack deployment and reset its failure counter.
function setDeployedStackPack({ stack, pack, commit, valuesHash }) {
  _ensureStackInDB({ stack });
  const entry = { commit, valuesHash, failures: 0 };
  db.set(`stacks.${stack}.packs.${pack}`, entry).write();
  log.trace('setDeployedStackPack', stack, pack, commit, valuesHash);
}
52 |
// Fetch the persisted record for a pack, creating a blank one if absent.
function getDeployedStackPack({ stack, pack }) {
  _ensureStackPackInDB({ stack, pack });
  const entry = db.get(`stacks.${stack}.packs.${pack}`);
  return entry.value();
}
57 |
// Bump the failure counter so the pack is redeployed next cycle.
function markStackPackForRetry({ stack, pack }) {
  _ensureStackPackInDB({ stack, pack });
  const counterPath = `stacks.${stack}.packs.${pack}.failures`;
  db.update(counterPath, count => count + 1).write();
  log.trace('markStackPackForRetry', stack, pack);
}
63 |
// A pack needs a retry when its failure counter is positive.
function needsRetry({ stack, pack }) {
  // TODO - we could put a limit to the retry count here...
  const failures = db.get(`stacks.${stack}.packs.${pack}.failures`).value();
  return failures > 0;
}
68 |
69 | module.exports = {
70 | getDeployedStackPack,
71 | getDeployedStack,
72 | setDeployedStack,
73 | setDeployedStackPack,
74 | markStackPackForRetry,
75 | needsRetry
76 | };
77 |
--------------------------------------------------------------------------------
/src/sync/configRepo.js:
--------------------------------------------------------------------------------
1 | const log = require('../utils/logger');
2 | const config = require('../config');
3 | const { checkForUpdates } = require('../configRepo');
4 | const {
5 | setDeployedStack,
6 | setDeployedStackPack,
7 | markStackPackForRetry
8 | } = require('../state');
9 | const swarmpack = require('swarm-pack')({ config: config.swarmpack });
10 |
/**
 * Poll the config repo and redeploy any stacks whose packs have changed.
 * A pack that fails to deploy is logged and marked for retry on the next
 * cycle instead of aborting the run.
 */
async function checkAndDeployRepo() {
  log.info('Polling git repository for changes');
  const changedStacks = await checkForUpdates();
  if (!changedStacks.length) {
    log.info('No changes in config repository to deploy');
    return;
  }

  log.info(`Changes found, redeploying ${changedStacks.length} stacks`);
  for (const changedStack of changedStacks) {
    const stackName = changedStack.stack.name;
    for (const pack of changedStack.packs) {
      // Declared outside the try so the catch block can log the values
      // without re-calling getPreparedValues(): that call may itself be what
      // threw, and throwing inside the catch would escape it and turn a
      // per-pack failure into a fatal error for the whole sync cycle.
      let values;
      try {
        values = await pack.getPreparedValues();
        log.debug(
          `Running equivalent to: swarm-pack deploy ${pack.ref} ${stackName}`
        );

        await swarmpack.compileAndDeploy({
          stack: stackName,
          packRef: pack.ref,
          values
        });

        setDeployedStackPack({
          stack: stackName,
          pack: pack.pack,
          commit: await pack.getLastCommit(),
          valuesHash: await pack.getValuesHash()
        });
      } catch (error) {
        log.error(error);
        log.error(
          `\nFailed deploying ${pack.ref}. Will not mark as updated and will retry next cycle.`
        );

        if (values !== undefined) {
          log.debug('values: ', values);
        }

        // Mark pack as needing retry
        markStackPackForRetry({ stack: stackName, pack: pack.pack });
      }
    }
    setDeployedStack({
      stack: stackName,
      commit: await changedStack.stack.getLastCommit()
    });
  }
}
61 |
62 | module.exports = {
63 | checkAndDeployRepo
64 | };
65 |
--------------------------------------------------------------------------------
/src/sync/serviceImages.js:
--------------------------------------------------------------------------------
1 | const log = require('../utils/logger');
2 | const { getManagedServices, updateServiceImage } = require('../docker');
3 | const { updateTagCache, getNewestTagFromCache } = require('../registry');
4 |
/**
 * For every swarm-sync managed service that declares a tag pattern, refresh
 * the registry tag cache and roll the service forward to the newest matching
 * tag when it differs from the currently deployed one.
 */
async function checkAndUpdateImages() {
  const managedServices = await getManagedServices();
  if (!managedServices.length) {
    log.info('No swarm-sync managed services found in swarm');
    return;
  }
  log.info(`Found ${managedServices.length} swarm-sync managed services`);
  log.debug(managedServices.map(s => s.name).join(', '));

  for (const service of managedServices) {
    // Services without a tag pattern are not eligible for automatic updates.
    if (!service.pattern) continue;

    const repo = service.current_image_repo;
    await updateTagCache(repo, service.pattern);
    const newestTag = getNewestTagFromCache(repo, service.pattern);

    log.trace(
      `Newest matching tag for '${service.name}' found is '${newestTag}' (current tag '${service.current_image_tag}')`
    );

    if (newestTag && newestTag !== service.current_image_tag) {
      await updateServiceImage(service.id, `${repo}:${newestTag}`);
    }
  }
}
37 |
38 | module.exports = {
39 | checkAndUpdateImages
40 | };
41 |
--------------------------------------------------------------------------------
/src/utils/git.js:
--------------------------------------------------------------------------------
1 | const simpleGit = require('simple-git/promise');
2 |
// SSH options forced onto every git invocation: host-key verification is
// disabled so unattended clones of the config repo never block on first
// contact. NOTE(review): a known_hosts file ships with the image — confirm
// strict checking was meant to stay off.
const GIT_SSH_COMMAND = 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no';

// Wrap simple-git so GIT_SSH_COMMAND rides along in the child environment.
function git(path) {
  const env = Object.assign({}, process.env, { GIT_SSH_COMMAND });
  return simpleGit(path).env(env);
}
8 |
9 | module.exports = git;
10 |
--------------------------------------------------------------------------------
/src/utils/index.js:
--------------------------------------------------------------------------------
/**
 * Resolve (with no value) after `ms` milliseconds — a promisified setTimeout.
 */
async function wait(ms) {
  return new Promise(resolve => {
    setTimeout(resolve, ms);
  });
}
7 |
// Returns a rejection handler that re-rejects with the same reason after
// `delay` milliseconds (used to pace retries in a promise chain).
function rejectDelay(delay) {
  return async reason =>
    new Promise((resolve, reject) => {
      setTimeout(() => reject(reason), delay);
    });
}

/**
 * Retry an async function up to `maxTries` times with a delay before every
 * attempt (including the initial one).
 * retryAsync(myFunction, { maxTries: 5, delay: 2000 }, arg1, arg2,...)
 * Resolves with the first successful result; rejects with the last error.
 */
async function retryAsync(fn, opts, ...args) {
  const options = Object.assign({ maxTries: 2, delay: 2000 }, opts);
  let lastError;
  for (let attempt = 0; attempt < options.maxTries; attempt += 1) {
    // Pause before each attempt, matching the original chain's pacing.
    await new Promise(resolve => setTimeout(resolve, options.delay));
    try {
      return await fn(...args);
    } catch (error) {
      lastError = error;
    }
  }
  throw lastError;
}
28 |
/**
 * Collect every value stored under `key` anywhere in `object`, searching
 * nested plain objects (arrays are not descended into). A matched branch is
 * not searched further, and no particular order is guaranteed.
 */
function findKeyInObject(key, object) {
  return Object.entries(object).flatMap(([entryKey, entryValue]) => {
    if (entryKey === key) {
      return [entryValue];
    }
    const isPlainObject =
      typeof entryValue === 'object' &&
      entryValue !== null &&
      !Array.isArray(entryValue);
    return isPlainObject ? findKeyInObject(key, entryValue) : [];
  });
}
49 |
50 | module.exports = { wait, retryAsync, findKeyInObject };
51 |
--------------------------------------------------------------------------------
/src/utils/logger.js:
--------------------------------------------------------------------------------
1 | const levels = {
2 | TRACE: 0,
3 | DEBUG: 1,
4 | INFO: 2,
5 | WARN: 3,
6 | ERROR: 4,
7 | SILENT: 5
8 | };
9 |
10 | let currentLevel = 2;
11 |
12 | function _log(level, ...args) {
13 | if (level >= currentLevel) {
14 | console.log(...args);
15 | }
16 | }
17 |
18 | module.exports = {
19 | setLevel: level => {
20 | if (typeof levels[level] === 'number') {
21 | currentLevel = levels[level];
22 | } else {
23 | currentLevel = !Number.isNaN(parseInt(level, 10))
24 | ? parseInt(level, 10)
25 | : currentLevel;
26 | }
27 | },
28 | trace: (...args) => _log(levels.TRACE, ...args),
29 | debug: (...args) => _log(levels.DEBUG, ...args),
30 | info: (...args) => _log(levels.INFO, ...args),
31 | warn: (...args) => _log(levels.WARN, ...args),
32 | error: (...args) => _log(levels.ERROR, ...args)
33 | };
34 |
--------------------------------------------------------------------------------
/start.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Container entrypoint: load secrets, install the SSH deploy key (if any),
# then hand the process over to the Node app.

echo "LOADING SECRETS..."
# Sourced (not executed) so any variables it exports persist in this shell.
source env_secrets_expand.sh

# SSH_PRIVATE_KEY defaults to the literal string "false" when no key is
# provided — presumably set in the image; TODO confirm against the Dockerfile.
if [ "$SSH_PRIVATE_KEY" != "false" ]
then
  echo "CREATE ID_RSA FROM SSH_PRIVATE_KEY"
  # -e turns the \n escapes embedded in the env var into real newlines.
  echo -e "$SSH_PRIVATE_KEY" > /root/.ssh/id_rsa
  chmod 600 /root/.ssh/id_rsa
fi

echo "START THE APP..."
# exec replaces the shell so node receives signals (e.g. SIGTERM) directly.
exec node src/index.js "$@"
--------------------------------------------------------------------------------