├── src
│   ├── __spec__
│   │   ├── fixtures
│   │   │   ├── .gitignore
│   │   │   ├── icon.png
│   │   │   └── icon2.png
│   │   ├── rest.ts
│   │   ├── healthcheck_spec.ts
│   │   ├── _helpers.ts
│   │   ├── channel_spec.ts
│   │   └── webhook_spec.ts
│   ├── files
│   │   ├── utils
│   │   │   ├── sha.ts
│   │   │   ├── __spec__
│   │   │   │   ├── sha_spec.ts
│   │   │   │   ├── tmp_spec.ts
│   │   │   │   └── p-queue_spec.ts
│   │   │   ├── tmp.ts
│   │   │   ├── spawn.ts
│   │   │   ├── sync.ts
│   │   │   ├── p-queue.ts
│   │   │   ├── gpg.ts
│   │   │   ├── win32.ts
│   │   │   ├── darwin.ts
│   │   │   ├── yum.ts
│   │   │   └── apt.ts
│   │   ├── store.ts
│   │   ├── local
│   │   │   └── LocalStore.ts
│   │   ├── s3
│   │   │   ├── CloudFrontBatchInvalidator.ts
│   │   │   └── S3Store.ts
│   │   └── __spec__
│   │       └── LocalStore_spec.ts
│   ├── db
│   │   ├── driver.ts
│   │   ├── BaseDriver.ts
│   │   └── sequelize
│   │       └── models
│   │           └── index.ts
│   ├── utils
│   │   └── a.ts
│   ├── rest
│   │   ├── auth-strategy
│   │   │   ├── index.ts
│   │   │   ├── github.ts
│   │   │   ├── _types.ts
│   │   │   ├── local.ts
│   │   │   └── openid.ts
│   │   ├── admin.ts
│   │   ├── _helpers.ts
│   │   ├── auth.ts
│   │   ├── migration.ts
│   │   └── WebHook.ts
│   ├── migrations
│   │   ├── index.ts
│   │   ├── BaseMigration.ts
│   │   ├── file-sha
│   │   │   └── FileSHAMigration.ts
│   │   ├── file-index
│   │   │   ├── FileIndexMigration.ts
│   │   │   └── __spec__
│   │   │       └── FileIndexMigration_spec.ts
│   │   └── latest-installer
│   │       ├── LatestInstallerMigration.ts
│   │       └── __spec__
│   │           └── LatestInstallerMigration_spec.ts
│   ├── config.ts
│   └── index.ts
├── docs
│   ├── images
│   │   └── arch.png
│   ├── Architecture.md
│   ├── Endpoints.md
│   ├── Uploading.md
│   ├── API.md
│   ├── Staged Rollouts.md
│   ├── Latest Releases.md
│   └── WebHooks.md
├── public
│   ├── favicon.png
│   ├── components
│   │   ├── HelpHeader.scss
│   │   ├── AppList.scss
│   │   ├── WebHookLogsModal.scss
│   │   ├── PageWrapper.scss
│   │   ├── PageLoader.tsx
│   │   ├── CreateAppModal.scss
│   │   ├── PageLoader.scss
│   │   ├── AppCard.scss
│   │   ├── MigrationList.scss
│   │   ├── WebHookManagement.scss
│   │   ├── Highlight.tsx
│   │   ├── AppCard.tsx
│   │   ├── HelpHeader.tsx
│   │   ├── UserDropDown.tsx
│   │   ├── AppList.tsx
│   │   ├── AppPage.scss
│   │   ├── WebHookLogsModal.tsx
│   │   ├── ChannelVersionList.scss
│   │   ├── PageWrapper.tsx
│   │   ├── CreateAppModal.tsx
│   │   └── WebHookManagement.tsx
│   ├── App.scss
│   ├── actions
│   │   ├── base.ts
│   │   ├── migrations.ts
│   │   ├── user.ts
│   │   └── apps.ts
│   ├── reducers
│   │   ├── base.ts
│   │   ├── index.ts
│   │   ├── apps.ts
│   │   ├── migrations.ts
│   │   └── user.ts
│   ├── template.html
│   ├── index.tsx
│   ├── typings
│   │   └── index.d.ts
│   ├── App.tsx
│   └── assets
│       └── Logo.tsx
├── .dockerignore
├── .circleci
│   ├── Dockerfile
│   ├── publish-nucleus-docker.sh
│   ├── publish-runner-docker.sh
│   └── config.yml
├── .npmignore
├── .gitignore
├── tslint.json
├── tsconfig.test.json
├── tsconfig.public.json
├── postcss.config.js
├── tsconfig.json
├── LICENSE
├── .travis.yml
├── Dockerfile
├── .cz.js
├── webpack.production.config.js
├── webpack.config.js
├── webpack.loaders.js
├── CODE_OF_CONDUCT.md
├── typings
│   └── index.d.ts
├── README.md
├── package.json
└── config.template.js
/src/__spec__/fixtures/.gitignore:
--------------------------------------------------------------------------------
1 | .files
2 | test.sqlite
--------------------------------------------------------------------------------
/docs/images/arch.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atlassian/nucleus/HEAD/docs/images/arch.png
--------------------------------------------------------------------------------
/public/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atlassian/nucleus/HEAD/public/favicon.png
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | .files
2 | .vscode
3 | .git
4 | lib
5 | public_out
6 | scripts
7 | *.log
8 | node_modules
--------------------------------------------------------------------------------
/src/__spec__/fixtures/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atlassian/nucleus/HEAD/src/__spec__/fixtures/icon.png
--------------------------------------------------------------------------------
/docs/Architecture.md:
--------------------------------------------------------------------------------
1 | # Nucleus Architecture
2 |
3 | ## Diagram
4 |
5 | ![Nucleus architecture diagram](images/arch.png)
6 |
--------------------------------------------------------------------------------
/src/__spec__/fixtures/icon2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/atlassian/nucleus/HEAD/src/__spec__/fixtures/icon2.png
--------------------------------------------------------------------------------
/public/components/HelpHeader.scss:
--------------------------------------------------------------------------------
1 | .helpContainer {
2 | display: flex;
3 | align-items: center;
4 |
5 | .title {
6 | margin-right: 4px;
7 | }
8 | }
--------------------------------------------------------------------------------
/.circleci/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM circleci/node:8.12
2 |
3 | RUN sudo apt update && sudo apt install createrepo dpkg-dev apt-utils gnupg2 gzip -y && sudo rm -rf /var/lib/apt/lists/*
4 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | .files
2 | .vscode
3 | .git
4 | node_modules
5 | public
6 | scripts
7 | src
8 | typings
9 | /config.js
10 | /config.*.js
11 | *.sh
12 | *.yml
13 | db.sqlite
--------------------------------------------------------------------------------
/public/App.scss:
--------------------------------------------------------------------------------
1 | .loader {
2 | display: flex;
3 | justify-content: center;
4 | align-items: center;
5 | height: 100vh;
6 | width: 100vw;
7 | position: fixed;
8 | }
--------------------------------------------------------------------------------
/public/components/AppList.scss:
--------------------------------------------------------------------------------
1 | .noApps {
2 | height: calc(100vh - 200px);
3 | display: flex;
4 | align-items: center;
5 | justify-content: center;
6 | flex-direction: column;
7 | }
--------------------------------------------------------------------------------
/public/actions/base.ts:
--------------------------------------------------------------------------------
1 | export const SET_BASE_UPDATE_URL = 'SET_BASE_UPDATE_URL';
2 |
3 | export const setBaseUpdateUrl = (url: string) => ({
4 | url,
5 | type: SET_BASE_UPDATE_URL,
6 | });
7 |
--------------------------------------------------------------------------------
/public/components/WebHookLogsModal.scss:
--------------------------------------------------------------------------------
1 | .error {
2 | border-bottom: 1px solid #ccc;
3 | padding-bottom: 12px;
4 |
5 | &:last-child {
6 | border-bottom: 0;
7 | padding-bottom: 0;
8 | }
9 | }
--------------------------------------------------------------------------------
/public/actions/migrations.ts:
--------------------------------------------------------------------------------
1 | export const SET_MIGRATIONS = 'SET_MIGRATIONS';
2 |
3 | export const setMigrations = (migrations: NucleusMigration[]) => ({
4 | migrations,
5 | type: SET_MIGRATIONS,
6 | });
7 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | lib
2 | node_modules
3 | .filestore
4 | npm-debug.log*
5 | config.js
6 | config.*.js
7 | !config.template.js
8 | certs
9 | .vscode
10 | public_out
11 | *.log
12 | .files
13 | test_out
14 | db.sqlite
--------------------------------------------------------------------------------
/tslint.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "tslint-config-airbnb",
3 | "rules": {
4 | "max-line-length": false,
5 | "align": false,
6 | "import-name": false,
7 | "strict-boolean-expressions": false
8 | }
9 | }
--------------------------------------------------------------------------------
/src/files/utils/sha.ts:
--------------------------------------------------------------------------------
1 | import * as crypto from 'crypto';
2 |
3 | export const generateSHAs = (buffer: Buffer): HashSet => ({
4 | sha1: crypto.createHash('SHA1').update(buffer).digest('hex'),
5 | sha256: crypto.createHash('SHA256').update(buffer).digest('hex'),
6 | });
7 |
--------------------------------------------------------------------------------
/public/actions/user.ts:
--------------------------------------------------------------------------------
1 | export const LOG_OUT = 'LOG_OUT';
2 | export const SET_USER = 'SET_USER';
3 |
4 | export const setUser = (user: User) => ({
5 | user,
6 | type: SET_USER,
7 | });
8 |
9 | export const logOut = () => ({
10 | type: LOG_OUT,
11 | });
12 |
--------------------------------------------------------------------------------
/src/__spec__/rest.ts:
--------------------------------------------------------------------------------
1 | import * as chai from 'chai';
2 |
3 | const chaiHttp = require('chai-http');
4 |
5 | chai.use(chaiHttp);
6 |
7 | describe('Rest API', () => {
8 | require('./healthcheck_spec');
9 | require('./app_spec');
10 | require('./channel_spec');
11 | require('./webhook_spec');
12 | });
13 |
--------------------------------------------------------------------------------
/public/reducers/base.ts:
--------------------------------------------------------------------------------
1 | import * as actions from '../actions/base';
2 |
3 | const INITIAL_STATE = '';
4 |
5 | export const base = (state: string = INITIAL_STATE, action) => {
6 | switch (action.type) {
7 | case actions.SET_BASE_UPDATE_URL:
8 | return action.url;
9 | }
10 | return state;
11 | };
12 |
--------------------------------------------------------------------------------
/public/reducers/index.ts:
--------------------------------------------------------------------------------
1 | import { combineReducers } from 'redux';
2 |
3 | import { user } from './user';
4 | import { apps } from './apps';
5 | import { base } from './base';
6 | import { migrations } from './migrations';
7 |
8 | export default combineReducers({
9 | user,
10 | apps,
11 | base,
12 | migrations,
13 | });
14 |
--------------------------------------------------------------------------------
/tsconfig.test.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "commonjs",
4 | "target": "es6",
5 | "outDir": "test_out",
6 | "lib": [
7 | "es6",
8 | "dom"
9 | ],
10 | "sourceMap": true,
11 | "jsx": "react",
12 | "experimentalDecorators": true
13 | },
14 | "exclude": [
15 | "node_modules",
16 | "public"
17 | ]
18 | }
--------------------------------------------------------------------------------
/public/components/PageWrapper.scss:
--------------------------------------------------------------------------------
1 | .pageWrapper {
2 | display: flex;
3 |
4 | .navContainer {
5 | a, a:hover {
6 | text-decoration: none;
7 | }
8 | flex: 0;
9 | }
10 |
11 | .pageContainer {
12 | flex: 1;
13 | padding: 16px;
14 | width: calc(100vw - 304px);
15 | box-sizing: border-box;
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/tsconfig.public.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "commonjs",
4 | "target": "es5",
5 | "outDir": "lib",
6 | "lib": [
7 | "es6",
8 | "dom"
9 | ],
10 | "sourceMap": true,
11 | "rootDir": "public",
12 | "jsx": "react",
13 | "experimentalDecorators": true
14 | },
15 | "exclude": [
16 | "node_modules",
17 | "src"
18 | ]
19 | }
--------------------------------------------------------------------------------
/src/db/driver.ts:
--------------------------------------------------------------------------------
1 | import SequelizeDriver from './sequelize/SequelizeDriver';
2 |
3 | import { dbStrategy } from '../config';
4 | import BaseDriver from './BaseDriver';
5 |
6 | let driver: BaseDriver;
7 |
8 | switch (dbStrategy) {
9 | case 'sequelize':
10 | default:
11 | driver = new SequelizeDriver();
12 | break;
13 | }
14 |
15 | export default driver;
16 |
--------------------------------------------------------------------------------
/postcss.config.js:
--------------------------------------------------------------------------------
1 | const autoprefixer = require('autoprefixer');
2 |
3 | const AUTOPREFIXER_BROWSERS = [
4 | 'Android 2.3',
5 | 'Android >= 4',
6 | 'Chrome >= 35',
7 | 'Firefox >= 31',
8 | 'Explorer >= 9',
9 | 'iOS >= 7',
10 | 'Opera >= 12',
11 | 'Safari >= 7.1',
12 | ];
13 |
14 | module.exports = {
15 | plugins: [
16 | autoprefixer({ browsers: AUTOPREFIXER_BROWSERS }),
17 | ],
18 | };
19 |
--------------------------------------------------------------------------------
/src/files/store.ts:
--------------------------------------------------------------------------------
1 | import S3Store from './s3/S3Store';
2 | import LocalStore from './local/LocalStore';
3 |
4 | import { fileStrategy } from '../config';
5 |
6 | let store: IFileStore;
7 |
8 | switch (fileStrategy) {
9 | case 's3':
10 | store = new S3Store();
11 | break;
12 | case 'local':
13 | default:
14 | store = new LocalStore();
15 | }
16 |
17 | export default store;
18 |
--------------------------------------------------------------------------------
/public/template.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Nucleus
6 |
7 |
8 |
9 |
10 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/src/utils/a.ts:
--------------------------------------------------------------------------------
1 | import * as express from 'express';
2 | import * as debug from 'debug';
3 |
4 | export const createA = (d: debug.IDebugger) => (handler: express.RequestHandler): express.RequestHandler => async (req, res, next) => {
5 | try {
6 | await handler(req, res, next);
7 | } catch (err) {
8 | d(`Unhandled error: ${req.url}`);
9 | d(err);
10 | res.status(500).json({ error: 'Something went wrong...' });
11 | }
12 | };
13 |
--------------------------------------------------------------------------------
/public/actions/apps.ts:
--------------------------------------------------------------------------------
1 | export const SET_APPS = 'SET_PLUGINS';
2 |
3 | const APP_REST_ENDPOINT = '/rest/app';
4 |
5 | export const fetchApps = async (): Promise<NucleusApp[]> => {
6 | const apps: any[] = await (await fetch(APP_REST_ENDPOINT, { credentials: 'include' })).json();
7 | return apps.map((app) => {
8 | return app;
9 | });
10 | };
11 |
12 | export const setApps = (apps: NucleusApp[]) => ({
13 | apps,
14 | type: SET_APPS,
15 | });
16 |
--------------------------------------------------------------------------------
/src/rest/auth-strategy/index.ts:
--------------------------------------------------------------------------------
1 | import { useGitHub } from './github';
2 | import { useOpenID } from './openid';
3 | import { useLocal } from './local';
4 |
5 | import { authStrategy } from '../../config';
6 |
7 | export const initializeStrategy = () => {
8 | switch (authStrategy) {
9 | case 'openid':
10 | return useOpenID();
11 | case 'github':
12 | return useGitHub();
13 | case 'local':
14 | default:
15 | return useLocal();
16 | }
17 | };
18 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "commonjs",
4 | "target": "es6",
5 | "outDir": "lib",
6 | "lib": [
7 | "es6",
8 | "dom",
9 | "es7"
10 | ],
11 | "sourceMap": true,
12 | "rootDir": "src",
13 | "jsx": "react",
14 | "experimentalDecorators": true,
15 | "strictNullChecks": true,
16 | "noUnusedLocals": true,
17 | "noImplicitThis": true,
18 | "noImplicitAny": true
19 | },
20 | "exclude": [
21 | "node_modules",
22 | "public"
23 | ]
24 | }
--------------------------------------------------------------------------------
/public/components/PageLoader.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 |
3 | import AkSpinner from '@atlaskit/spinner';
4 |
5 | import * as styles from './PageLoader.scss';
6 |
7 | export default class PageLoader extends React.PureComponent<{ visible: boolean }, {}> {
8 | render() {
9 | return (
10 |
13 | );
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/public/components/CreateAppModal.scss:
--------------------------------------------------------------------------------
1 | .file {
2 | width: 100%;
3 |
4 | > * {
5 | width: 100%;
6 | }
7 |
8 | input {
9 | width: 100%;
10 | outline: 0;
11 | }
12 | }
13 |
14 | .loaderWrapper {
15 | position: relative;
16 |
17 | .loader {
18 | position: absolute;
19 | z-index: 999;
20 | background: rgba(255, 255, 255, 0.7);
21 | width: 100%;
22 | height: 100%;
23 | display: flex;
24 | align-items: center;
25 | justify-content: center;
26 | }
27 | }
--------------------------------------------------------------------------------
/public/reducers/apps.ts:
--------------------------------------------------------------------------------
1 | import * as actions from '../actions/apps';
2 |
3 | const INITIAL_STATE = null;
4 |
5 | export const apps = (state: AppsSubState = INITIAL_STATE, action) => {
6 | let newState: AppsSubState = state;
7 | switch (action.type) {
8 | case actions.SET_APPS:
9 | if (Array.isArray(action.apps)) {
10 | newState = action.apps;
11 | } else {
12 | newState = state.concat(action.apps);
13 | }
14 | break;
15 | }
16 | return newState;
17 | };
18 |
--------------------------------------------------------------------------------
/public/reducers/migrations.ts:
--------------------------------------------------------------------------------
1 | import * as actions from '../actions/migrations';
2 |
3 | const INITIAL_STATE = {
4 | items: [],
5 | hasPendingMigration: false,
6 | };
7 |
8 | export const migrations = (state: MigrationSubState = INITIAL_STATE, action) => {
9 | switch (action.type) {
10 | case actions.SET_MIGRATIONS:
11 | return {
12 | items: action.migrations,
13 | hasPendingMigration: !!action.migrations.find(migration => !migration.complete),
14 | };
15 | }
16 | return state;
17 | };
18 |
--------------------------------------------------------------------------------
/public/components/PageLoader.scss:
--------------------------------------------------------------------------------
1 | .spinnerContainer {
2 | position: fixed;
3 | z-index: 999;
4 | background: rgba(255, 255, 255, 0.7);
5 | top: 0;
6 | left: 304px;
7 | width: calc(100vw - 304px);
8 | height: 100vh;
9 | display: flex;
10 | justify-content: center;
11 | align-items: center;
12 | opacity: 0;
13 | pointer-events: none;
14 | transition: opacity 0.4s linear;
15 |
16 | &.visible {
17 | opacity: 1;
18 | pointer-events: all;
19 | }
20 | }
--------------------------------------------------------------------------------
/src/migrations/index.ts:
--------------------------------------------------------------------------------
1 | import { MigrationStore } from './BaseMigration';
2 | import FileIndexMigration from './file-index/FileIndexMigration';
3 | import LatestInstallerMigration from './latest-installer/LatestInstallerMigration';
4 | import FileSHAMigration from './file-sha/FileSHAMigration';
5 |
6 | export const registerMigrations = async () => {
7 | await MigrationStore.register(new FileIndexMigration());
8 | await MigrationStore.register(new LatestInstallerMigration());
9 | await MigrationStore.register(new FileSHAMigration());
10 | };
11 |
--------------------------------------------------------------------------------
/public/reducers/user.ts:
--------------------------------------------------------------------------------
1 | import * as actions from '../actions/user';
2 |
3 | const INITIAL_STATE = {
4 | signedIn: false,
5 | };
6 |
7 | export const user = (state: UserSubState = INITIAL_STATE, action) => {
8 | let newState: UserSubState = state;
9 | switch (action.type) {
10 | case actions.LOG_OUT:
11 | newState = INITIAL_STATE;
12 | break;
13 | case actions.SET_USER:
14 | newState = {
15 | user: action.user,
16 | signedIn: !!action.user,
17 | };
18 | break;
19 | }
20 | return newState;
21 | };
22 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2017 Atlassian Pty Ltd
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
--------------------------------------------------------------------------------
/src/rest/auth-strategy/github.ts:
--------------------------------------------------------------------------------
1 | import * as passport from 'passport';
2 | import { Strategy as GitHubStrategy } from 'passport-github';
3 |
4 | import { baseURL, github, adminIdentifiers } from '../../config';
5 |
6 | export const useGitHub = () => {
7 | passport.use(new GitHubStrategy({
8 | clientID: github.clientID,
9 | clientSecret: github.clientSecret,
10 | callbackURL: `${baseURL}/rest/auth/callback`,
11 | }, (accessToken, refreshToken, profile: any, cb) => {
12 | profile.isAdmin = adminIdentifiers.indexOf(profile.username) !== -1;
13 | cb(null, profile);
14 | }));
15 | return 'github';
16 | };
17 |
--------------------------------------------------------------------------------
/src/files/utils/__spec__/sha_spec.ts:
--------------------------------------------------------------------------------
1 | import { expect } from 'chai';
2 |
3 | import { generateSHAs } from '../sha';
4 |
5 | describe('generateSHAs', () => {
6 | it('should hash the given buffer', () => {
7 | expect(generateSHAs(Buffer.from('abc'))).to.deep.equal({
8 | sha1: 'a9993e364706816aba3e25717850c26c9cd0d89d',
9 | sha256: 'ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad',
10 | });
11 | });
12 |
13 | it('should hash empty buffers', () => {
14 | expect(generateSHAs(Buffer.alloc(0))).to.deep.equal({
15 | sha1: 'da39a3ee5e6b4b0d3255bfef95601890afd80709',
16 | sha256: 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
17 | });
18 | });
19 | });
20 |
--------------------------------------------------------------------------------
/src/rest/auth-strategy/_types.ts:
--------------------------------------------------------------------------------
1 | import * as passport from 'passport';
2 |
3 | class PassportStrategy<O, C> implements passport.Strategy {
4 | constructor(options: O, cb: C) {}
5 | // Fake authenticate impl
6 | authenticate() {}
7 | }
8 | type PassportCallback<U> = (err: null, user: U | false, error?: { message: string }) => void;
9 | interface OpenIDStrategyOptions {
10 | returnURL: string;
11 | realm: string;
12 | providerURL: string;
13 | stateless: boolean;
14 | profile: boolean;
15 | }
16 | type OpenIDCallback = (identifier: string, profile: passport.Profile, cb: PassportCallback<User>) => void;
17 |
18 | export class OpenIDStrategyType extends PassportStrategy<
19 | OpenIDStrategyOptions,
20 | OpenIDCallback
21 | > {}
22 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: true
2 | language: node_js
3 | node_js:
4 | - "8"
5 | cache: yarn
6 | env:
7 | global:
8 | COMMIT=${TRAVIS_COMMIT::8}
9 |
10 | before_install: sudo apt-get install -y createrepo dpkg-dev apt-utils gnupg2 gzip
11 | before_script: cp config.template.js config.js
12 | script: yarn lint && yarn build-server && yarn test
13 |
14 | after_success:
15 | - docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
16 | - export REPO=atlassian/nucleus
17 | - export TAG=`if [ "$TRAVIS_BRANCH" == "master" ]; then echo "latest"; else echo $TRAVIS_BRANCH ; fi`
18 | - docker build -f Dockerfile -t $REPO:$COMMIT .
19 | - docker tag $REPO:$COMMIT $REPO:$TAG
20 | - docker tag $REPO:$COMMIT $REPO:travis-$TRAVIS_BUILD_NUMBER
21 | - docker push $REPO
--------------------------------------------------------------------------------
/src/rest/auth-strategy/local.ts:
--------------------------------------------------------------------------------
1 | import * as passport from 'passport';
2 | import { BasicStrategy } from 'passport-http';
3 |
4 | import { localAuth, adminIdentifiers } from '../../config';
5 |
6 | export const useLocal = () => {
7 | passport.use(new BasicStrategy((username, password, done) => {
8 | for (const user of localAuth) {
9 | if (user.username === username && user.password === password) {
10 | const nucleusUser: User = {
11 | id: user.username,
12 | displayName: user.displayName,
13 | isAdmin: adminIdentifiers.indexOf(user.username) !== -1,
14 | photos: [
15 | { value: user.photo },
16 | ],
17 | };
18 | return done(null, nucleusUser);
19 | }
20 | }
21 | return done(null, false);
22 | }));
23 | return 'basic';
24 | };
25 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:8
2 |
3 | RUN apt update && apt install createrepo dpkg-dev apt-utils gnupg2 gzip -y && rm -rf /var/lib/apt/lists/*
4 |
5 | WORKDIR /opt/service
6 |
7 | # Copy package.json and yarn.lock; changes should invalidate the entire image
8 | COPY package.json yarn.lock /opt/service/
9 |
10 |
11 | # Copy common typings
12 | COPY typings /opt/service/typings
13 |
14 | # Copy TS configs
15 | COPY tsconfig* /opt/service/
16 |
17 | # Build backend
18 | COPY src /opt/service/src
19 |
20 | # Build Frontend
21 |
22 | COPY public /opt/service/public
23 | COPY webpack.*.js postcss.config.js README.md /opt/service/
24 |
25 | # Install dependencies
26 | RUN yarn --cache-folder ../ycache && yarn build:server && yarn build:fe:prod && yarn --production --cache-folder ../ycache && rm -rf ../ycache
27 |
28 | EXPOSE 8080
29 |
30 | ENTRYPOINT ["npm", "run", "start:server:prod", "--"]
--------------------------------------------------------------------------------
/src/files/utils/tmp.ts:
--------------------------------------------------------------------------------
1 | import * as fs from 'fs-extra';
2 | import * as os from 'os';
3 | import * as path from 'path';
4 |
5 | export const withTmpDir = async <T>(fn: (tmpDir: string) => Promise<T>) => {
6 | let createdDir = '';
7 | if (process.platform === 'darwin') {
8 | await fs.mkdirs(path.resolve('/tmp', 'nucleus'));
9 | createdDir = await fs.mkdtemp(path.resolve('/tmp', 'nucleus', 'wd-'));
10 | } else {
11 | createdDir = await fs.mkdtemp(path.resolve(os.tmpdir(), 'nucleus-wd-'));
12 | }
13 | const cleanup = async () => {
14 | if (await fs.pathExists(createdDir)) {
15 | await fs.remove(createdDir);
16 | }
17 | };
18 | let result: T;
19 | try {
20 | result = await fn(createdDir);
21 | } catch (err) {
22 | await cleanup();
23 | throw err;
24 | }
25 | await cleanup();
26 | return result;
27 | };
28 |
--------------------------------------------------------------------------------
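
For reference, a hypothetical caller of `withTmpDir` above (the file name and contents are illustrative assumptions, not from the repo) gets a scratch directory that is removed whether the callback resolves or throws:

```ts
// Hypothetical usage of withTmpDir; the scratch file is illustrative.
import * as fs from 'fs-extra';
import * as path from 'path';

import { withTmpDir } from './tmp';

const roundTripScratchFile = async (contents: string) =>
  withTmpDir(async (tmpDir) => {
    const scratch = path.resolve(tmpDir, 'scratch.txt');
    await fs.writeFile(scratch, contents);
    return fs.readFile(scratch, 'utf8'); // resolved before tmpDir is removed
  });
```
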
/public/components/AppCard.scss:
--------------------------------------------------------------------------------
1 | .appContainer {
2 | float: left;
3 | width: 33.33%;
4 | padding: 8px 16px;
5 | box-sizing: border-box;
6 |
7 | .app {
8 | height: 256px;
9 | box-sizing: border-box;
10 | padding: 10px 15px;
11 | border-radius: 3px;
12 | color: inherit;
13 | cursor: pointer;
14 | height: 100%;
15 | box-shadow: 0 4px 8px -2px rgba(9,30,66,.28), 0 0 1px rgba(9,30,66,.3);
16 | text-align: center;
17 |
18 | &:hover {
19 | background: #f4f5f7;
20 | }
21 |
22 | .imgContainer {
23 | margin-top: 24px;
24 | height: 160px;
25 | display: flex;
26 | flex-direction: column;
27 | align-items: center;
28 | justify-content: center;
29 |
30 | > img {
31 | max-width: 40%;
32 | max-height: 160px;
33 | }
34 | }
35 |
36 | > h4 {
37 | margin-bottom: 16px;
38 | text-align: center;
39 | }
40 | }
41 | }
--------------------------------------------------------------------------------
/docs/Endpoints.md:
--------------------------------------------------------------------------------
1 | # Internal Endpoints
2 |
3 | ## `/healthcheck`
4 |
5 | Method: GET
6 | Authentication: None
7 |
8 | This endpoint performs no tasks and simply returns immediately with 200 OK.
9 | You should use it to determine if Nucleus is still alive and running.
10 |
11 | ## `/deepcheck`
12 |
13 | Method: GET
14 | Authentication: None
15 |
16 | This endpoint performs two simple checks to ensure Nucleus is configured correctly.
17 |
18 | * A database connection test, which simply attempts to connect to the configured DB
19 | * A file store connection test, which simply attempts to put, get and delete files in the store
20 |
21 | You should only use this endpoint when you first launch Nucleus to validate your
22 | config is OK and Nucleus will operate successfully. If you get any response that isn't
23 | 200 OK, something went wrong.
24 |
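As a rough sketch of how these two endpoints could be wired into monitoring (the base URL, port and `node-fetch` dependency below are assumptions, not something Nucleus prescribes):

```ts
// Hypothetical liveness/config probe; the base URL is a placeholder.
const fetch = require('node-fetch');

const BASE_URL = 'http://localhost:8080';

const probe = async (path: string) => {
  const response = await fetch(`${BASE_URL}${path}`);
  if (response.status !== 200) {
    throw new Error(`${path} responded with ${response.status}`);
  }
};

// /healthcheck for ongoing liveness checks, /deepcheck once at launch
probe('/healthcheck')
  .then(() => probe('/deepcheck'))
  .then(() => console.log('Nucleus looks healthy'))
  .catch(err => console.error(err));
```
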
25 | ## `/rest/app/:appId/channel/:channelId/upload`
26 |
27 | See the [Uploading Docs](Uploading.md) for more information on this endpoint.
28 |
--------------------------------------------------------------------------------
/public/index.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 | import { render } from 'react-dom';
3 | import { AppContainer } from 'react-hot-loader';
4 | import { Provider } from 'react-redux';
5 | import { createStore } from 'redux';
6 |
7 | import App from './App';
8 | import reducers from './reducers';
9 |
10 | import '@atlaskit/css-reset/dist/bundle.css';
11 |
12 | const store = createStore(reducers);
13 |
14 | render(
15 |   <AppContainer>
16 |     <Provider store={store}>
17 |       <App />
18 |     </Provider>
19 |   </AppContainer>,
20 | document.querySelector('#app'),
21 | );
22 |
23 | if (module && module.hot) {
24 | module.hot.accept('./App.tsx', () => {
25 | /* tslint:disable */
26 | const NewApp = require('./App').default;
27 | /* tslint:enable */
28 | render(
29 |       <AppContainer>
30 |         <Provider store={store}>
31 |           <NewApp />
32 |         </Provider>
33 |       </AppContainer>,
34 | document.querySelector('#app'),
35 | );
36 | });
37 | }
38 |
--------------------------------------------------------------------------------
/src/rest/admin.ts:
--------------------------------------------------------------------------------
1 | import * as debug from 'debug';
2 | import * as express from 'express';
3 |
4 | import driver from '../db/driver';
5 | import { createA } from '../utils/a';
6 | import Positioner from '../files/Positioner';
7 | import store from '../files/store';
8 |
9 | const d = debug('nucleus:rest:admin');
10 | const a = createA(d);
11 |
12 | const adminRouter = express();
13 |
14 | adminRouter.get('/release-locks', a(async (req, res) => {
15 | const apps = await driver.getApps();
16 | const positioner = new Positioner(store);
17 |
18 | d(`admin user ${req.user.id} is clearing all existing locks`);
19 |
20 | for (const app of apps) {
21 | const lock = await positioner.currentLock(app);
22 | if (lock) {
23 | d('clearing lock for app:', app.slug);
24 | await positioner.releaseLock(app, lock);
25 | }
26 | }
27 |
28 | d('locks cleared');
29 |
30 | res.json({
31 | success: 'Locks cleared',
32 | });
33 | }));
34 |
35 | export default adminRouter;
36 |
--------------------------------------------------------------------------------
/public/components/MigrationList.scss:
--------------------------------------------------------------------------------
1 | .migrationContainer {
2 | padding: 8px 16px;
3 | box-sizing: border-box;
4 |
5 | .migration {
6 | box-sizing: border-box;
7 | padding: 10px 15px;
8 | border-radius: 3px;
9 | color: inherit;
10 | height: 100%;
11 | box-shadow: 0 4px 8px -2px rgba(9,30,66,.28), 0 0 1px rgba(9,30,66,.3);
12 |
13 | &:hover {
14 | background: #f4f5f7;
15 | }
16 |
17 | .migrationActions {
18 | display: flex;
19 | margin-top: 8px;
20 | }
21 |
22 | .migrationProgress {
23 | padding: 8px;
24 |
25 | .migrationProgressWrapper {
26 | width: 100%;
27 | height: 10px;
28 | background-color: #DFE1E6;
29 | border-radius: 5px;
30 | margin-top: 8px;
31 |
32 | .migrationProgressInner {
33 | background-color: #0065FA;
34 | transition: width 0.6s ease-in-out;
35 | height: 100%;
36 | border-radius: 5px;
37 | }
38 | }
39 | }
40 | }
41 | }
--------------------------------------------------------------------------------
/src/files/utils/spawn.ts:
--------------------------------------------------------------------------------
1 | import * as cp from 'child-process-promise';
2 |
3 | export const spawnPromiseAndCapture = async (command: string, args: string[], opts: any = {}): Promise<[Buffer, Buffer, Error | null]> => {
4 | const stdout: Buffer[] = [];
5 | const stderr: Buffer[] = [];
6 | const child = cp.spawn(command, args, opts);
7 | child.childProcess.stdout.on('data', (data: Buffer) => stdout.push(data));
8 | child.childProcess.stderr.on('data', (data: Buffer) => stderr.push(data));
9 | let error: Error | null = null;
10 | try {
11 | await child;
12 | } catch (err) {
13 | error = err;
14 | }
15 | return [Buffer.concat(stdout), Buffer.concat(stderr), error];
16 | };
17 |
18 | export const escapeShellArguments = (args: string[]): string[] => {
19 | return args.map((value) => {
20 | if (value.indexOf(' ') > -1) {
21 | if (value.indexOf('"') > -1) {
22 | throw new Error(`Unable to escape parameter: ${value}`);
23 | }
24 | return `"${value}"`;
25 | }
26 | return value;
27 | });
28 | };
29 |
--------------------------------------------------------------------------------
/public/components/WebHookManagement.scss:
--------------------------------------------------------------------------------
1 | .container {
2 | margin-top: 16px;
3 |
4 | .createWebHookContainer {
5 | display: flex;
6 |
7 | .createWebHookInput {
8 | flex: 1;
9 | display: flex;
10 |
11 | > div {
12 | flex: 1;
13 | padding-right: 16px;
14 |
15 | > div {
16 | width: 100%;
17 | }
18 | }
19 | }
20 |
21 | .createWebHookButton {
22 | display: flex;
23 | align-items: flex-end;
24 | padding-bottom: 4px;
25 | }
26 | }
27 |
28 | .webHookList {
29 | margin-top: 8px;
30 |
31 | .webHook {
32 | padding: 2px 8px;
33 | display: flex;
34 | align-items: center;
35 |
36 | > :first-child {
37 | margin-left: 0;
38 | margin-right: 8px;
39 | }
40 |
41 | > .url {
42 | flex: 1;
43 | overflow: hidden;
44 | text-overflow: ellipsis;
45 | }
46 |
47 | > :not(.url) {
48 | margin-left: 8px;
49 | }
50 | }
51 | }
52 |
53 | .verticalSpinner {
54 | display: flex;
55 | width: 53.19px;
56 | justify-content: center;
57 | }
58 | }
--------------------------------------------------------------------------------
/.circleci/publish-nucleus-docker.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ########################################################################
4 | # #
5 | # This script requires the following env vars set #
6 | # #
7 | # DOCKER_USER: Username with push access to atlassian dockerhub #
8 | # DOCKER_PASSWORD: Password for user with push access to dockerhub #
9 | # #
10 | ########################################################################
11 |
12 | # First 8 characters of SHA
13 | export COMMIT=${CIRCLE_SHA1:0:8}
14 | # Docker Hub Repo
15 | export REPO=atlassian/nucleus
16 | # Target tag, latest on master, branch otherwise
17 | export TAG=`if [ "$CIRCLE_BRANCH" == "master" ]; then echo "latest"; else echo $CIRCLE_BRANCH ; fi`
18 |
19 | docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
20 | docker build -f Dockerfile -t $REPO:$COMMIT .
21 | docker tag $REPO:$COMMIT $REPO:$TAG
22 | docker tag $REPO:$COMMIT $REPO:circle-$CIRCLE_BUILD_NUM
23 | docker push $REPO
--------------------------------------------------------------------------------
/src/files/utils/sync.ts:
--------------------------------------------------------------------------------
1 | import * as fs from 'fs-extra';
2 | import * as path from 'path';
3 |
4 | export const syncDirectoryToStore = async (store: IFileStore, keyPrefix: string, localBaseDir: string, relative: string = '.') => {
5 | for (const child of await fs.readdir(path.resolve(localBaseDir, relative))) {
6 | const absoluteChild = path.resolve(localBaseDir, relative, child);
7 | if ((await fs.stat(absoluteChild)).isDirectory()) {
8 | await syncDirectoryToStore(store, keyPrefix, localBaseDir, path.join(relative, child));
9 | } else {
10 | await store.putFile(
11 | path.posix.join(keyPrefix, relative, child),
12 | await fs.readFile(absoluteChild),
13 | true,
14 | );
15 | }
16 | }
17 | };
18 |
19 | export const syncStoreToDirectory = async (store: IFileStore, keyPrefix: string, localDir: string) => {
20 | for (const key of await store.listFiles(keyPrefix)) {
21 | const relativeKey = key.substr(keyPrefix.length + 1);
22 | const localPath = path.resolve(localDir, relativeKey);
23 | await fs.mkdirs(path.dirname(localPath));
24 | await fs.writeFile(
25 | localPath,
26 | await store.getFile(key),
27 | );
28 | }
29 | };
30 |
--------------------------------------------------------------------------------
/public/components/Highlight.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 | import * as ReactDOM from 'react-dom';
3 | import * as hljs from 'highlight.js/lib/highlight';
4 | import * as bashLanguage from 'highlight.js/lib/languages/bash';
5 | import * as javascriptLanguage from 'highlight.js/lib/languages/javascript';
6 |
7 | export default class Highlight extends React.PureComponent<{
8 | className: string;
9 | }, null> {
10 | componentDidMount() {
11 | this.highlightCode();
12 | }
13 |
14 | componentDidUpdate() {
15 | this.highlightCode();
16 | }
17 |
18 | highlightCode() {
19 | const { className } = this.props;
20 | const domNode = ReactDOM.findDOMNode(this);
21 | const nodes = domNode.querySelectorAll('pre code');
22 |
23 | hljs.registerLanguage('javascript', javascriptLanguage);
24 | hljs.registerLanguage('bash', bashLanguage);
25 |
26 | let i;
27 | for (i = 0; i < nodes.length; i += 1) {
28 | hljs.highlightBlock(nodes[i]);
29 | }
30 | }
31 |
32 | render() {
33 | const { children, className } = this.props;
34 | return (
35 |       <div className={className}>
36 |         <pre>
37 |           <code>{children}</code>
38 |         </pre>
39 |       </div>
40 | );
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/.cz.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | types: [
3 | {value: 'feat', name: 'feat: A new feature'},
4 | {value: 'fix', name: 'fix: A bug fix'},
5 | {value: 'docs', name: 'docs: Documentation only changes'},
6 | {value: 'style', name: 'style: Changes that do not affect the meaning of the code\n (white-space, formatting, missing semi-colons, etc)'},
7 | {value: 'refactor', name: 'refactor: A code change that neither fixes a bug nor adds a feature'},
8 | {value: 'perf', name: 'perf: A code change that improves performance'},
9 | {value: 'test', name: 'test: Adding missing tests'},
10 | {value: 'chore', name: 'chore: Changes to the build process or auxiliary tools\n and libraries such as documentation generation'},
11 | {value: 'revert', name: 'revert: Revert to a commit'},
12 | {value: 'WIP', name: 'WIP: Work in progress'},
13 | ],
14 | scopes: [
15 | { name: 'public' },
16 | { name: 'api' },
17 | { name: 'file-store' },
18 | { name: 'db' },
19 | { name: 'core-platform' },
20 | { name: 'tests' },
21 | { name: 'tooling' },
22 | ],
23 | allowCustomScopes: true,
24 | allowBreakingChanges: ['feat', 'fix'],
25 | }
--------------------------------------------------------------------------------
/webpack.production.config.js:
--------------------------------------------------------------------------------
1 | const webpack = require('webpack');
2 |
3 | const ExtractTextPlugin = require('extract-text-webpack-plugin');
4 | const WebpackCleanupPlugin = require('webpack-cleanup-plugin');
5 |
6 | const config = require('./webpack.config');
7 |
8 | // Remove React Hot Loader Patch
9 | config.entry.shift();
10 |
11 | // Hash all JS assets
12 | config.output.filename = 'core.[chunkhash].min.js';
13 |
14 | // Remove devServer config
15 | delete config.devServer;
16 |
17 | // Remove NoEmitOnErrors, HotModuleReplacement and Dashboard plugins
18 | config.plugins.shift();
19 | config.plugins.shift();
20 | config.plugins.shift();
21 |
22 | // Remove source mapping
23 | config.devtool = false;
24 |
25 | // Add production plugins
26 | config.plugins.unshift(
27 | new WebpackCleanupPlugin(),
28 | new webpack.DefinePlugin({
29 | 'process.env': {
30 | NODE_ENV: '"production"',
31 | },
32 | }),
33 | new webpack.optimize.UglifyJsPlugin({
34 | compress: {
35 | warnings: false,
36 | screw_ie8: true,
37 | drop_console: true,
38 | drop_debugger: true,
39 | },
40 | }),
41 | new ExtractTextPlugin({
42 | filename: '[contenthash].css',
43 | allChunks: true,
44 | }));
45 |
46 | module.exports = config;
--------------------------------------------------------------------------------
/public/typings/index.d.ts:
--------------------------------------------------------------------------------
1 | ///
2 |
3 | interface Application {
4 | name: string;
5 | }
6 |
7 | interface UserSubState {
8 | user?: User;
9 | signedIn: boolean;
10 | }
11 |
12 | interface AppSubState extends Application {}
13 | type AppsSubState = NucleusApp[];
14 | interface MigrationSubState {
15 | items: NucleusMigration[];
16 | hasPendingMigration: boolean;
17 | }
18 |
19 | interface AppState {
20 | user: UserSubState;
21 | apps: AppsSubState;
22 | base: string;
23 | migrations: MigrationSubState;
24 | }
25 |
26 | declare module '*.scss' {
27 | const content: {
28 | [className: string]: string;
29 | };
30 | export = content;
31 | }
32 |
33 | declare module '*.css' {
34 | const content: undefined;
35 | export = content;
36 | }
37 |
38 | declare module '*.png' {
39 | const content: string;
40 | export = content;
41 | }
42 |
43 | declare module '@atlaskit/field-base' {
44 | const foo: any;
45 | export const Label: any;
46 | export default foo;
47 | }
48 |
49 | declare module '@atlaskit/multi-select' {
50 | export const MultiSelectStateless: any;
51 | }
52 |
53 | declare module '@atlaskit/field-text' {
54 | const foo: any;
55 | export const FieldTextStateless: any;
56 | export default foo;
57 | }
--------------------------------------------------------------------------------
/.circleci/publish-runner-docker.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ########################################################################
4 | # #
5 | # This script requires the following env vars set #
6 | # #
7 | # DOCKER_USER: Username with push access to atlassian dockerhub #
8 | # DOCKER_PASSWORD: Password for user with push access to dockerhub #
9 | # #
10 | ########################################################################
11 |
12 | # Please note that updating this file from a fork will not have any effect;
13 | # the runner is built during the build and can't be published from fork PRs
14 |
15 | if [[ -z "${DOCKER_PASSWORD}" ]]; then
16 | echo No docker creds set, skipping the runner build
17 | exit 0
18 | fi
19 |
20 | # First 8 characters of SHA
21 | export COMMIT=${CIRCLE_SHA1:0:8}
22 | # Docker Hub Repo
23 | export REPO=samatlassian/nucleus-ci-runner
24 |
25 | cd .circleci
26 | docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
27 | docker build -f Dockerfile -t $REPO:$COMMIT .
28 | docker tag $REPO:$COMMIT $REPO:latest
29 | docker push $REPO
30 |
--------------------------------------------------------------------------------
/src/rest/auth-strategy/openid.ts:
--------------------------------------------------------------------------------
1 | import * as passport from 'passport';
2 | const { Strategy } = require('passport-openid');
3 |
4 | import { baseURL, openid, adminIdentifiers } from '../../config';
5 | import { OpenIDStrategyType } from './_types';
6 |
7 | const noop = () => '';
8 | /* tslint:disable */
9 | const OpenIDStrategy: typeof OpenIDStrategyType = Strategy;
10 | /* tslint:enable */
11 |
12 | export const useOpenID = () => {
13 | passport.use(new OpenIDStrategy({
14 | returnURL: `${baseURL}/rest/auth/callback`,
15 | realm: openid.realm,
16 | providerURL: openid.providerURL,
17 | stateless: openid.stateless,
18 | profile: openid.profile,
19 | }, (identifier, profile, cb) => {
20 | const email = (profile.emails || []).filter(email => (new RegExp(`@${openid.domain}$`)).test(email.value))[0];
21 | if (!email) {
22 | return cb(null, false, { message: `Not an @${openid.domain} email address.` });
23 | }
24 |
25 | const user: User = {
26 | id: email.value,
27 | displayName: profile.displayName,
28 | isAdmin: adminIdentifiers.indexOf(email.value) !== -1,
29 | photos: [
30 | { value: (openid.photoResolver || noop)(email.value) },
31 | ],
32 | };
33 |
34 | cb(null, user);
35 | }));
36 | return 'openid';
37 | };
38 |
--------------------------------------------------------------------------------
/src/files/utils/p-queue.ts:
--------------------------------------------------------------------------------
1 | export const runPQ = async <T, R>(items: T[], executor: (item: T) => R, simultaneous = 5): Promise<R[]> => {
2 | if (simultaneous <= 0) {
3 | throw new Error('Simultaneous value must be greater than 0');
4 | }
5 | const returns: R[] = [];
6 | let currentIndex = 0;
7 | let currentlyRunning = 0;
8 |
9 | let done: (err?: any) => void;
10 | let isDone = false;
11 | const promise = new Promise((resolve, reject) => {
12 | done = (err?: any) => {
13 | isDone = true;
14 | if (err) return reject(err);
15 | resolve();
16 | };
17 | });
18 |
19 | const run = async () => {
20 | if (isDone) return;
21 | currentlyRunning += 1;
22 | if (currentIndex >= items.length) {
23 | currentlyRunning -= 1;
24 | if (currentlyRunning === 0) {
25 | done();
26 | }
27 | return;
28 | }
29 |
30 | const i = currentIndex;
31 | currentIndex += 1;
32 | try {
33 | returns[i] = await executor(items[i]);
34 | } catch (err) {
35 | return done(err);
36 | }
37 | currentlyRunning -= 1;
38 |
39 | process.nextTick(run);
40 | };
41 |
42 | for (let t = 0; t < Math.min(items.length, simultaneous); t += 1) {
43 | run();
44 | }
45 |
46 | await promise;
47 | return returns;
48 | };
49 |
--------------------------------------------------------------------------------
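
For reference, a hypothetical usage sketch of `runPQ` above (the file list, concurrency limit and import paths are illustrative assumptions): it bounds how many async jobs run at once.

```ts
// Hypothetical usage of runPQ: hash many files with at most 3 reads in flight.
import * as fs from 'fs-extra';

import { generateSHAs } from './sha';
import { runPQ } from './p-queue';

const hashFiles = (filePaths: string[]) =>
  runPQ(filePaths, async (filePath) => generateSHAs(await fs.readFile(filePath)), 3);
```
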
/public/components/AppCard.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 | import { connect } from 'react-redux';
3 | import { Link } from 'react-router';
4 |
5 | import * as styles from './AppCard.scss';
6 | import * as favicon from '../favicon.png';
7 |
8 | export interface AppCardReduxProps {
9 | baseUpdateUrl: string;
10 | }
11 |
12 | export interface AppCardDispatchProps {}
13 |
14 | export interface AppCardComponentProps {
15 | app: NucleusApp;
16 | }
17 |
18 | class AppCard extends React.PureComponent<AppCardComponentProps & AppCardReduxProps & AppCardDispatchProps, {}> {
19 | private badImage = (e) => {
20 | e.currentTarget.src = favicon;
21 | }
22 |
23 | render() {
24 | return (
25 |
26 |
27 |
28 |
29 |
30 |
31 |
{this.props.app.name}
32 |
33 |
34 |
35 | );
36 | }
37 | }
38 |
39 | const mapStateToProps = state => ({
40 | baseUpdateUrl: state.base,
41 | });
42 |
43 | export default connect(mapStateToProps, null)(AppCard);
44 |
--------------------------------------------------------------------------------
/src/rest/_helpers.ts:
--------------------------------------------------------------------------------
1 | import * as debug from 'debug';
2 | import * as express from 'express';
3 |
4 | import driver from '../db/driver';
5 |
6 | const d = debug('nucleus:rest:helpers');
7 |
8 | export const requireLogin: express.RequestHandler = (req, res, next) => {
9 | if (!req.user) {
10 | d(`Unauthenticated user attempted to access: ${req.url}`);
11 | return res.status(403).json({ error: 'Forbidden' });
12 | }
13 | next();
14 | };
15 |
16 | export const requireAdmin: express.RequestHandler = (req, res, next) => {
17 | return requireLogin(req, res, () => {
18 | if (!req.user.isAdmin) {
19 | d(`Non admin user attempted to access: ${req.url}`);
20 | return res.status(403).json({ error: 'Forbidden' });
21 | }
22 | next();
23 | });
24 | };
25 |
26 | export const noPendingMigrations: express.RequestHandler = async (req, res, next) => {
27 | try {
28 | const migrations = await driver.getMigrations();
29 | if (migrations.find(m => !m.complete)) {
30 | return res.status(401).json({
31 | error: 'There is a pending migration, this endpoint has been disabled',
32 | });
33 | }
34 | } catch (err) {
35 | d('error fetching migrations', err);
36 | return res.status(500).json({ error: 'This endpoint relies on no pending migrations but we failed to list migrations' });
37 | }
38 | next();
39 | };
40 |
--------------------------------------------------------------------------------
/docs/Uploading.md:
--------------------------------------------------------------------------------
1 | # Uploading Releases
2 |
3 | ## Easy Way
4 |
5 | The easiest way to upload releases to Nucleus is to use [`electron-forge`](https://github.com/electron-userland/electron-forge)
6 | to build and publish your application. You will find the config required
7 | on your App's page inside Nucleus.
8 |
9 | Check out the publisher documentation at [https://v6.electronforge.io/publishers/nucleus](https://v6.electronforge.io/publishers/nucleus)
10 |
11 | ## Custom Way
12 |
13 | There is an upload endpoint inside Nucleus; you simply hit it with the
14 | parameters outlined below as a POST request with a FormData body.
15 |
16 | ```
17 | POST: /rest/app/:appId/channel/:channelId/upload
18 | Headers:
19 | Authorization:
20 | BODY:
21 | platform: String - One of 'darwin', 'win32' and 'linux'
22 | arch: String - One of 'ia32' and 'x64'
23 | version: String
24 | FILES:
25 | : File
26 | ```
27 |
28 | Please note that any files you wish to release must be attached to
29 | the body of the request; you can use any key you like when adding a
30 | file to the body.
31 |
32 | Any non-200 status code means something went wrong; a helpful error
33 | message is normally included in the response.
34 |
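As a hedged sketch of hitting this route from Node (the host, app id, channel id, token, file name and the `node-fetch`/`form-data` dependencies are all placeholders/assumptions):

```ts
// Hypothetical upload script; every identifier and the token are placeholders.
import * as fs from 'fs';

const fetch = require('node-fetch');
const FormData = require('form-data');

const uploadRelease = async () => {
  const form = new FormData();
  form.append('platform', 'win32');   // 'darwin', 'win32' or 'linux'
  form.append('arch', 'x64');         // 'ia32' or 'x64'
  form.append('version', '1.2.3');
  form.append('installer', fs.createReadStream('./out/MyApp-1.2.3 Setup.exe'));

  const response = await fetch('https://nucleus.example.com/rest/app/1/channel/abc123/upload', {
    method: 'POST',
    headers: { Authorization: 'MY_UPLOAD_TOKEN', ...form.getHeaders() },
    body: form,
  });
  if (response.status !== 200) {
    throw new Error(`Upload failed (${response.status}): ${await response.text()}`);
  }
};

uploadRelease().catch(console.error);
```
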
35 | See the [Nucleus Publisher](https://github.com/electron-userland/electron-forge/blob/master/packages/publisher/nucleus/src/PublisherNucleus.ts) for a JS code example of uploading to Nucleus.
--------------------------------------------------------------------------------
/src/migrations/BaseMigration.ts:
--------------------------------------------------------------------------------
1 | import * as debug from 'debug';
2 |
3 | import driver from '../db/driver';
4 |
5 | const d = debug('nucleus:migration-store');
6 |
7 | export interface MigrationItem<T> {
8 | done: boolean;
9 | data: T;
10 | }
11 |
12 | export class MigrationStore {
13 | private static migrationMap: Map<string, BaseMigration<any>> = new Map();
14 |
15 | static async register(migration: BaseMigration<any>) {
16 | d('registering a new migration:', migration.key);
17 | await driver.addMigrationIfNotExists(migration);
18 | MigrationStore.migrationMap.set(migration.key, migration);
19 | }
20 |
21 | static get(key: string) {
22 | return MigrationStore.migrationMap.get(key) || null;
23 | }
24 |
25 | static getMap() {
26 | const map: { [key: string]: BaseMigration<any> } = {};
27 | for (const entry of MigrationStore.migrationMap.entries()) {
28 | map[entry[0]] = entry[1];
29 | }
30 | Object.freeze(map);
31 | return map;
32 | }
33 | }
34 |
35 | export default abstract class BaseMigration<T> {
36 | abstract key: string;
37 | abstract friendlyName: string;
38 | dependsOn: string[] = [];
39 | private dMem: debug.IDebugger | null = null;
40 |
41 | protected get d() {
42 | if (!this.dMem) {
43 | this.dMem = debug(`nucleus:migration:${this.key}`);
44 | }
45 | return this.dMem;
46 | }
47 |
48 | abstract async getItems(): Promise<MigrationItem<T>[]>;
49 |
50 | abstract async runOnItem(item: MigrationItem<T>): Promise<void>;
51 | }
52 |
--------------------------------------------------------------------------------
/public/components/HelpHeader.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 |
3 | import AkInlineDialog from '@atlaskit/inline-dialog';
4 | import QuestionIcon from '@atlaskit/icon/glyph/question-circle';
5 |
6 | import * as styles from './HelpHeader.scss';
7 |
8 | export interface HelpHeaderProps {
9 | help: React.ReactNode;
10 | title: string;
11 | position?: string;
12 | }
13 |
14 | export interface HelpHeaderState {
15 | helpOpen: boolean;
16 | }
17 |
18 | export default class HelpHeader extends React.PureComponent<HelpHeaderProps, HelpHeaderState> {
19 | state = {
20 | helpOpen: false,
21 | };
22 |
23 | closeHelp = () => this.setState({
24 | helpOpen: false,
25 | })
26 |
27 | toggleHelp = () => this.setState({
28 | helpOpen: !this.state.helpOpen,
29 | })
30 |
31 | render() {
32 | return (
33 |
34 |
{this.props.title}
35 |
38 | {this.props.help}
39 |
40 | }
41 | position={this.props.position || 'right middle'}
42 | isOpen={this.state.helpOpen}
43 | onClose={this.closeHelp}
44 | shouldFlip
45 | >
46 |
47 |
48 |
49 |
50 |
51 | );
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/src/files/utils/gpg.ts:
--------------------------------------------------------------------------------
1 | import * as cp from 'child-process-promise';
2 | import * as fs from 'fs-extra';
3 | import * as path from 'path';
4 |
5 | import { spawnPromiseAndCapture } from './spawn';
6 | import { withTmpDir } from './tmp';
7 | import * as config from '../../config';
8 |
9 | export const gpgSign = async (file: string, out: string) => {
10 | await withTmpDir(async (tmpDir) => {
11 | const key = path.resolve(tmpDir, 'key.asc');
12 | await fs.writeFile(key, config.gpgSigningKey);
13 | const [stdout, stderr] = await spawnPromiseAndCapture('gpg', ['--import', key]);
14 | try { await fs.remove(out); } catch (err) {}
15 | const keyImport = stdout.toString() + '--' + stderr.toString();
16 | const keyMatch = keyImport.match(/ key ([A-Za-z0-9]+):/);
17 | if (!keyMatch || !keyMatch[1]) {
18 | console.error(JSON.stringify(keyImport));
19 | throw new Error('Bad GPG import');
20 | }
21 | const keyId = keyMatch[1];
22 | await cp.spawn('gpg', ['-abs', '--default-key', keyId, '-o', out, file]);
23 | });
24 | };
25 |
26 | export const isGpgKeyValid = async () => {
27 | if (!config.gpgSigningKey) return false;
28 | return await withTmpDir(async (tmpDir) => {
29 | const testFile = path.resolve(tmpDir, 'test_file');
30 | const outFile = path.resolve(tmpDir, 'out_file');
31 | await fs.writeFile(testFile, 'foobar');
32 | try {
33 | await gpgSign(testFile, outFile);
34 | } catch (err) {
35 | return false;
36 | }
37 | return await fs.pathExists(outFile);
38 | });
39 | };
40 |
--------------------------------------------------------------------------------
/src/__spec__/healthcheck_spec.ts:
--------------------------------------------------------------------------------
1 | import * as chai from 'chai';
2 |
3 | import * as helpers from './_helpers';
4 |
5 | const { expect } = chai;
6 |
7 | describe('healthcheck endpoints', () => {
8 | before(helpers.startTestNucleus);
9 |
10 | describe('/healthcheck', () => {
11 | describe('GET', () => {
12 | it('should respond 200 OK', async () => {
13 | const response = await helpers.request
14 | .get('/healthcheck')
15 | .send();
16 |
17 | expect(response).to.have.status(200);
18 | });
19 |
20 | it('should respond with a JSON body', async () => {
21 | const response = await helpers.request
22 | .get('/healthcheck')
23 | .send();
24 |
25 | expect(response).to.be.json;
26 | expect(response.body).to.deep.equal({ alive: true });
27 | });
28 | });
29 | });
30 |
31 | describe('/deepcheck', () => {
32 | describe('GET', () => {
33 | it('should respond 200 OK', async () => {
34 | const response = await helpers.request
35 | .get('/deepcheck')
36 | .send();
37 |
38 | expect(response).to.have.status(200);
39 | });
40 |
41 | it('should respond with a JSON body', async () => {
42 | const response = await helpers.request
43 | .get('/deepcheck')
44 | .send();
45 |
46 | expect(response).to.be.json;
47 | expect(response.body).to.deep.equal({ alive: true });
48 | });
49 | });
50 | });
51 |
52 | after(helpers.stopTestNucleus);
53 | });
54 |
--------------------------------------------------------------------------------
/docs/API.md:
--------------------------------------------------------------------------------
1 | # Public API
2 |
3 | Although the Nucleus service has several REST endpoints and puts files in
4 | specific locations on your file store, only a few of these things are
5 | considered "Public API" and will be versioned / migrated appropriately.
6 |
7 | The following APIs, concepts, and contracts are considered "Public API":
8 |
9 | * The [Upload Endpoint](Uploading.md)
10 | * The [healthcheck and deepcheck endpoints](Endpoints.md)
11 | * The style of the generated path to the icons files
12 | * `/:appSlug/icon.png`
13 | * `/:appSlug/icon.ico`
14 | * The style of the generated path to the `RELEASES` file
15 | * `/:appSlug/:channelId/win32/:arch/RELEASES`
16 | * The style of the generated path to the `RELEASES.json` files
17 | * `/:appSlug/:channelId/darwin/:arch/RELEASES.json`
18 | * `/:appSlug/:channelId/darwin/:arch/:rollout/RELEASES.json`
19 | * The style of the generated path to the Debian repo
20 | * `/:appSlug/:channelId/linux/debian/binary`
21 | * The style of the generated path to the Redhat repo
22 | * `/:appSlug/:channelId/linux/redhat`
23 | * `/:appSlug/:channelId/linux/:appSlug.repo`
24 | * The style of the generated path to the "Latest" releases
25 | * `/:appSlug/:channelId/latest/:platform/:arch/:appName.:extension`
26 |
27 | The version of Nucleus will follow [Semantic Versioning](https://semver.org)
28 | according to the impact on the above Public API, i.e. breaking changes will
29 | result in a major version bump.
30 |
31 | Breaking changes will normally come with a migration tool built into Nucleus,
32 | but this is not guaranteed. You may have to perform migrations for major versions
33 | manually.
34 |
35 | To further clarify: this document outlines our goals; it is not an implicit
36 | contract or promise. This is simply what we **aim** to achieve.
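37 |
38 | As an illustration of the path styles above (not part of Nucleus itself), a
39 | client could assemble its macOS update feed URL like this; `baseUrl`,
40 | `appSlug`, `channelId`, and `arch` are placeholder values you supply:
41 |
42 | ```ts
43 | // Sketch: build the darwin RELEASES.json URL from the documented path style.
44 | const buildDarwinFeedUrl = (
45 |   baseUrl: string,    // your static file base URL
46 |   appSlug: string,
47 |   channelId: string,
48 |   arch: string,
49 | ) => `${baseUrl}/${appSlug}/${channelId}/darwin/${arch}/RELEASES.json`;
50 |
51 | // e.g. buildDarwinFeedUrl('https://update.example.com', 'my-app', 'abc123', 'x64')
52 | ```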
--------------------------------------------------------------------------------
/docs/Staged Rollouts.md:
--------------------------------------------------------------------------------
1 | # Staged Rollouts
2 |
3 | Nucleus has built-in support for staged rollouts on some platforms. Currently
4 | we support staged rollouts on `darwin` and `win32`.
5 |
6 | Linux support for staged rollouts is **unlikely** to ever happen; you should
7 | take this into account when shipping new releases. As soon as you release
8 | new Linux updates, 100% of your users will be eligible to receive them regardless
9 | of your rollout setting.
10 |
11 | ## How do I control the rollout of a version?
12 |
13 | By default, all versions will have a `0%` rollout (with the exception of the
14 | first version for any channel, which will have a `100%` rollout). You can
15 | update the rollout for any version by navigating to it in the Nucleus UI
16 | and clicking `Edit` next to the rollout percentage.
17 |
18 | **NOTE:** You can't update the rollout of some versions due to restrictions
19 | imposed by the [Latest Releases](Latest Releases.md) feature. See that doc
20 | for more information.
21 |
22 | ## How do I utilize staged rollouts?
23 |
24 | The non-staged version of the `RELEASES.json` file for macOS can be found at a
25 | path that looks something like this:
26 |
27 | ```
28 | /:appSlug/:channelId/darwin/:arch/RELEASES.json
29 | ```
30 |
31 | If you want to use staged rollouts, you just need to add a rollout percentage
32 | before `RELEASES.json`. The percentage **must** be an integer between
33 | 0 and 100 inclusive; any other value will cause a 404 and result in broken
34 | updates. For example, a valid staged update URL
35 | would be:
36 |
37 | ```
38 | /:appSlug/:channelId/darwin/:arch/47/RELEASES.json
39 | ```
40 |
41 | You should generate the staged rollout number to use client-side in your
42 | application.
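43 |
44 | As a minimal sketch (not part of Nucleus), a client could pick an integer
45 | bucket in `[0, 100]` once, persist it so the same install always requests the
46 | same staged feed, and build its feed URL from it. The file location and the
47 | `baseUrl`/`appSlug`/`channelId`/`arch` values are placeholder assumptions:
48 |
49 | ```ts
50 | import * as fs from 'fs';
51 | import * as os from 'os';
52 | import * as path from 'path';
53 |
54 | const bucketFile = path.join(os.homedir(), '.my-app-rollout');
55 |
56 | const getRolloutBucket = (): number => {
57 |   // Reuse a previously persisted bucket if one exists.
58 |   if (fs.existsSync(bucketFile)) {
59 |     return parseInt(fs.readFileSync(bucketFile, 'utf8'), 10);
60 |   }
61 |   const bucket = Math.floor(Math.random() * 101); // integer, 0-100 inclusive
62 |   fs.writeFileSync(bucketFile, String(bucket));
63 |   return bucket;
64 | };
65 |
66 | const stagedFeedUrl = (baseUrl: string, appSlug: string, channelId: string, arch: string) =>
67 |   `${baseUrl}/${appSlug}/${channelId}/darwin/${arch}/${getRolloutBucket()}/RELEASES.json`;
68 | ```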
--------------------------------------------------------------------------------
/src/rest/auth.ts:
--------------------------------------------------------------------------------
1 | import * as express from 'express';
2 | import * as passport from 'passport';
3 | import * as session from 'express-session';
4 | import * as createRedisStore from 'connect-redis';
5 |
6 | import { initializeStrategy } from './auth-strategy';
7 | import { sessionConfig } from '../config';
8 |
9 | const strategyName = initializeStrategy();
10 |
11 | passport.serializeUser((user, cb) => cb(null, user));
12 | passport.deserializeUser((user, cb) => cb(null, user));
13 |
14 | const router = express();
15 |
16 | router.get('/login', passport.authenticate(strategyName), (req, res) => {
17 | res.redirect('/');
18 | });
19 | router.get('/callback', passport.authenticate(strategyName, { failureRedirect: '/rest/auth/login' }), (req, res) => {
20 | res.redirect('/');
21 | });
22 | router.get('/logout', (req, res) => {
23 | req.logOut();
24 | res.redirect('/');
25 | });
26 |
27 | /* tslint:disable */
28 | const RedisStore = createRedisStore(session);
29 | /* tslint:enable */
30 | const sessionOpts: session.SessionOptions = {
31 | secret: sessionConfig.secret,
32 | resave: false,
33 | saveUninitialized: false,
34 | };
35 |
36 | switch (sessionConfig.type) {
37 | case 'redis':
38 | if (!sessionConfig.redis) {
39 | console.error('Expected sessionConfig.redis to exist when type=redis');
40 | process.exit(1);
41 | } else {
42 | sessionOpts.store = new RedisStore({
43 | host: sessionConfig.redis.host,
44 | port: sessionConfig.redis.port,
45 | });
46 | }
47 | break;
48 | }
49 |
50 | export const authenticateRouter = router;
51 | export const setupApp = (app: express.Router) => {
52 | app.use(session(sessionOpts));
53 | app.use(passport.initialize());
54 | app.use(passport.session());
55 | };
56 |
--------------------------------------------------------------------------------
/src/files/utils/__spec__/tmp_spec.ts:
--------------------------------------------------------------------------------
1 | import { expect } from 'chai';
2 | import * as fs from 'fs-extra';
3 | import * as path from 'path';
4 |
5 | import { withTmpDir } from '../tmp';
6 |
7 | describe('withTmpDir', () => {
8 | it('should create an empty directory', async () => {
9 | await withTmpDir(async (tmpDir: string) => {
10 | expect(tmpDir).to.not.equal(null);
11 | expect(tmpDir).to.be.a('string');
12 | expect(await fs.pathExists(tmpDir)).to.equal(true);
13 | expect(await fs.readdir(tmpDir)).to.have.lengthOf(0);
14 | });
15 | });
16 |
17 | it('should delete the directory after the async fn resolves', async () => {
18 | let tmp: string;
19 | await withTmpDir(async (tmpDir: string) => {
20 | tmp = tmpDir;
21 | await fs.writeFile(path.resolve(tmpDir, 'foo'), 'bar');
22 | });
23 | expect(await fs.pathExists(tmp!)).to.equal(false);
24 | });
25 |
26 | it('should delete the directory after the async fn rejects', async () => {
27 | let tmp: string;
28 | let threw = false;
29 | try {
30 | await withTmpDir(async (tmpDir: string) => {
31 | tmp = tmpDir;
32 | throw 'foo';
33 | });
34 | } catch (err) {
35 | expect(err).to.equal('foo');
36 | threw = true;
37 | }
38 | expect(threw).to.equal(true);
39 | expect(await fs.pathExists(tmp!)).to.equal(false);
40 | });
41 |
42 | it('should return the value returned from the inner async fn', async () => {
43 | const returnValue = await withTmpDir(async () => {
44 | return 1;
45 | });
46 | expect(returnValue).to.equal(1);
47 | });
48 |
49 | it('should not throw if the tmp dir is cleaned up internally', async () => {
50 | await withTmpDir(async (tmpDir) => {
51 | await fs.remove(tmpDir);
52 | });
53 | });
54 | });
55 |
--------------------------------------------------------------------------------
/src/files/utils/__spec__/p-queue_spec.ts:
--------------------------------------------------------------------------------
1 | import { expect } from 'chai';
2 | import { spy } from 'sinon';
3 |
4 | import { runPQ } from '../p-queue';
5 |
6 | describe('runPQ', () => {
7 | it('should run for all items in the item set', async () => {
8 | const result = await runPQ([1, 2, 3, 4], async (n) => {
9 | return n + 1;
10 | });
11 | expect(result).to.deep.equal([2, 3, 4, 5]);
12 | });
13 |
14 | it('should return items in the correct order', async () => {
15 | const result = await runPQ([1, 5, 10, 15, 20, 25, 30], async (n) => {
16 | return n / 5;
17 | });
18 | expect(result).to.deep.equal([0.2, 1, 2, 3, 4, 5, 6]);
19 | });
20 |
21 | it('should throw an error when simultaneous is set to 0', async () => {
22 | try {
23 | await runPQ([1, 2, 3], async n => n + 1, 0);
24 | } catch (err) {
25 | expect(err).to.not.equal(null, 'should have thrown an error');
26 | return;
27 | }
28 | expect(0).to.equal(1, 'should have thrown an error');
29 | });
30 |
31 | it('should exit early when the executor throws', async () => {
32 | const executor = spy(async (n: number) => {
33 | if (n === 2) {
34 | throw 'bad';
35 | }
36 | return n;
37 | });
38 | try {
39 | await runPQ([1, 2, 3], executor, 1);
40 | } catch (err) {
41 | expect(err).to.equal('bad');
42 | expect(executor.callCount).to.equal(2);
43 | return;
44 | }
45 | expect(0).to.equal(1, 'should have thrown an error');
46 | });
47 |
48 | it('should never have more than simultaneous things running', async () => {
49 | let running = 0;
50 | await runPQ((Array(1000)).fill(0), async () => {
51 | running += 1;
52 | await new Promise(r => setTimeout(r, 1));
53 | expect(running).to.be.lte(10);
54 | running -= 1;
55 | }, 10);
56 | });
57 | });
58 |
--------------------------------------------------------------------------------
/src/migrations/file-sha/FileSHAMigration.ts:
--------------------------------------------------------------------------------
1 | import BaseMigration, { MigrationItem } from '../BaseMigration';
2 | import driver from '../../db/driver';
3 | import store from '../../files/store';
4 | import Positioner from '../../files/Positioner';
5 | import { IDBDriver } from '../../db/BaseDriver';
6 | import { generateSHAs } from '../../files/utils/sha';
7 |
8 | interface FileSHAMigrationItem {
9 | fileId: number;
10 | indexKey: string;
11 | }
12 |
13 | export default class FileSHAMigration extends BaseMigration<FileSHAMigrationItem> {
14 | key = 'file-sha';
15 | friendlyName = 'File SHA Migration';
16 | dependsOn = ['file-index'];
17 | private positioner: Positioner;
18 |
19 | constructor(private mStore: IFileStore = store, private mDriver: IDBDriver = driver) {
20 | super();
21 | this.positioner = new Positioner(mStore);
22 | }
23 |
24 | async getItems() {
25 | const apps = await this.mDriver.getApps();
26 | const items: MigrationItem<FileSHAMigrationItem>[] = [];
27 |
28 | for (const app of apps) {
29 | for (const channel of app.channels) {
30 | for (const version of channel.versions) {
31 | for (const file of version.files) {
32 | items.push({
33 | done: Boolean(file.sha1 && file.sha256),
34 | data: {
35 | fileId: file.id,
36 | indexKey: this.positioner.getIndexKey(app, channel, version, file),
37 | },
38 | });
39 | }
40 | }
41 | }
42 | }
43 |
44 | return items;
45 | }
46 |
47 | async runOnItem(item: MigrationItem<FileSHAMigrationItem>) {
48 | if (item.done) return;
49 | this.d(`generating SHAs for file(${item.data.fileId}) located at ${item.data.indexKey}`);
50 |
51 | const file = await this.mStore.getFile(item.data.indexKey);
52 | await this.mDriver.storeSHAs({ id: item.data.fileId } as any, generateSHAs(file));
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/public/components/UserDropDown.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 | import { withRouter, RouteComponentProps } from 'react-router';
3 |
4 | import DropdownList, { Group, Item } from '@atlaskit/droplist';
5 |
6 | class UserDropDown extends React.PureComponent & {
7 | user: User;
8 | }, { open: boolean }> {
9 | constructor(props, context) {
10 | super(props, context);
11 |
12 | this.state = {
13 | open: false,
14 | };
15 | }
16 |
17 | private goToApps = () => {
18 | if (this.props.location.pathname !== '/apps') {
19 | this.props.history.push('/apps');
20 | }
21 | this.setState({
22 | open: false,
23 | });
24 | }
25 |
26 | private goToMigrations = () => {
27 | if (this.props.location.pathname !== '/migrations') {
28 | this.props.history.push('/migrations');
29 | }
30 | this.setState({
31 | open: false,
32 | });
33 | }
34 |
35 | private openChange = (attrs) => {
36 | this.setState({
37 | open: attrs.isOpen,
38 | });
39 | }
40 |
41 | private toggle = () => {
42 | this.setState({
43 | open: !this.state.open,
44 | });
45 | }
46 |
47 | render() {
48 | return (
49 | {this.props.children}}
56 | >
57 |
58 | - My Applications
59 | {
60 | this.props.user.isAdmin
61 | ? (
62 | - Migrations
63 | ) : null
64 | }
65 | - Logout
66 |
67 |
68 | );
69 | }
70 | }
71 |
72 | export default withRouter(UserDropDown);
73 |
--------------------------------------------------------------------------------
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | jobs:
3 | build_runner:
4 | docker:
5 | - image: circleci/node:8.12
6 | working_directory: ~/repo
7 | steps:
8 | - checkout
9 | - setup_remote_docker
10 | - run:
11 | command: ./.circleci/publish-runner-docker.sh
12 | name: Publish Runner
13 |
14 | validate:
15 | docker:
16 | - image: samatlassian/nucleus-ci-runner:latest
17 | working_directory: ~/repo
18 | steps:
19 | - checkout
20 | - restore_cache:
21 | keys:
22 | - v1-dependencies-{{ checksum "yarn.lock" }}
23 | - v1-dependencies-
24 | - run:
25 | command: yarn install
26 | name: Install Dependencies
27 | - save_cache:
28 | paths:
29 | - node_modules
30 | key: v1-dependencies-{{ checksum "yarn.lock" }}
31 | - run:
32 | command: yarn build:server
33 | name: Build Server Code
34 | - run:
35 | command: yarn lint
36 | name: Lint Code
37 | - run:
38 | command: cp config.template.js config.js && yarn test
39 | name: Run Tests
40 |
41 | publish:
42 | docker:
43 | - image: circleci/node:8.12
44 | working_directory: ~/repo
45 | steps:
46 | - checkout
47 | - setup_remote_docker
48 | - restore_cache:
49 | keys:
50 | - v1-dependencies-{{ checksum "yarn.lock" }}
51 | - v1-dependencies-
52 | - run:
53 | command: yarn install
54 | name: Install Dependencies
55 | - run:
56 | command: ./.circleci/publish-nucleus-docker.sh
57 | name: Deploy to Docker
58 |
59 | workflows:
60 | version: 2
61 | build:
62 | jobs:
63 | - build_runner
64 | - validate:
65 | requires:
66 | - build_runner
67 | - publish:
68 | requires:
69 | - validate
70 | filters:
71 | branches:
72 | only: /^(?!pull\/).*$/
73 |
--------------------------------------------------------------------------------
/docs/Latest Releases.md:
--------------------------------------------------------------------------------
1 | # Latest Releases
2 |
3 | Nucleus has built-in support for providing a static "latest" release download
4 | URL that will work and automatically update whenever you release a new version.
5 |
6 | This URL is in the following format:
7 |
8 | ```
9 | {staticFileBaseUrl}/:appSlug/:channelId/latest/:platform/:arch/:appName.{extension}
10 |
11 | # Example
12 |
13 | https://update.myapp.com/my-app/123/latest/darwin/x64/My App.dmg
14 | ```
15 |
16 | Any file uploaded to a given version on Nucleus that we consider an "installer"
17 | file will be positioned at that path with its extension. The file types we
18 | currently consider installers are:
19 |
20 | * `.dmg`
21 | * `.pkg`
22 | * `.exe`
23 | * `.msi`
24 | * `.deb`
25 | * `.rpm`
26 |
27 | For a given extension, the file you will find at its "latest" path will be the
28 | file from the version with the highest (according to semver) version number
29 | that meets the following criteria (sketched in code at the end of this document):
30 |
31 | * The version must not be flagged as `dead`
32 | * The version must be at `100%` rollout
33 |
34 | ## Impact on other Nucleus Functionality
35 |
36 | In order to be strongly consistent and avoid scenarios where no user can
37 | install your application, the following restrictions take effect on other
38 | Nucleus features.
39 |
40 | * Once a version is at `100%` rollout you cannot change its rollout
41 | again
42 | * You can't mark a version at `100%` rollout as dead if there is no newer
43 | non-dead version at `100%` rollout as well
44 |
45 | ## Weird things you may notice
46 |
47 | Depending on how you release your app, it is possible for the latest `.dmg`
48 | installer to be a completely different version from the latest `.exe` installer.
49 | This is because we never **delete** a file from the static file store; we
50 | only ever overwrite existing files. This means that if you are currently at
51 | version `1.0.0` and release only a `.dmg` file in `1.0.1`, the DMG
52 | file will be updated but the `.exe` file will remain pointing at `1.0.0` until
53 | you release a newer EXE file.
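54 |
55 | To make the selection rule above concrete, here is a hedged sketch (not
56 | Nucleus's actual implementation) of how the version behind a "latest" path is
57 | chosen; `VersionLike` is a placeholder shape for illustration:
58 |
59 | ```ts
60 | import * as semver from 'semver';
61 |
62 | interface VersionLike { name: string; rollout: number; dead: boolean; }
63 |
64 | // Highest-semver version that is not dead and is at 100% rollout.
65 | const pickLatest = (versions: VersionLike[]): VersionLike | undefined =>
66 |   versions
67 |     .filter(v => !v.dead && v.rollout === 100)
68 |     .sort((a, b) => semver.compare(b.name, a.name))[0];
69 | ```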
--------------------------------------------------------------------------------
/src/files/local/LocalStore.ts:
--------------------------------------------------------------------------------
1 | import * as fs from 'fs-extra';
2 | import * as path from 'path';
3 |
4 | import * as config from '../../config';
5 |
6 | export default class LocalStore implements IFileStore {
7 | constructor(private localConfig = config.local) {}
8 |
9 | private getPath(...keys: string[]) {
10 | return path.resolve(this.localConfig.root, ...keys);
11 | }
12 |
13 | public async putFile(key: string, data: Buffer, overwrite = false) {
14 | if (overwrite || !await fs.pathExists(this.getPath(key))) {
15 | await fs.mkdirp(path.dirname(this.getPath(key)));
16 | await fs.writeFile(this.getPath(key), data);
17 | return true;
18 | }
19 | return false;
20 | }
21 |
22 | public async getFile(key: string) {
23 | if (await this.hasFile(key)) {
24 | return await fs.readFile(this.getPath(key));
25 | }
26 | return Buffer.from('');
27 | }
28 |
29 | public async getFileSize(key: string) {
30 | if (await this.hasFile(key)) {
31 | return (await fs.stat(this.getPath(key))).size;
32 | }
33 | return 0;
34 | }
35 |
36 | public async hasFile(key: string) {
37 | if (await fs.pathExists(this.getPath(key))) {
38 | return (await fs.stat(this.getPath(key))).isFile();
39 | }
40 | return false;
41 | }
42 |
43 | public async deletePath(key: string) {
44 | if (await fs.pathExists(this.getPath(key))) {
45 | await fs.remove(this.getPath(key));
46 | }
47 | }
48 |
49 | public async getPublicBaseUrl() {
50 | return this.localConfig.staticUrl;
51 | }
52 |
53 | public async listFiles(prefix: string) {
54 | const files: string[] = [];
55 | if (!await fs.pathExists(this.getPath(prefix))) return files;
56 | for (const child of await fs.readdir(this.getPath(prefix))) {
57 | const childPath = this.getPath(prefix, child);
58 | if ((await fs.stat(childPath)).isDirectory()) {
59 | files.push(...await this.listFiles(path.join(prefix, child)));
60 | } else {
61 | files.push(path.join(prefix, child));
62 | }
63 | }
64 | return files;
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/webpack.config.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 | const webpack = require('webpack');
3 |
4 | const {BundleAnalyzerPlugin} = require('webpack-bundle-analyzer');
5 | const DashboardPlugin = require('webpack-dashboard/plugin');
6 | const FaviconsWebpackPlugin = require('favicons-webpack-plugin');
7 | const HtmlWebpackPlugin = require('html-webpack-plugin');
8 |
9 | const loaders = require('./webpack.loaders');
10 |
11 | const HOST = process.env.NUCLEUS_HOST || '127.0.0.1';
12 | const PORT = process.env.NUCLEUS_PORT || '8888';
13 |
14 | let mainPort = 3030;
15 | try {
16 | mainPort = require('./config.js').port
17 | } catch (err) {
18 |
19 | }
20 |
21 | module.exports = {
22 | entry: [
23 | 'react-hot-loader/patch',
24 | './public/index.tsx',
25 | ],
26 | devtool: process.env.WEBPACK_DEVTOOL || 'eval-source-map',
27 | output: {
28 | publicPath: '/',
29 | path: path.join(__dirname, 'public_out'),
30 | filename: 'bundle.js',
31 | },
32 | resolve: {
33 | extensions: ['.ts', '.tsx', '.js', '.jsx'],
34 | },
35 | module: {
36 | loaders,
37 | },
38 | devServer: {
39 | contentBase: './public_out',
40 | // do not print bundle build stats
41 | noInfo: true,
42 | // enable HMR
43 | hot: true,
44 | // embed the webpack-dev-server runtime into the bundle
45 | inline: true,
46 | // serve index.html in place of 404 responses to allow HTML5 history
47 | historyApiFallback: true,
48 | port: PORT,
49 | host: HOST,
50 | proxy: {
51 | '/rest': `http://localhost:${mainPort}`
52 | }
53 | },
54 | plugins: [
55 | new webpack.NoEmitOnErrorsPlugin(),
56 | new webpack.HotModuleReplacementPlugin(),
57 | new DashboardPlugin(),
58 | new HtmlWebpackPlugin({
59 | template: './public/template.html',
60 | title: 'Nucleus',
61 | }),
62 | new FaviconsWebpackPlugin({
63 | logo: path.resolve(__dirname, 'public/favicon.png'),
64 | background: '#0F4AA3'
65 | }),
66 | ],
67 | };
68 |
69 | if (process.env.ANALYZE) {
70 | module.exports.plugins.push(new BundleAnalyzerPlugin());
71 | }
72 |
--------------------------------------------------------------------------------
/src/rest/migration.ts:
--------------------------------------------------------------------------------
1 | import * as debug from 'debug';
2 | import * as express from 'express';
3 |
4 | import driver from '../db/driver';
5 | import { createA } from '../utils/a';
6 | import BaseMigration, { MigrationStore } from '../migrations/BaseMigration';
7 |
8 | import { requireAdmin } from './_helpers';
9 |
10 | const d = debug('nucleus:rest:migrations');
11 | const a = createA(d);
12 |
13 | const migrationRouter = express();
14 |
15 | migrationRouter.use('/:key', requireAdmin, a(async (req, res, next) => {
16 | const migration = MigrationStore.get(req.params.key);
17 | if (!migration) {
18 | return res.status(404).json({
19 | error: 'Migration with provided key is not found',
20 | });
21 | }
22 | req.migration = {
23 | internal: (await driver.getMigrations()).find(m => m.key === req.params.key)!,
24 | migrator: migration,
25 | };
26 | next();
27 | }));
28 |
29 | migrationRouter.get('/:key', a(async (req, res) => {
30 | const internalMigrations = await driver.getMigrations();
31 | const migration: BaseMigration = req.migration.migrator;
32 | if (internalMigrations.find(m => migration.dependsOn.includes(m.key) && !m.complete)) {
33 | return res.status(401).json({
34 | error: 'This migration depends on migrations that have not yet completed',
35 | });
36 | }
37 |
38 | const items = await migration.getItems();
39 | if (items.length === 0 || !items.some(item => !item.done)) {
40 | req.migration.internal.complete = true;
41 | await (req.migration.internal as any).save();
42 | }
43 | res.json(items);
44 | }));
45 |
46 | migrationRouter.post('/:key', a(async (req, res) => {
47 | if (req.body && req.body.item && req.body.item.data) {
48 | const migration: BaseMigration = req.migration.migrator;
49 | await migration.runOnItem(req.body.item);
50 | if (!(await migration.getItems()).find(item => !item.done)) {
51 | req.migration.internal.complete = true;
52 | await (req.migration.internal as any).save();
53 | }
54 | res.status(200).send();
55 | } else {
56 | res.status(400).json({
57 | error: 'You must provide an item',
58 | });
59 | }
60 | }));
61 |
62 | export default migrationRouter;
63 |
--------------------------------------------------------------------------------
/webpack.loaders.js:
--------------------------------------------------------------------------------
1 | module.exports = [
2 | {
3 | test: /\.tsx?$/,
4 | exclude: /(node_modules|bower_components|public_out\/)/,
5 | loader: 'awesome-typescript-loader',
6 | options: {
7 | configFileName: 'tsconfig.public.json',
8 | transpileOnly: true
9 | }
10 | },
11 | {
12 | test: /\.eot(\?v=\d+\.\d+\.\d+)?$/,
13 | exclude: /(node_modules|bower_components)/,
14 | loader: 'file-loader',
15 | },
16 | {
17 | test: /\.(woff|woff2)$/,
18 | exclude: /(node_modules|bower_components)/,
19 | loader: 'url-loader?prefix=font/&limit=5000',
20 | },
21 | {
22 | test: /\.ttf(\?v=\d+\.\d+\.\d+)?$/,
23 | exclude: /(node_modules|bower_components)/,
24 | loader: 'url-loader?limit=10000&mimetype=application/octet-stream',
25 | },
26 | {
27 | test: /\.svg(\?v=\d+\.\d+\.\d+)?$/,
28 | exclude: /(node_modules|bower_components)/,
29 | loader: 'url-loader?limit=10000&mimetype=image/svg+xml',
30 | },
31 | {
32 | test: /\.gif/,
33 | exclude: /(node_modules|bower_components)/,
34 | loader: 'url-loader?limit=10000&mimetype=image/gif',
35 | },
36 | {
37 | test: /\.jpg/,
38 | exclude: /(node_modules|bower_components)/,
39 | loader: 'url-loader?limit=10000&mimetype=image/jpg',
40 | },
41 | {
42 | test: /\.png/,
43 | exclude: /(node_modules|bower_components)/,
44 | loader: 'url-loader?limit=10000&mimetype=image/png',
45 | },
46 | {
47 | test: /\.css$/,
48 | exclude: /[/\\]src[/\\]/,
49 | loaders: [
50 | 'style-loader?sourceMap',
51 | 'css-loader',
52 | ],
53 | },
54 | {
55 | test: /\.scss$/,
56 | exclude: /[/\\](node_modules|bower_components|public_out\/)[/\\]/,
57 | loaders: [
58 | 'style-loader?sourceMap',
59 | 'css-loader?modules&importLoaders=1&localIdentName=[path]___[name]__[local]___[hash:base64:5]&sourceMap',
60 | 'postcss-loader',
61 | 'sass-loader',
62 | ],
63 | },
64 | {
65 | test: /\.css$/,
66 | exclude: /[/\\](node_modules|bower_components|public_out\/)[/\\]/,
67 | loaders: [
68 | 'style-loader?sourceMap',
69 | 'css-loader?modules&importLoaders=1&localIdentName=[path]___[name]__[local]___[hash:base64:5]&sourceMap'
70 | ],
71 | },
72 | ];
73 |
--------------------------------------------------------------------------------
/public/components/AppList.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 | import { connect, Dispatch } from 'react-redux';
3 |
4 | import AppCard from './AppCard';
5 | import PageLoader from './PageLoader';
6 |
7 | import { fetchApps, setApps } from '../actions/apps';
8 |
9 | import * as styles from './AppList.scss';
10 |
11 | interface AppListReduxProps {
12 | apps: AppsSubState;
13 | }
14 | interface AppListReduxDispatchProps {
15 | setApps: (apps: NucleusApp[]) => any;
16 | }
17 | interface AppListComponentProps {}
18 |
19 | class AppList extends React.PureComponent {
20 | state = { loading: false };
21 |
22 | componentDidMount() {
23 | if (!this.props.apps) {
24 | this.fetchApps();
25 | }
26 | }
27 |
28 | async fetchApps() {
29 | this.setState({
30 | loading: true,
31 | });
32 | this.props.setApps(await fetchApps());
33 | this.setState({
34 | loading: false,
35 | });
36 | }
37 |
38 | render() {
39 | return (
40 |
41 |
Applications
42 |
43 | {
44 | !this.state.loading && this.props.apps
45 | ? (
46 | this.props.apps.length > 0
47 | ? (
48 | this.props.apps.map((app) => {
49 | return (
50 |
51 | );
52 | })
53 | ) : (
54 |
55 |
You don't have access to any Applications yet
56 | Make your own using the Create button in the sidebar, or ask one of your teammates to give you access
57 |
58 | )
59 | )
60 | : null
61 | }
62 |
63 | );
64 | }
65 | }
66 |
67 | const mapStateToProps = (state: AppState) => ({
68 | apps: state.apps,
69 | });
70 |
71 | const mapDispatchToProps = (dispatch: Dispatch) => ({
72 | setApps: (apps: NucleusApp[]) => dispatch(setApps(apps)),
73 | });
74 |
75 | export default connect(mapStateToProps, mapDispatchToProps)(AppList);
76 |
--------------------------------------------------------------------------------
/public/components/AppPage.scss:
--------------------------------------------------------------------------------
1 | .tokenContainer {
2 | width: 100%;
3 |
4 | > div {
5 | width: 100%;
6 | margin-bottom: 4px;
7 | }
8 |
9 | .token {
10 | width: 100%;
11 | border: 0;
12 | background: none;
13 | outline: 0;
14 | font-size: 14px;
15 | }
16 | }
17 |
18 | .iconContainer {
19 | width: 30%;
20 |
21 | > .img {
22 | width: 50%;
23 | margin-left: 25%;
24 | position: relative;
25 | cursor: pointer;
26 | overflow: hidden;
27 |
28 | > img {
29 | width: 100%;
30 | }
31 |
32 | > input[type="file"] {
33 | z-index: 999;
34 | }
35 |
36 | &::before {
37 | background: rgba(255, 255, 255, 0.5);
38 | width: 100%;
39 | height: 100%;
40 | position: absolute;
41 | content: '';
42 | display: block;
43 | top: 0;
44 | opacity: 0;
45 | transition: opacity 0.2s ease-in-out;
46 | cursor: pointer;
47 | }
48 |
49 | &:hover::before {
50 | opacity: 1;
51 | }
52 |
53 | $afterHeight: 28px;
54 | .after {
55 | background-color: rgba(0, 71, 162, 0.9);
56 | color: white;
57 | display: flex;
58 | align-items: center;
59 | justify-content: center;
60 | width: 100%;
61 | height: $afterHeight;
62 | position: absolute;
63 | bottom: -$afterHeight;
64 | transition: bottom 0.2s ease-in-out;
65 | cursor: pointer;
66 | z-index: 998;
67 | }
68 |
69 | &:hover .after {
70 | bottom: 0;
71 | }
72 | }
73 | }
74 |
75 | .tab {
76 | flex: 1;
77 | width: 100%;
78 |
79 | h5 {
80 | margin-top: 16px !important;
81 | margin-bottom: 8px;
82 | }
83 | }
84 |
85 | .codeCard {
86 | box-sizing: border-box;
87 | padding: 10px 15px;
88 | border-radius: 3px;
89 | color: inherit;
90 | box-shadow: 0 4px 8px -2px rgba(9,30,66,.28), 0 0 1px rgba(9,30,66,.3);
91 | margin: 16px 0 8px 0;
92 | width: 100%;
93 | }
94 |
95 | .notFound {
96 | text-align: center;
97 | height: calc(100vh - 96px);
98 | display: flex;
99 | align-content: center;
100 | justify-content: center;
101 | flex-direction: column;
102 | align-items: center;
103 |
104 | h1 {
105 | font-size: 144px;
106 | }
107 |
108 | h3 {
109 | font-size: 72px;
110 | }
111 | }
--------------------------------------------------------------------------------
/src/files/utils/win32.ts:
--------------------------------------------------------------------------------
1 | import * as path from 'path';
2 | import * as semver from 'semver';
3 | import Positioner from '../Positioner';
4 |
5 | export interface Win32HelperOpts {
6 | app: NucleusApp;
7 | channel: NucleusChannel;
8 | arch: string;
9 | store: IFileStore;
10 | positioner: Positioner;
11 | }
12 |
13 | export const generateWin32ReleasesStructure = async ({
14 | app,
15 | channel,
16 | arch,
17 | store,
18 | positioner,
19 | }: Win32HelperOpts, rollout = 100) => {
20 | const root = path.posix.join(app.slug, channel.id, 'win32', arch);
21 |
22 | const versions: NucleusVersion[] = channel.versions
23 | .filter(v => !v.dead && v.rollout >= rollout);
24 |
25 | if (versions.length === 0) return '';
26 | const releases: string[] = [];
27 |
28 | for (const version of versions.sort((a, b) => semver.compare(a.name, b.name))) {
29 | for (const file of version.files) {
30 | if (file.fileName.endsWith('-full.nupkg') || file.fileName.endsWith('-delta.nupkg')) {
31 | const fileSize = await store.getFileSize(positioner.getIndexKey(app, channel, version, file));
32 | const absoluteUrl = `${await store.getPublicBaseUrl()}/${root}/${file.fileName}`;
33 | releases.push(
34 | `${file.sha1.toUpperCase()} ${absoluteUrl} ${fileSize}`,
35 | );
36 | }
37 | }
38 | }
39 |
40 | return releases.join('\n');
41 | };
42 |
43 | export const updateWin32ReleasesFiles = async ({
44 | app,
45 | channel,
46 | arch,
47 | store,
48 | positioner,
49 | }: Win32HelperOpts) => {
50 | const root = path.posix.join(app.slug, channel.id, 'win32', arch);
51 | const releasesKey = path.posix.join(root, 'RELEASES');
52 | const releases = await generateWin32ReleasesStructure(
53 | {
54 | app,
55 | channel,
56 | arch,
57 | store,
58 | positioner,
59 | },
60 | 0, // The default RELEASES file ignores all rollout numbers
61 | );
62 | await store.putFile(releasesKey, Buffer.from(releases, 'utf8'), true);
63 |
64 | for (let rollout = 0; rollout <= 100; rollout += 1) {
65 | const rolloutKey = path.posix.join(root, `${rollout}`, 'RELEASES');
66 | const rolloutReleases = await generateWin32ReleasesStructure(
67 | {
68 | app,
69 | channel,
70 | arch,
71 | store,
72 | positioner,
73 | },
74 | rollout,
75 | );
76 | await store.putFile(rolloutKey, Buffer.from(rolloutReleases, 'utf8'), true);
77 | }
78 | };
79 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Code of Conduct
2 |
3 | As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities.
4 |
5 | We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality.
6 |
7 | Examples of unacceptable behavior by participants include:
8 |
9 | * The use of sexualized language or imagery
10 | * Personal attacks
11 | * Trolling or insulting/derogatory comments
12 | * Public or private harassment
13 | * Publishing other's private information, such as physical or electronic addresses, without explicit permission
14 | * Submitting contributions or comments that you know to violate the intellectual property or privacy rights of others
15 | * Other unethical or unprofessional conduct
16 |
17 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
18 | By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team.
19 |
20 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community.
21 |
22 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting a project maintainer. Complaints will result in a response and be reviewed and investigated in a way that is deemed necessary and appropriate to the circumstances. Maintainers are obligated to maintain confidentiality with regard to the reporter of an incident.
23 |
24 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.3.0, available at [http://contributor-covenant.org/version/1/3/0/][version]
25 |
26 | [homepage]: http://contributor-covenant.org
27 | [version]: http://contributor-covenant.org/version/1/3/0/
--------------------------------------------------------------------------------
/public/App.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 | import { Router, Route, Redirect, browserHistory } from 'react-router';
3 | import { connect } from 'react-redux';
4 |
5 | import { setBaseUpdateUrl } from './actions/base';
6 | import { setMigrations } from './actions/migrations';
7 | import { setUser } from './actions/user';
8 | import PageWrapper from './components/PageWrapper';
9 | import AppList from './components/AppList';
10 | import AppPage from './components/AppPage';
11 | import MigrationList from './components/MigrationList';
12 |
13 | import AkSpinner from '@atlaskit/spinner';
14 |
15 | import * as styles from './App.scss';
16 |
17 | class App extends React.PureComponent<{
18 | user: {
19 | user: User,
20 | signedIn: boolean,
21 | },
22 | setUser: typeof setUser,
23 | setBaseUpdateUrl: typeof setBaseUpdateUrl,
24 | setMigrations: typeof setMigrations,
25 | }, {}> {
26 | async componentDidMount() {
27 | const response = await fetch('/rest/config', { credentials: 'include' });
28 | const config: { user: User, app: Application, baseUpdateUrl: string, migrations: NucleusMigration[] } = await response.json();
29 | this.props.setBaseUpdateUrl(config.baseUpdateUrl);
30 | this.props.setMigrations(config.migrations);
31 | this.props.setUser(config.user);
32 | if (!config.user) {
33 | window.location.href = '/rest/auth/login';
34 | }
35 | }
36 |
37 | render() {
38 | if (!this.props.user || !this.props.user.signedIn) {
39 | return (
40 |
43 | );
44 | }
45 |
46 | return (
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 | {/* */}
55 |
56 |
57 | );
58 | }
59 | }
60 |
61 | const mapStateToProps = (state: AppState) => ({
62 | user: state.user,
63 | });
64 |
65 | const mapDispatchToProps = dispatch => ({
66 | setUser: user => dispatch(setUser(user)),
67 | setBaseUpdateUrl: url => dispatch(setBaseUpdateUrl(url)),
68 | setMigrations: migrations => dispatch(setMigrations(migrations)),
69 | });
70 |
71 | export default connect(mapStateToProps, mapDispatchToProps)(App);
72 |
--------------------------------------------------------------------------------
/docs/WebHooks.md:
--------------------------------------------------------------------------------
1 | # WebHooks
2 |
3 | ## Registering a WebHook
4 |
5 | WebHooks are registered on a per-app level on the app configuration screen
6 | inside Nucleus.
7 |
8 | ## WebHook Details
9 |
10 | ### Validating a WebHook's origin
11 |
12 | When you create a WebHook you will receive a secret string; we will send that
13 | string in the `Authorization` header of our requests. You should validate this
14 | yourself to ensure you aren't receiving bogus information (see the example receiver sketched at the end of this document).
15 |
16 | ### WebHook Body
17 |
18 | All WebHook requests we send will be `POST` requests with a `Content-Type` of
19 | `application/json`.
20 |
21 | #### WebHook Registered
22 |
23 | ```js
24 | {
25 | type: 'register',
26 | nucleusOrigin: String,
27 | app: {
28 | id: Number,
29 | name: String,
30 | slug: String
31 | }
32 | }
33 | ```
34 |
35 | #### WebHook Unregistered
36 |
37 | ```js
38 | {
39 | type: 'unregister',
40 | nucleusOrigin: String,
41 | app: {
42 | id: Number,
43 | name: String,
44 | slug: String
45 | }
46 | }
47 | ```
48 |
49 | #### Channel Created
50 |
51 | ```js
52 | {
53 | type: 'channel_created',
54 | nucleusOrigin: String,
55 | app: {
56 | id: Number,
57 | name: String,
58 | slug: String
59 | },
60 | channel: {
61 | id: String,
62 | name: String
63 | }
64 | }
65 | ```
66 |
67 | #### Version Created
68 |
69 | ```js
70 | {
71 | type: 'version_created',
72 | nucleusOrigin: String,
73 | app: {
74 | id: Number,
75 | name: String,
76 | slug: String
77 | },
78 | channel: {
79 | id: String,
80 | name: String
81 | },
82 | version: {
83 | name: String
84 | }
85 | }
86 | ```
87 |
88 | #### Version File Uploaded
89 |
90 | Please note that the `files` array in the object below will include
91 | **all** files that have been uploaded into that version, not just
92 | the ones that caused this hook to fire.
93 |
94 | Also, if multiple files are uploaded simultaneously, this hook will
95 | only be called once, with multiple new files in the `files` array.
96 |
97 | ```js
98 | {
99 | type: 'version_file_uploaded',
100 | nucleusOrigin: String,
101 | app: {
102 | id: Number,
103 | name: String,
104 | slug: String
105 | },
106 | channel: {
107 | id: String,
108 | name: String
109 | },
110 | version: {
111 | name: String,
112 | files: [{
113 | fileName: String,
114 | platform: String,
115 | arch: String,
116 | type: 'installer' | 'update'
117 | }]
118 | }
119 | }
120 | ```
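121 |
122 | ### Example receiver
123 |
124 | A minimal sketch of a WebHook receiver that checks the `Authorization` header
125 | against the secret Nucleus gave you when the hook was registered. The route
126 | path and the `WEBHOOK_SECRET` environment variable are placeholder
127 | assumptions; comparing the header verbatim to the secret follows the
128 | description above.
129 |
130 | ```ts
131 | import * as express from 'express';
132 |
133 | const app = express();
134 | app.use(express.json());
135 |
136 | app.post('/nucleus-hook', (req, res) => {
137 |   // Reject requests that don't carry the shared secret.
138 |   if (req.headers.authorization !== process.env.WEBHOOK_SECRET) {
139 |     return res.status(401).send();
140 |   }
141 |   console.log('received Nucleus event:', req.body.type);
142 |   res.status(200).send();
143 | });
144 |
145 | app.listen(4000);
146 | ```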
--------------------------------------------------------------------------------
/src/config.ts:
--------------------------------------------------------------------------------
1 | import * as path from 'path';
2 |
3 | let config: IConfig | null = null;
4 |
5 | interface ResolvedConfig {
6 | err?: any;
7 | config: IConfig | null;
8 | }
9 |
10 | const resolveConfig = (path: string): ResolvedConfig => {
11 | try {
12 | require.resolve(path);
13 | } catch {
14 | return {
15 | config: null,
16 | };
17 | }
18 | try {
19 | return {
20 | config: require(path),
21 | };
22 | } catch (err) {
23 | return {
24 | err,
25 | config: null,
26 | };
27 | }
28 | };
29 |
30 | const possibleConfigs = [
31 | path.resolve(__dirname, '../config.js'),
32 | path.resolve(process.cwd(), 'config.js'),
33 | ];
34 |
35 | if (process.argv.length > 2) {
36 | possibleConfigs.unshift(path.resolve(process.cwd(), process.argv[2]));
37 | }
38 |
39 | for (const option of possibleConfigs) {
40 | const resolvedConfig = resolveConfig(option);
41 | if (resolvedConfig.config) {
42 | config = resolvedConfig.config;
43 | break;
44 | }
45 | if (resolvedConfig.err) {
46 | console.error('An error occurred while loading your config file');
47 | console.error('Please ensure it does not have syntax errors');
48 | console.error(resolvedConfig.err);
49 | process.exit(1);
50 | }
51 | }
52 |
53 | if (!config) {
54 | console.error('Failed to find your config file at any of the search paths');
55 | console.error('Paths:', possibleConfigs);
56 | console.error('Please ensure one exists');
57 | process.exit(1);
58 | }
59 |
60 | export const port = config!.port;
61 | export const baseURL = config!.baseURL;
62 | export const fileStrategy = config!.fileStrategy;
63 | export const dbStrategy = config!.dbStrategy;
64 | export const github: GitHubOptions = config!.github || {};
65 | export const openid: OpenIDOptions = config!.openid || {};
66 | export const adminIdentifiers = config!.adminIdentifiers || [];
67 | export const authStrategy = config!.authStrategy;
68 | export const s3 = config!.s3;
69 | export const local = config!.local;
70 | export const sequelize = config!.sequelize;
71 | export const localAuth = config!.localAuth;
72 | export const sessionConfig = config!.sessionConfig;
73 | export const organization = config!.organization;
74 | export const gpgSigningKey = config!.gpgSigningKey;
75 | export const defaultRollout = config!.defaultRollout || 0;
76 |
77 | if (
78 | defaultRollout < 0 ||
79 | defaultRollout > 100 ||
80 | typeof defaultRollout !== 'number' ||
81 | Math.round(defaultRollout) !== defaultRollout
82 | ) {
83 | throw new Error(
84 | `Expected 'config.defaultRollout' to be an integer between 0 and 100 but it was "${defaultRollout}"`,
85 | );
86 | }
87 |
--------------------------------------------------------------------------------
/public/components/WebHookLogsModal.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 | import { connect, Dispatch } from 'react-redux';
3 |
4 | import AkButton from '@atlaskit/button';
5 | import AkFieldBase, { Label as AkLabel } from '@atlaskit/field-base';
6 | import AkFieldTextExported from '@atlaskit/field-text';
7 | import AkModalDialog from '@atlaskit/modal-dialog';
8 | import AkSpinner from '@atlaskit/spinner';
9 |
10 | import { fetchApps, setApps } from '../actions/apps';
11 |
12 | import * as styles from './WebHookLogsModal.scss';
13 |
14 | /* tslint:disable */
15 | const AkFieldText = AkFieldTextExported as any;
16 | /* tslint:enable */
17 |
18 | interface WebHookLogsModalProps {
19 | onDismiss: () => void;
20 | isOpen: boolean;
21 | hook: NucleusWebHook;
22 | }
23 |
24 | export default class WebHookLogsModal extends React.PureComponent {
25 | close = () => {
26 | this.props.onDismiss();
27 | }
28 |
29 | render() {
30 | return (
31 | Logs: {this.props.hook ? this.props.hook.url : ''}}
33 | footer={}
36 | isOpen={this.props.isOpen}
37 | onDialogDismissed={this.close}
38 | >
39 | {
40 | this.props.hook
41 | ? (
42 |
43 | {
44 | this.props.hook.errors.length > 0
45 | ? (
46 | this.props.hook.errors
47 | .concat(this.props.hook.errors)
48 | .concat(this.props.hook.errors)
49 | .concat(this.props.hook.errors)
50 | .concat(this.props.hook.errors)
51 | .concat(this.props.hook.errors)
52 | .concat(this.props.hook.errors)
53 | .concat(this.props.hook.errors)
54 | .concat(this.props.hook.errors)
55 | .concat(this.props.hook.errors)
56 | .concat(this.props.hook.errors).map(error => (
57 |
58 | Message: {error.message}{'\n'}
59 | Response Code: {error.responseCode}{'\n'}
60 | Response Body: {error.responseBody}
61 |
62 | ))
63 | ) : (
64 |
No Logs
65 | )
66 | }
67 |
68 | ) : null
69 | }
70 |
71 | );
72 | }
73 | }
74 |
--------------------------------------------------------------------------------
/src/__spec__/_helpers.ts:
--------------------------------------------------------------------------------
1 | import * as chai from 'chai';
2 | import * as cp from 'child_process';
3 | import * as fs from 'fs-extra';
4 | import * as http from 'http';
5 | import * as path from 'path';
6 |
7 | import LocalStore from '../files/local/LocalStore';
8 |
9 | const serveHandler = require('serve-handler');
10 |
11 | let child: cp.ChildProcess | null = null;
12 | let server: http.Server | null = null;
13 |
14 | export const startTestNucleus = async function (this: any) {
15 | this.timeout(7000);
16 |
17 | if (child !== null || server !== null) {
18 | throw new Error('Nucleus is already running, something went wrong in the tests');
19 | }
20 | await fs.remove(path.resolve(__dirname, 'fixtures', '.files'));
21 | await fs.remove(path.resolve(__dirname, 'fixtures', 'test.sqlite'));
22 |
23 | child = cp.spawn(
24 | process.execPath,
25 | [
26 | path.resolve(__dirname, '../../lib/index.js'),
27 | path.resolve(__dirname, './fixtures/test.config.js'),
28 | ],
29 | {
30 | cwd: path.resolve(__dirname, '..'),
31 | env: Object.assign({}, process.env, {
32 | DEBUG: 'nucleus*',
33 | UNSAFELY_DISABLE_NUCLEUS_AUTH: 'true',
34 | }),
35 | stdio: 'inherit',
36 | },
37 | );
38 | server = http.createServer((req, res) => {
39 | return serveHandler(req, res, {
40 | public: path.resolve(__dirname, 'fixtures/.files'),
41 | });
42 | });
43 | await new Promise(resolve => server!.listen(9999, () => resolve()));
44 | let alive = false;
45 | while (!alive) {
46 | try {
47 | await request.get('/').send();
48 | alive = true;
49 | } catch {
50 | // Ignore
51 | }
52 | }
53 | };
54 |
55 | export const stopTestNucleus = async () => {
56 | if (child) {
57 | const waiter = new Promise(resolve => child!.on('exit', () => resolve()));
58 | child.kill();
59 | await waiter;
60 | child = null;
61 | }
62 | if (server) {
63 | await new Promise(resolve => server!.close(() => resolve()));
64 | server = null;
65 | }
66 | await fs.remove(path.resolve(__dirname, 'fixtures', '.files'));
67 | await fs.remove(path.resolve(__dirname, 'fixtures', 'test.sqlite'));
68 | };
69 |
70 | export const request = chai.request('http://localhost:8987/rest');
71 |
72 | export const store = new LocalStore({
73 | root: path.resolve(__dirname, 'fixtures', '.files'),
74 | staticUrl: 'http://localhost:9999',
75 | });
76 |
77 | export const createApp = async (): Promise<NucleusApp> => {
78 | const response = await request
79 | .post('/app')
80 | .field('name', 'Test App')
81 | .attach('icon', fs.createReadStream(path.resolve(__dirname, 'fixtures', 'icon.png')));
82 |
83 | return response.body;
84 | };
85 |
--------------------------------------------------------------------------------
/public/components/ChannelVersionList.scss:
--------------------------------------------------------------------------------
1 | .versionSelect {
2 | box-sizing: border-box;
3 | padding: 10px 15px;
4 | border-radius: 3px;
5 | color: inherit;
6 | -webkit-box-shadow: 0 4px 8px -2px rgba(9, 30, 66, 0.28), 0 0 1px rgba(9, 30, 66, 0.3);
7 | box-shadow: 0 4px 8px -2px rgba(9, 30, 66, 0.28), 0 0 1px rgba(9, 30, 66, 0.3);
8 | margin: 16px 8px 8px 8px;
9 | width: calc(20% - 16px);
10 | text-align: center;
11 | cursor: pointer;
12 | position: relative;
13 | transition: background 0.2s linear, color 0.2s linear;
14 | background: transparent;
15 | color: black;
16 | display: inline-block;
17 | position: relative;
18 |
19 | &:hover {
20 | background: #0F4AA3;
21 | color: white;
22 | }
23 |
24 | .versionRolloutSuper {
25 | font-size: 8px;
26 | position: absolute;
27 | top: 4px;
28 | right: 4px;
29 | font-style: italic;
30 | }
31 | }
32 |
33 | .fileDownloadContainer {
34 | display: inline-block;
35 | width: 50%;
36 | padding: 8px;
37 | box-sizing: border-box;
38 |
39 | .fileDownload {
40 | color: #172B4D;
41 | background-color: #F4F5F7;
42 | display: flex;
43 | flex-direction: column;
44 | font-size: 12px;
45 | border-radius: 3px;
46 | box-shadow: 0 1px 1px rgba(9, 30, 66, 0.2), 0 0 1px 0 rgba(9, 30, 66, 0.24);
47 | transition: background-color 0.2s linear;
48 | cursor: pointer;
49 |
50 | &:hover {
51 | background-color: rgb(228, 228, 228);
52 | }
53 |
54 | .fileName {
55 | height: 24px;
56 | margin: 16px 16px 12px 16px;
57 | color: #091E42;
58 | font-size: 14px;
59 | font-weight: 500;
60 | max-width: 100%;
61 | display: flex;
62 | align-items: center;
63 |
64 | > span {
65 | margin-right: 8px;
66 | }
67 |
68 | > div {
69 | max-width: calc(100% - 40px);
70 | overflow: hidden;
71 | text-overflow: ellipsis;
72 | white-space: nowrap;
73 | }
74 | }
75 |
76 | .fileInfo {
77 | color: #5E6C84;
78 | height: 24px;
79 | margin: 0 8px 12px 16px;
80 |
81 | > span {
82 | margin-right: 12px;
83 | }
84 | }
85 | }
86 | }
87 |
88 | .versionModalSpinner {
89 | position: absolute;
90 | top: 0;
91 | left: 0;
92 | width: 100%;
93 | height: 100%;
94 | background-color: rgba(255, 255, 255, 0.8);
95 | pointer-events: none;
96 | opacity: 0;
97 | transition: opacity 0.1s ease-in-out;
98 | display: flex;
99 | align-items: center;
100 | justify-content: center;
101 |
102 | &.versionModalSpinning {
103 | opacity: 1;
104 | pointer-events: all;
105 | }
106 | }
--------------------------------------------------------------------------------
/src/files/s3/CloudFrontBatchInvalidator.ts:
--------------------------------------------------------------------------------
1 | import * as AWS from 'aws-sdk';
2 | import * as debug from 'debug';
3 |
4 | import S3Store from './S3Store';
5 |
6 | const hat = require('hat');
7 |
8 | const d = debug('nucleus:s3:cloudfront-invalidator');
9 |
10 | const invalidators: {
11 | [id: string]: CloudFrontBatchInvalidator;
12 | } = {};
13 |
14 | const INVALIDATE_PER_ATTEMPT = 500;
15 |
16 | export class CloudFrontBatchInvalidator {
17 | private lastAdd: number = 0;
18 | private queue: string[] = [];
19 | nextTimer: NodeJS.Timer;
20 |
21 | static noopInvalidator = new CloudFrontBatchInvalidator(null);
22 |
23 | static get(store: S3Store) {
24 | if (!store.s3Config.cloudfront) {
25 | return CloudFrontBatchInvalidator.noopInvalidator;
26 | }
27 | if (!invalidators[store.s3Config.cloudfront.distributionId]) {
28 | invalidators[store.s3Config.cloudfront.distributionId] = new CloudFrontBatchInvalidator(store.s3Config.cloudfront);
29 | }
30 | return invalidators[store.s3Config.cloudfront.distributionId];
31 | }
32 |
33 | private constructor(private cloudfrontConfig: S3Options['cloudfront']) {
34 | if (cloudfrontConfig) {
35 | this.queueUp();
36 | }
37 | }
38 |
39 | public addToBatch = (key: string) => {
40 | if (!this.cloudfrontConfig) return;
41 | const sanitizedKey = encodeURI(`/${key}`);
42 | if (this.queue.some(item => item === sanitizedKey)) return;
43 | this.queue.push(sanitizedKey);
44 | this.lastAdd = Date.now();
45 | }
46 |
47 | private queueUp() {
48 | clearTimeout(this.nextTimer);
49 | this.nextTimer = setTimeout(() => this.runJob(), 30000);
50 | }
51 |
52 | runJob() {
53 | if (this.queue.length === 0 || Date.now() - this.lastAdd <= 20000) {
54 | return this.queueUp();
55 | }
56 | d('running cloudfront batch invalidator');
57 | const itemsToUse = this.queue.slice(0, INVALIDATE_PER_ATTEMPT);
58 | this.queue = this.queue.slice(INVALIDATE_PER_ATTEMPT);
59 |
60 | const cloudFront = new AWS.CloudFront();
61 | cloudFront.createInvalidation({
62 | DistributionId: this.cloudfrontConfig!.distributionId,
63 | InvalidationBatch: {
64 | CallerReference: hat(),
65 | Paths: {
66 | Quantity: itemsToUse.length,
67 | Items: itemsToUse,
68 | },
69 | },
70 | }, (err, invalidateInfo) => {
71 | if (err) {
72 | console.error(JSON.stringify({
73 | err,
74 | message: 'Failed to invalidate',
75 | keys: itemsToUse,
76 | }));
77 | this.queue.push(...itemsToUse);
78 | } else {
79 | d('batch invalidation succeeded, moving along');
80 | }
81 | this.queueUp();
82 | });
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/src/__spec__/channel_spec.ts:
--------------------------------------------------------------------------------
1 | import * as chai from 'chai';
2 |
3 | import * as helpers from './_helpers';
4 |
5 | const { expect } = chai;
6 |
7 | describe('channel endpoints', () => {
8 | before(helpers.startTestNucleus);
9 | after(helpers.stopTestNucleus);
10 |
11 | describe('/app/:id/channel', () => {
12 | describe('POST', () => {
13 | let app: NucleusApp;
14 |
15 | before(async () => {
16 | app = await helpers.createApp();
17 | });
18 |
19 | it('should error if an invalid app ID is provided', async () => {
20 | const response = await helpers.request
21 | .post('/app/10000/channel');
22 |
23 | expect(response).to.have.status(404);
24 | expect(response.body.error).to.equal('App not found');
25 | });
26 |
27 | it('should error if no name is provided', async () => {
28 | const response = await helpers.request
29 | .post(`/app/${app.id}/channel`);
30 |
31 | expect(response).to.have.status(400);
32 | expect(response.body.error).to.equal('Missing required body param: "name"');
33 | });
34 |
35 | it('should create the channel when a name is provided', async function () {
36 | this.timeout(60000);
37 |
38 | const response = await helpers.request
39 | .post(`/app/${app.id}/channel`)
40 | .send({
41 | name: 'Stable',
42 | });
43 |
44 | expect(response).to.have.status(200);
45 | expect(response).to.be.json;
46 | expect(response.body).to.have.property('name', 'Stable');
47 | expect(response.body.versions).to.deep.equal([], 'should have no versions');
48 |
49 | expect(await helpers.store.hasFile(`${app.slug}/${response.body.id}/versions.json`))
50 | .to.equal(true, 'should create the versions.json file for the channel');
51 |
52 | expect(await helpers.store.hasFile(`${app.slug}/${response.body.id}/linux/${app.slug}.repo`))
53 | .to.equal(true, 'should create the redhat repo file');
54 |
55 | expect(await helpers.store.hasFile(`${app.slug}/${response.body.id}/linux/debian/binary/Release`))
56 | .to.equal(true, 'should create the debian apt repo metadata');
57 |
58 | expect(await helpers.store.hasFile(`${app.slug}/${response.body.id}/linux/redhat/repodata/repomd.xml`))
59 | .to.equal(true, 'should create the redhat yum repo metadata');
60 | });
61 |
62 | it('should persist the created channel in the /app/:id endpoint', async () => {
63 | const response = await helpers.request
64 | .get(`/app/${app.id}`);
65 |
66 | expect(response).to.have.status(200);
67 | expect(response).to.be.json;
68 | expect(response.body.channels).to.have.lengthOf(1);
69 | expect(response.body.channels[0].name).to.equal('Stable');
70 | });
71 | });
72 | });
73 | });
74 |
--------------------------------------------------------------------------------
/src/migrations/file-index/FileIndexMigration.ts:
--------------------------------------------------------------------------------
1 | import * as path from 'path';
2 |
3 | import BaseMigration, { MigrationItem } from '../BaseMigration';
4 | import driver from '../../db/driver';
5 | import store from '../../files/store';
6 | import Positioner from '../../files/Positioner';
7 | import { IDBDriver } from '../../db/BaseDriver';
8 |
9 | interface FileIndexMigrationItem {
10 | indexKey: string;
11 | originalKey: string;
12 | }
13 |
14 | const SIMULTANEOUS_FETCHES = 5;
15 |
16 | export default class FileIndexMigration extends BaseMigration<FileIndexMigrationItem> {
17 | key = 'file-index';
18 | friendlyName = 'File Index Migration';
19 | private positioner: Positioner;
20 |
21 | constructor(private mStore: IFileStore = store, private mDriver: IDBDriver = driver) {
22 | super();
23 | this.positioner = new Positioner(mStore);
24 | }
25 |
26 | async getItems() {
27 | const apps = await this.mDriver.getApps();
28 | const itemFetchers: (() => Promise<MigrationItem<FileIndexMigrationItem>>)[] = [];
29 |
30 | for (const app of apps) {
31 | for (const channel of app.channels) {
32 | for (const version of channel.versions) {
33 | for (const file of version.files) {
34 | const indexKey = this.positioner.getIndexKey(app, channel, version, file);
35 | let originalKey = path.posix.join(app.slug, channel.id, file.platform, file.arch, file.fileName);
36 | if (file.platform === 'linux') {
37 | if (/\.deb$/.test(file.fileName)) {
38 | originalKey = path.posix.join(app.slug, channel.id, file.platform, 'debian', 'binary', `${version.name}-${file.fileName}`);
39 | } else if (/\.rpm$/.test(file.fileName)) {
40 | originalKey = path.posix.join(app.slug, channel.id, file.platform, 'redhat', `${version.name}-${file.fileName}`);
41 | }
42 | }
43 |
44 | itemFetchers.push(async () => ({
45 | done: await this.mStore.hasFile(indexKey),
46 | data: {
47 | indexKey,
48 | originalKey,
49 | },
50 | }));
51 | }
52 | }
53 | }
54 | }
55 |
56 | const items: MigrationItem<FileIndexMigrationItem>[] = [];
57 |
58 | const fetchItem = async () => {
59 | if (itemFetchers.length === 0) return;
60 | const fetcher = itemFetchers.pop()!;
61 |
62 | items.push(await fetcher());
63 | await fetchItem();
64 | };
65 | await Promise.all((Array(SIMULTANEOUS_FETCHES)).fill(null).map(() => fetchItem()));
66 |
67 | return items;
68 | }
69 |
70 | async runOnItem(item: MigrationItem<FileIndexMigrationItem>) {
71 | if (item.done) return;
72 | this.d(`copying file from ${item.data.originalKey} to ${item.data.indexKey}`);
73 |
74 | const file = await this.mStore.getFile(item.data.originalKey);
75 | await this.mStore.putFile(item.data.indexKey, file);
76 | }
77 | }
78 |
--------------------------------------------------------------------------------
/src/migrations/latest-installer/LatestInstallerMigration.ts:
--------------------------------------------------------------------------------
1 | import * as semver from 'semver';
2 |
3 | import BaseMigration, { MigrationItem } from '../BaseMigration';
4 | import driver from '../../db/driver';
5 | import store from '../../files/store';
6 | import Positioner from '../../files/Positioner';
7 | import { IDBDriver } from '../../db/BaseDriver';
8 |
9 | interface LatestInstallerMigrationItem {
10 | latestKey: string;
11 | indexKey: string;
12 | version: string;
13 | }
14 |
15 | const SIMULTANEOUS_FETCHES = 5;
16 |
17 | export default class LatestInstallerMigration extends BaseMigration<LatestInstallerMigrationItem> {
18 | key = 'latest-installer';
19 | friendlyName = 'Latest Installer Migration';
20 | dependsOn = ['file-index'];
21 | private positioner: Positioner;
22 |
23 | constructor(private mStore: IFileStore = store, private mDriver: IDBDriver = driver) {
24 | super();
25 | this.positioner = new Positioner(mStore);
26 | }
27 |
28 | async getItems() {
29 | const apps = await this.mDriver.getApps();
30 | const itemFetchers: (() => Promise<MigrationItem<LatestInstallerMigrationItem>>)[] = [];
31 |
32 | const latestThings: {
33 | [latestKey: string]: {
34 | indexKey: string;
35 | version: string;
36 | };
37 | } = {};
38 |
39 | for (const app of apps) {
40 | for (const channel of app.channels) {
41 | const rolledOutVersions = channel.versions.filter(v => v.rollout === 100 && !v.dead);
42 |
43 | for (const version of rolledOutVersions.sort((a, b) => semver.compare(a.name, b.name))) {
44 | for (const file of version.files) {
45 | if (file.type !== 'installer') continue;
46 |
47 | const latestKey = this.positioner.getLatestKey(app, channel, version, file);
48 | const indexKey = this.positioner.getIndexKey(app, channel, version, file);
49 |
50 | latestThings[latestKey] = {
51 | indexKey,
52 | version: version.name,
53 | };
54 | }
55 | }
56 | }
57 | }
58 |
59 | for (const latestKey in latestThings) {
60 | const latestThing = latestThings[latestKey];
61 |
62 | itemFetchers.push(async () => ({
63 | done: (await this.mStore.getFile(`${latestKey}.ref`)).toString() === latestThing.version,
64 | data: {
65 | latestKey,
66 | indexKey: latestThing.indexKey,
67 | version: latestThing.version,
68 | },
69 | }));
70 | }
71 |
72 | const items: MigrationItem<LatestInstallerMigrationItem>[] = [];
73 |
74 | const fetchItem = async () => {
75 | if (itemFetchers.length === 0) return;
76 | const fetcher = itemFetchers.pop()!;
77 |
78 | items.push(await fetcher());
79 | await fetchItem();
80 | };
81 | await Promise.all((Array(SIMULTANEOUS_FETCHES)).fill(null).map(() => fetchItem()));
82 |
83 | return items;
84 | }
85 |
86 | async runOnItem(item: MigrationItem<LatestInstallerMigrationItem>) {
87 | if (item.done) return;
88 | this.d(`copying latest file from ${item.data.indexKey} to ${item.data.latestKey} for v${item.data.version}`);
89 |
90 | const file = await this.mStore.getFile(item.data.indexKey);
91 | await this.mStore.putFile(item.data.latestKey, file, true);
92 | this.mStore.putFile(`${item.data.latestKey}.ref`, Buffer.from(item.data.version), true);
93 | }
94 | }
95 |
--------------------------------------------------------------------------------
/src/files/__spec__/LocalStore_spec.ts:
--------------------------------------------------------------------------------
1 | import { expect } from 'chai';
2 | import * as fs from 'fs-extra';
3 | import * as os from 'os';
4 | import * as path from 'path';
5 |
6 | import LocalStore from '../local/LocalStore';
7 |
8 | describe('LocalStore', () => {
9 | let dir: string;
10 | let store: LocalStore;
11 | let localConfig: LocalOptions;
12 |
13 | beforeEach(async () => {
14 | dir = await fs.mkdtemp(path.join(os.tmpdir(), '/'));
15 | localConfig = {
16 | root: dir,
17 | staticUrl: 'https://static.url.com/thing',
18 | };
19 | store = new LocalStore(localConfig);
20 | });
21 |
22 | afterEach(async () => {
23 | await fs.remove(dir);
24 | });
25 |
26 | describe('getPublicBaseUrl', () => {
27 | it('should return the staticUrl config property', async () => {
28 | expect(await store.getPublicBaseUrl()).to.equal('https://static.url.com/thing');
29 | });
30 | });
31 |
32 | describe('putFile', () => {
33 | it('should write files to the correct directory', async () => {
34 | await store.putFile('key', Buffer.from('value'));
35 | expect(await fs.readFile(path.resolve(dir, 'key'), 'utf8')).to.equal('value');
36 | });
37 |
38 | it('should not overwrite files by default', async () => {
39 | expect(await store.putFile('key', Buffer.from('value'))).to.equal(true);
40 | expect(await fs.readFile(path.resolve(dir, 'key'), 'utf8')).to.equal('value');
41 | expect(await store.putFile('key', Buffer.from('value2'))).to.equal(false);
42 | expect(await fs.readFile(path.resolve(dir, 'key'), 'utf8')).to.equal('value');
43 | });
44 |
45 | it('should overwrite files when overwrite = true', async () => {
46 | expect(await store.putFile('key', Buffer.from('value'))).to.equal(true);
47 | expect(await fs.readFile(path.resolve(dir, 'key'), 'utf8')).to.equal('value');
48 | expect(await store.putFile('key', Buffer.from('value2'), true)).to.equal(true);
49 | expect(await fs.readFile(path.resolve(dir, 'key'), 'utf8')).to.equal('value2');
50 | });
51 |
52 | it('should write to deep keys', async () => {
53 | expect(await store.putFile('key/goes/super/duper/deep', Buffer.from('deepValue'))).to.equal(true);
54 | expect(await fs.readFile(
55 | path.resolve(dir, 'key', 'goes', 'super', 'duper', 'deep'),
56 | 'utf8',
57 | )).to.equal('deepValue');
58 | });
59 | });
60 |
61 | describe('getFile', () => {
62 | it('should default to empty string buffer', async () => {
63 | expect((await store.getFile('key')).toString()).to.equal('');
64 | });
65 |
66 | it('should load the file contents if it exists', async () => {
67 | await store.putFile('key', Buffer.from('existing'));
68 | expect((await store.getFile('key')).toString()).to.equal('existing');
69 | });
70 | });
71 |
72 | describe('hasFile', () => {
73 | it('should return true when the file exists', async () => {
74 | await store.putFile('key', Buffer.from(''));
75 | expect(await store.hasFile('key')).to.equal(true);
76 | });
77 |
78 | it('should return false when the file does not exist', async () => {
79 | expect(await store.hasFile('key')).to.equal(false);
80 | });
81 |
82 | it('should return false when the path exists but is not a file', async () => {
83 | await store.putFile('dir/key', Buffer.from(''));
84 | expect(await store.hasFile('dir')).to.equal(false);
85 | });
86 | });
87 | });
88 |
--------------------------------------------------------------------------------
/src/files/utils/darwin.ts:
--------------------------------------------------------------------------------
1 | import * as path from 'path';
2 | import * as semver from 'semver';
3 |
4 | export interface MacOSRelease {
5 | version: string;
6 | updateTo: {
7 | version: string;
8 | pub_date: string;
9 | notes: string;
10 | name: string;
11 | url: string;
12 | };
13 | }
14 |
15 | export interface MacOSReleasesStruct {
16 | currentRelease: string;
17 | releases: MacOSRelease[];
18 | }
19 |
20 | export interface DarwinHelperOpts {
21 | app: NucleusApp;
22 | channel: NucleusChannel;
23 | arch: string;
24 | store: IFileStore;
25 | }
26 |
27 | export const generateDarwinReleasesStructure = async ({
28 | app,
29 | channel,
30 | arch,
31 | store,
32 | }: DarwinHelperOpts, rollout = 100) => {
33 | const root = path.posix.join(app.slug, channel.id, 'darwin', arch);
34 | const versions: NucleusVersion[] = channel.versions
35 | .filter(v => !v.dead && v.rollout >= rollout)
36 | .filter((version) => {
37 | return version.files.find(
38 | f => f.fileName.endsWith('.zip') && f.platform === 'darwin' && f.arch === 'x64',
39 | );
40 | });
41 | const releasesJson: MacOSReleasesStruct = {
42 | releases: [],
43 | currentRelease: '',
44 | };
45 | if (versions.length === 0) return releasesJson;
46 |
47 | let greatestVersion = versions[0];
48 | for (const testVersion of versions) {
49 | if (semver.gt(testVersion.name, greatestVersion.name)) {
50 | greatestVersion = testVersion;
51 | }
52 | }
53 |
54 | releasesJson.currentRelease = greatestVersion.name;
55 |
56 | for (const version of versions) {
57 | if (!releasesJson.releases.some(release => release.version === version.name)) {
58 | const zipFileInVersion = version.files.find(
59 | f => f.fileName.endsWith('.zip') && f.platform === 'darwin' && f.arch === 'x64',
60 | )!;
61 | const zipFileKey = path.posix.join(root, zipFileInVersion.fileName);
62 | releasesJson.releases.push({
63 | version: version.name,
64 | updateTo: {
65 | version: version.name,
66 | // FIXME: We should store the creation date on the NucleusVersion
67 | pub_date: (new Date()).toString(),
68 | notes: '',
69 | name: version.name,
70 | url: encodeURI(`${await store.getPublicBaseUrl()}/${zipFileKey}`),
71 | },
72 | });
73 | }
74 | }
75 |
76 | return releasesJson;
77 | };
78 |
79 | export const updateDarwinReleasesFiles = async ({
80 | app,
81 | channel,
82 | arch,
83 | store,
84 | }: DarwinHelperOpts) => {
85 | const root = path.posix.join(app.slug, channel.id, 'darwin', arch);
86 | const releasesKey = path.posix.join(root, 'RELEASES.json');
87 | const releasesJson = await generateDarwinReleasesStructure(
88 | {
89 | app,
90 | channel,
91 | arch,
92 | store,
93 | },
94 | 0, // The default RELEASES.json file ignores all rollout numbers
95 | );
96 | await store.putFile(releasesKey, Buffer.from(JSON.stringify(releasesJson, null, 2), 'utf8'), true);
97 |
98 | for (let rollout = 0; rollout <= 100; rollout += 1) {
99 | const rolloutKey = path.posix.join(root, `${rollout}`, 'RELEASES.json');
100 | const json = await generateDarwinReleasesStructure(
101 | {
102 | app,
103 | channel,
104 | arch,
105 | store,
106 | },
107 | rollout,
108 | );
109 | await store.putFile(rolloutKey, Buffer.from(JSON.stringify(json, null, 2), 'utf8'), true);
110 | }
111 | };
112 |
--------------------------------------------------------------------------------
/public/components/PageWrapper.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 | import { connect } from 'react-redux';
3 | import { RouteComponentProps, Link } from 'react-router';
4 | import AkAvatar from '@atlaskit/avatar';
5 | import AkBanner from '@atlaskit/banner';
6 | import AddIcon from '@atlaskit/icon/glyph/add';
7 | import AkNavigation, { AkContainerTitle, AkNavigationItemGroup, AkNavigationItem, AkGlobalItem } from '@atlaskit/navigation';
8 |
9 | import CreateAppModal from './CreateAppModal';
10 | import UserDropDown from './UserDropDown';
11 | import Logo from '../assets/Logo';
12 |
13 | import * as styles from './PageWrapper.scss';
14 |
15 | /* tslint:disable */
16 | const LinkWrapper = (props) => <Link {...props} />;
17 | /* tslint:enable */
18 |
19 | interface PageWrapperReduxProps {
20 | user: UserSubState;
21 | hasPendingMigration: boolean;
22 | }
23 | interface PageWrapperComponentProps {}
24 |
25 | class PageWrapper extends React.PureComponent, {
26 | creatingApp: boolean,
27 | }> {
28 | state = {
29 | creatingApp: false,
30 | };
31 |
32 | private toggleCreate = () => {
33 | this.setState({
34 | creatingApp: !this.state.creatingApp,
35 | });
36 | }
37 |
38 | signedInSecondaryActions() {
39 | const photoUrl = (this.props.user.user.photos && this.props.user.user.photos.length > 0) ? this.props.user.user.photos[0].value : '';
40 | return [
41 |
42 |
43 | ,
44 | ];
45 | }
46 |
47 | signedOutSecondaryActions() {
48 | return [ ];
49 | }
50 |
51 | render() {
52 | const isSignedIn = this.props.user.signedIn;
53 | const navProps = isSignedIn ? () => ({
54 | globalSecondaryActions: this.signedInSecondaryActions(),
55 | }) : () => ({
56 | globalSecondaryActions: this.signedOutSecondaryActions(),
57 | });
58 | const isAdmin = this.props.user.signedIn ? this.props.user.user.isAdmin : false;
59 |
60 | return (
61 |
62 |
63 |
}
65 | globalPrimaryItemHref="/apps"
66 | isResizeable={false}
67 | globalCreateIcon={
}
68 | onCreateDrawerOpen={this.toggleCreate}
69 | {...navProps()}
70 | >
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 | Your Nucleus instance has pending migrations and won't be able to create or modify releases until migrations have been run, admins can run migrations by visiting /migrations
80 |
81 |
82 | {this.props.children}
83 |
84 |
85 |
86 | );
87 | }
88 | }
89 |
90 | const mapStateToProps = (state: AppState) => ({
91 | user: state.user,
92 | hasPendingMigration: state.migrations.hasPendingMigration,
93 | });
94 |
95 | export default connect(mapStateToProps, null)(PageWrapper as any);
96 |
--------------------------------------------------------------------------------
/src/migrations/file-index/__spec__/FileIndexMigration_spec.ts:
--------------------------------------------------------------------------------
1 | import { expect } from 'chai';
2 | import * as fs from 'fs-extra';
3 | import * as os from 'os';
4 | import * as path from 'path';
5 | import { SinonStub, stub } from 'sinon';
6 |
7 | import FileIndexMigration from '../FileIndexMigration';
8 | import LocalStore from '../../../files/local/LocalStore';
9 |
10 | const fakeApp = {
11 | slug: 'app',
12 | channels: [{
13 | id: 'channel',
14 | versions: [{
15 | name: 'version',
16 | files: [{
17 | platform: 'darwin',
18 | arch: 'x64',
19 | fileName: 'App.dmg',
20 | }, {
21 | platform: 'win32',
22 | arch: 'ia32',
23 | fileName: 'App.exe',
24 | }, {
25 | platform: 'linux',
26 | arch: 'x64',
27 | fileName: 'App.deb',
28 | }],
29 | }],
30 | }],
31 | };
32 |
33 | describe('FileIndexMigration', () => {
34 | let dir: string;
35 | let store: LocalStore;
36 | let localConfig: LocalOptions;
37 | let migrator: FileIndexMigration;
38 | let fakeDriver: {
39 | getApps: SinonStub;
40 | };
41 |
42 | beforeEach(async () => {
43 | dir = await fs.mkdtemp(path.join(os.tmpdir(), '/'));
44 | localConfig = {
45 | root: dir,
46 | staticUrl: 'https://static.url.com/thing',
47 | };
48 | store = new LocalStore(localConfig);
49 | fakeDriver = {
50 | getApps: stub(),
51 | };
52 | migrator = new FileIndexMigration(store, fakeDriver as any);
53 | });
54 |
55 | afterEach(async () => {
56 | await fs.remove(dir);
57 | });
58 |
59 | describe('getItems', () => {
60 | it('should return an empty array when there are no files', async () => {
61 | fakeDriver.getApps.returns(Promise.resolve([]));
62 | expect(await migrator.getItems()).to.deep.equal([]);
63 | });
64 |
65 | it('should check for existence of index files and mark as done appropriately', async () => {
66 | fakeDriver.getApps.returns(Promise.resolve([fakeApp]));
67 | const hasFileStub = stub(store, 'hasFile');
68 | hasFileStub.onCall(0).returns(Promise.resolve(false));
69 | hasFileStub.onCall(1).returns(Promise.resolve(true));
70 | hasFileStub.onCall(2).returns(Promise.resolve(false));
71 | const items = await migrator.getItems();
72 | expect(items.length).to.equal(3);
73 | expect(items[0].done).to.equal(false);
74 | expect(items[1].done).to.equal(true);
75 | expect(items[2].done).to.equal(false);
76 | });
77 |
78 | it('should work for a list of files longer than 5', async () => {
79 | fakeDriver.getApps.returns(Promise.resolve([fakeApp, fakeApp, fakeApp, fakeApp, fakeApp]));
80 | const items = await migrator.getItems();
81 | expect(items.length).to.equal(15);
82 | });
83 | });
84 |
85 | describe('runOnItem', () => {
86 | it('should do no work if the item is flagged as done', async () => {
87 | const getFile = stub(store, 'getFile');
88 | await migrator.runOnItem({
89 | done: true,
90 | data: {} as any,
91 | });
92 | expect(getFile.callCount).to.equal(0);
93 | });
94 |
95 | it('should copy the file if the item is flagged as not done', async () => {
96 | const getFile = stub(store, 'getFile');
97 | getFile.returns(Promise.resolve(Buffer.from('test 123')));
98 | const putFile = stub(store, 'putFile');
99 | putFile.returns(Promise.resolve(true));
100 | await migrator.runOnItem({
101 | done: false,
102 | data: {
103 | originalKey: 'original/key/in/store',
104 | indexKey: 'index/key/to/move/to',
105 | },
106 | });
107 | expect(getFile.callCount).to.equal(1);
108 | expect(getFile.firstCall.args[0]).to.equal('original/key/in/store');
109 | expect(putFile.callCount).to.equal(1);
110 | expect(putFile.firstCall.args[0]).to.equal('index/key/to/move/to');
111 | expect(putFile.firstCall.args[1].toString()).to.equal('test 123');
112 | });
113 | });
114 | });
115 |
--------------------------------------------------------------------------------
/src/rest/WebHook.ts:
--------------------------------------------------------------------------------
1 | import * as debug from 'debug';
2 | import fetch from 'node-fetch';
3 |
4 | import { baseURL } from '../config';
5 | import driver from '../db/driver';
6 |
7 | const { version: nucleusVersion } = require('../../package.json');
8 |
9 | const d = debug('nucleus:web-hook');
10 |
11 | export default class WebHook {
12 | private id: number;
13 | private url: string;
14 | private secret: string;
15 | private app: NucleusApp;
16 | private registered: boolean;
17 |
18 | static fromNucleusHook(app: NucleusApp, hook: NucleusWebHook) {
19 | return new WebHook(hook.id, hook.url, hook.secret, app, hook.registered);
20 | }
21 |
22 | constructor(id: number, url: string, secret: string, app: NucleusApp, registered: boolean) {
23 | this.id = id;
24 | this.url = url;
25 | this.secret = secret;
26 | this.app = app;
27 | this.registered = registered;
28 | }
29 |
30 | getURL() {
31 | return this.url;
32 | }
33 |
34 | private async fire(type: string, extendedInfo: Object) {
35 | d(`Calling: '${this.url}' with type: '${type}'`);
36 | try {
37 | const response = await fetch(this.url, {
38 | method: 'POST',
39 | headers: {
40 | Authorization: this.secret,
41 | 'Content-Type': 'application/json',
42 | 'User-Agent': `Nucleus/${nucleusVersion}`,
43 | },
44 | body: JSON.stringify(Object.assign({}, extendedInfo, {
45 | type,
46 | nucleusOrigin: baseURL,
47 | app: {
48 | id: this.app.id,
49 | name: this.app.name,
50 | slug: this.app.slug,
51 | },
52 | })),
53 | });
54 | if (response.status !== 200) {
55 | d(`Unexpected status code occurred while calling: ${this.url}`, response.status);
56 | await driver.createWebHookError(
57 | this.app,
58 | this.id,
59 | `Unexpected status code while calling with type: '${type}'`,
60 | response.status,
61 | await response.text(),
62 | );
63 | return false;
64 | } else {
65 | return true;
66 | }
67 | } catch (err) {
68 | d(`Fetching: ${this.url} failed with error`, err);
69 | await driver.createWebHookError(
70 | this.app,
71 | this.id,
72 | `Failed to fetch: ${type}`,
73 | -1,
74 | err.message,
75 | );
76 | return false;
77 | }
78 | }
79 |
80 | async register() {
81 | if (!this.registered) {
82 | this.registered = await this.fire('register', {});
83 | await driver.setWebHookRegistered(this.app, this.id, this.registered);
84 | return this.registered;
85 | }
86 | }
87 |
88 | async unregister() {
89 | if (this.registered) {
90 | this.registered = !(await this.fire('unregister', {}));
91 | await driver.setWebHookRegistered(this.app, this.id, this.registered);
92 | return !this.registered;
93 | }
94 | return true;
95 | }
96 |
97 | async newChannel(channel: NucleusChannel) {
98 | if (!this.registered) return false;
99 | return await this.fire('channel_created', {
100 | channel: {
101 | id: channel.id,
102 | name: channel.name,
103 | },
104 | });
105 | }
106 |
107 | async newVersion(channel: NucleusChannel, version: NucleusVersion) {
108 | if (!this.registered) return false;
109 | return await this.fire('version_created', {
110 | channel: {
111 | id: channel.id,
112 | name: channel.name,
113 | },
114 | version: {
115 | name: version.name,
116 | },
117 | });
118 | }
119 |
120 | async newVersionFile(channel: NucleusChannel, version: NucleusVersion) {
121 | if (!this.registered) return false;
122 | return await this.fire('', {
123 | channel: {
124 | id: channel.id,
125 | name: channel.name,
126 | },
127 | version: {
128 | name: version.name,
129 | files: version.files,
130 | },
131 | });
132 | }
133 | }
134 |
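The payload assembled in `fire()` above is a plain JSON POST: the shared secret travels in the `Authorization` header, and the body carries a `type` field plus `nucleusOrigin`, `app`, and any event-specific data. Below is a minimal sketch of a receiving endpoint, assuming Express; the route, port, and secret are hypothetical placeholders and not part of this repository.

```typescript
import * as bodyParser from 'body-parser';
import * as express from 'express';

const MY_SECRET = 'cats'; // Hypothetical: must match the secret registered with Nucleus

const receiver = express();
receiver.use(bodyParser.json());

receiver.post('/nucleus-hook', (req, res) => {
  // Reject calls that do not carry the shared secret
  if (req.headers.authorization !== MY_SECRET) {
    return res.status(403).json({ error: 'Bad secret' });
  }

  // type is one of the events fired above, e.g. 'register', 'unregister',
  // 'channel_created' or 'version_created'
  if (req.body.type === 'version_created') {
    console.log(`New version ${req.body.version.name} on ${req.body.channel.name} of ${req.body.app.name}`);
  }

  // Nucleus treats anything other than a 200 response as a failure
  res.status(200).json({ ok: true });
});

receiver.listen(3000);
```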
--------------------------------------------------------------------------------
/src/files/s3/S3Store.ts:
--------------------------------------------------------------------------------
1 | import * as AWS from 'aws-sdk';
2 | import * as debug from 'debug';
3 |
4 | import { CloudFrontBatchInvalidator } from './CloudFrontBatchInvalidator';
5 | import * as config from '../../config';
6 |
7 | const d = debug('nucleus:s3');
8 |
9 | AWS.config.credentials = new AWS.EC2MetadataCredentials({
10 | httpOptions: { timeout: 5000 },
11 | maxRetries: 10,
12 | });
13 |
14 | export default class S3Store implements IFileStore {
15 | constructor(public s3Config = config.s3) {}
16 |
17 | public async hasFile(key: string) {
18 | const s3 = this.getS3();
19 | return await new Promise<boolean>(resolve => s3.headObject({
20 | Bucket: this.s3Config.bucketName,
21 | Key: key,
22 | }, (err) => {
23 | if (err && err.code === 'NotFound') return resolve(false);
24 | resolve(true);
25 | }));
26 | }
27 |
28 | public async getFileSize(key: string) {
29 | const s3 = this.getS3();
30 | return await new Promise<number>(resolve => s3.headObject({
31 | Bucket: this.s3Config.bucketName,
32 | Key: key,
33 | }, (err, info) => {
34 | if (err && err.code === 'NotFound') return resolve(0);
35 | resolve(info.ContentLength || 0);
36 | }));
37 | }
38 |
39 | public async putFile(key: string, data: Buffer, overwrite = false) {
40 | d(`Putting file: '${key}', overwrite=${overwrite ? 'true' : 'false'}`);
41 | const s3 = this.getS3();
42 | const keyExists = async () => await this.hasFile(key);
43 | let wrote = false;
44 | if (overwrite || !await keyExists()) {
45 | d(`Deciding to write file (either because overwrite is enabled or the key didn't exist)`);
46 | await new Promise<void>((resolve, reject) => s3.putObject({
47 | Bucket: this.s3Config.bucketName,
48 | Key: key,
49 | Body: data,
50 | ACL: 'public-read',
51 | }, (err, data) => {
52 | if (err) return reject(err);
53 | resolve();
54 | }));
55 | wrote = true;
56 | }
57 | if (overwrite) {
58 | CloudFrontBatchInvalidator.get(this).addToBatch(key);
59 | }
60 | return wrote;
61 | }
62 |
63 | public async getFile(key: string) {
64 | d(`Fetching file: '${key}'`);
65 | return await new Promise<Buffer>((resolve) => {
66 | const s3 = this.getS3();
67 | s3.getObject({
68 | Bucket: this.s3Config.bucketName,
69 | Key: key,
70 | }, (err, data) => {
71 | if (err) {
72 | d('File not found, defaulting to empty buffer');
73 | return resolve(Buffer.from(''));
74 | }
75 | resolve(data.Body as Buffer);
76 | });
77 | });
78 | }
79 |
80 | public async deletePath(key: string) {
81 | d(`Deleting files under path: '${key}'`);
82 | const s3 = this.getS3();
83 | const keys = await this.listFiles(key);
84 | d(`Found objects to delete: [${keys.join(', ')}]`);
85 | await new Promise<void>((resolve) => {
86 | s3.deleteObjects({
87 | Bucket: this.s3Config.bucketName,
88 | Delete: {
89 | Objects: keys.map(key => ({
90 | Key: key,
91 | })),
92 | },
93 | }, () => resolve());
94 | });
95 | }
96 |
97 | public async getPublicBaseUrl() {
98 | const { cloudfront, init } = this.s3Config;
99 |
100 | if (cloudfront) {
101 | return cloudfront.publicUrl;
102 | }
103 |
104 | if (init && init.endpoint) {
105 | return init.endpoint;
106 | }
107 |
108 | return `https://${this.s3Config.bucketName}.s3.amazonaws.com`;
109 | }
110 |
111 | public async listFiles(prefix: string) {
112 | d(`Listing files under path: '${prefix}'`);
113 | const s3 = this.getS3();
114 | const objects = await new Promise<AWS.S3.ObjectList>((resolve) => {
115 | s3.listObjects({
116 | Bucket: this.s3Config.bucketName,
117 | Prefix: prefix,
118 | }, (err, data) => {
119 | resolve(data.Contents);
120 | });
121 | });
122 | return objects.map(object => object.Key).filter(key => !!key) as string[];
123 | }
124 |
125 | private getS3() {
126 | if (this.s3Config.init) {
127 | return new AWS.S3(this.s3Config.init);
128 | }
129 | return new AWS.S3();
130 | }
131 | }
132 |
--------------------------------------------------------------------------------
/src/db/BaseDriver.ts:
--------------------------------------------------------------------------------
1 | import * as path from 'path';
2 | import * as semver from 'semver';
3 | import * as toIco from 'to-ico';
4 |
5 | import store from '../files/store';
6 | import BaseMigration from '../migrations/BaseMigration';
7 |
8 | const IDENTIFYING_SUFFIXES = ['-full.nupkg', '-delta.nupkg', '.exe', '.msi', '.zip', '.dmg', '.pkg', '.deb', '.rpm'];
9 |
10 | export abstract class IDBDriver {
11 | public abstract ensureConnected(): Promise;
12 | public abstract getApps(): Promise<NucleusApp[]>;
13 | public abstract createApp(owner: User, name: string, icon: Buffer): Promise;
14 | public abstract setTeam(app: NucleusApp, userIdents: string[]): Promise;
15 | public abstract resetAppToken(app: NucleusApp): Promise;
16 | public abstract getApp(id: AppID): Promise;
17 | public abstract createChannel(app: NucleusApp, channelName: string): Promise;
18 | public abstract renameChannel(app: NucleusApp, channel: NucleusChannel, newName: string): Promise;
19 | public abstract getChannel(app: NucleusApp, channelId: ChannelID): Promise;
20 | public abstract deleteTemporarySave(save: ITemporarySave): Promise;
21 | public abstract getTemporarySave(temporaryId: number): Promise;
22 | public abstract getTemporarySaves(app: NucleusApp, channel: NucleusChannel): Promise;
23 | public abstract saveTemporaryVersionFiles(app: NucleusApp, channel: NucleusChannel, version: string, filenames: string[], arch: string, platform: NucleusPlatform): Promise;
24 | public abstract registerVersionFiles(save: ITemporarySave): Promise;
25 | public abstract createWebHook(app: NucleusApp, url: string, secret: string): Promise;
26 | public abstract getWebHook(app: NucleusApp, webHookId: number): Promise;
27 | public abstract deleteWebHook(app: NucleusApp, webHookId: number): Promise;
28 | public abstract createWebHookError(app: NucleusApp, webHookId: number, message: string, code: number, body: string): Promise;
29 | public abstract setWebHookRegistered(app: NucleusApp, webHookId: number, registered: boolean): Promise;
30 | public abstract setVersionDead(app: NucleusApp, channel: NucleusChannel, version: string, dead: boolean): Promise;
31 | public abstract setVersionRollout(app: NucleusApp, channel: NucleusChannel, version: string, rollout: number): Promise;
32 | // Migrations
33 | public abstract addMigrationIfNotExists(migration: BaseMigration): Promise;
34 | public abstract getMigrations(): Promise;
35 | // SHA
36 | public abstract storeSHAs(file: NucleusFile, hashes: HashSet): Promise;
37 | }
38 |
39 | export default abstract class BaseDriver extends IDBDriver {
40 | public async saveIcon(app: NucleusApp, icon: Buffer, wipePrevious = false) {
41 | await store.putFile(path.posix.join(app.slug, 'icon.png'), icon, wipePrevious);
42 | const iconAsIco = await toIco([icon], {
43 | resize: true,
44 | sizes: [16, 24, 32, 48, 64, 128, 256],
45 | });
46 | await store.putFile(path.posix.join(app.slug, 'icon.ico'), iconAsIco, wipePrevious);
47 | }
48 |
49 | protected sluggify(name: string) {
50 | return name.replace(/ /g, '-').replace(/\//, '-');
51 | }
52 |
53 | protected orderVersions(versions: NucleusVersion[]) {
54 | return ([] as NucleusVersion[]).concat(versions).sort((a, b) => {
55 | return semver.compare(a.name, b.name);
56 | });
57 | }
58 |
59 | protected writeVersionsFileToStore = async (app: NucleusApp, channel: NucleusChannel) => {
60 | const deepChannel = Object.assign({}, (await this.getApp(app.id!)))
61 | .channels
62 | .find(testChannel => testChannel.id === channel.id);
63 | if (!deepChannel) return;
64 | const versionsToWrite = deepChannel.versions;
65 | await store.putFile(path.posix.join(app.slug, channel.id, 'versions.json'), Buffer.from(JSON.stringify(versionsToWrite, null, 2)), true);
66 | }
67 |
68 | /**
69 | * This method compares two file names to determine if they are technically the same
70 | * file in the context of a single version/platform/arch combination. This is used
71 | * to ensure we never upload two -full.nupkg files to a single version, or two .dmg
72 | * files.
73 | *
74 | * @param file1 The name of the first file
75 | * @param file2 The name of the second file
76 | */
77 | protected isInherentlySameFile(file1: string, file2: string) {
78 | for (const suffix of IDENTIFYING_SUFFIXES) {
79 | if (file1.endsWith(suffix) && file2.endsWith(suffix)) {
80 | return true;
81 | }
82 | }
83 | return false;
84 | }
85 | }
86 |
--------------------------------------------------------------------------------
/src/files/utils/yum.ts:
--------------------------------------------------------------------------------
1 | import * as cp from 'child-process-promise';
2 | import * as fs from 'fs-extra';
3 | import * as path from 'path';
4 |
5 | import { spawnPromiseAndCapture, escapeShellArguments } from './spawn';
6 | import { syncDirectoryToStore, syncStoreToDirectory } from './sync';
7 | import { withTmpDir } from './tmp';
8 | import * as config from '../../config';
9 |
10 | const getCreateRepoCommand = (dir: string, args: string[]): [string, string[]] => {
11 | if (process.platform === 'linux') {
12 | return ['createrepo', args];
13 | }
14 | return [
15 | 'docker',
16 | ['run', '--rm', '-v', `${dir}:/root`, 'tomologic/createrepo', ...args],
17 | ];
18 | };
19 |
20 | const getSignRpmCommand = (dir: string, args: string[]): [string, string[]] => {
21 | if (process.platform === 'linux') {
22 | return ['rpmsign', args];
23 | }
24 | const safeArgs = escapeShellArguments(args);
25 | return [
26 | 'docker',
27 | ['run', '--rm', '-v', `${dir}:/root/working`, 'marshallofsound/sh', `(gpg-agent --daemon) && (gpg --import key.asc || true) && (rpmsign ${safeArgs.join(' ')})`],
28 | ];
29 | };
30 |
31 | const createRepoFile = async (store: IFileStore, app: NucleusApp, channel: NucleusChannel) => {
32 | await store.putFile(
33 | path.posix.join(app.slug, channel.id, 'linux', `${app.slug}.repo`),
34 | Buffer.from(
35 | `[packages]
36 | name=${app.name} Packages
37 | baseurl=${await store.getPublicBaseUrl()}/${app.slug}/${channel.id}/linux/redhat
38 | enabled=1
39 | gpgcheck=1`,
40 | ),
41 | true,
42 | );
43 | };
44 |
45 | const signRpm = async (rpm: string) => {
46 | await withTmpDir(async (tmpDir) => {
47 | const fileName = path.basename(rpm);
48 | const tmpFile = path.resolve(tmpDir, fileName);
49 | await fs.copy(rpm, tmpFile);
50 | // Import GPG key
51 | const key = path.resolve(tmpDir, 'key.asc');
52 | await fs.writeFile(key, config.gpgSigningKey);
53 | const [stdout, stderr] = await spawnPromiseAndCapture('gpg', ['--import', key]);
54 |
55 | const keyImport = stdout.toString() + '--' + stderr.toString();
56 | const keyMatch = keyImport.match(/ key ([A-Za-z0-9]+):/);
57 | if (!keyMatch || !keyMatch[1]) {
58 | console.error(JSON.stringify(keyImport));
59 | throw new Error('Bad GPG import');
60 | }
61 | const keyId = keyMatch[1];
62 | // Sign the RPM file
63 | const [exe, args] = getSignRpmCommand(tmpDir, ['-D', `_gpg_name ${keyId}`, '--addsign', path.basename(rpm)]);
64 | const [signOut, signErr, signError] = await spawnPromiseAndCapture(exe, args, {
65 | cwd: tmpDir,
66 | });
67 | if (signError) {
68 | console.error('Failed to sign RPM file');
69 | console.error(`Output:\n${signOut.toString()}\n\n${signErr.toString()}`);
70 | throw signError;
71 | }
72 | // Done signing
73 | await fs.copy(tmpFile, rpm, {
74 | overwrite: true,
75 | });
76 | });
77 | };
78 |
79 | const signAllRpmFiles = async (dir: string) => {
80 | const rpmFiles = (await fs.readdir(dir))
81 | .filter(file => file.endsWith('.rpm'))
82 | .map(file => path.resolve(dir, file));
83 | for (const rpm of rpmFiles) {
84 | await signRpm(rpm);
85 | }
86 | };
87 |
88 | export const initializeYumRepo = async (store: IFileStore, app: NucleusApp, channel: NucleusChannel) => {
89 | await withTmpDir(async (tmpDir) => {
90 | const [exe, args] = getCreateRepoCommand(tmpDir, ['-v', '--no-database', './']);
91 | await cp.spawn(exe, args, {
92 | cwd: tmpDir,
93 | });
94 | await syncDirectoryToStore(
95 | store,
96 | path.posix.join(app.slug, channel.id, 'linux', 'redhat'),
97 | tmpDir,
98 | );
99 | await createRepoFile(store, app, channel);
100 | });
101 | };
102 |
103 | export const addFileToYumRepo = async (store: IFileStore, {
104 | app,
105 | channel,
106 | internalVersion,
107 | file,
108 | fileData,
109 | }: HandlePlatformUploadOpts) => {
110 | await withTmpDir(async (tmpDir) => {
111 | const storeKey = path.posix.join(app.slug, channel.id, 'linux', 'redhat');
112 | await syncStoreToDirectory(
113 | store,
114 | storeKey,
115 | tmpDir,
116 | );
117 | const binaryPath = path.resolve(tmpDir, `${internalVersion.name}-${file.fileName}`);
118 | if (await fs.pathExists(binaryPath)) {
119 | throw new Error('Uploaded a duplicate file');
120 | }
121 | await fs.writeFile(binaryPath, fileData);
122 | await signAllRpmFiles(tmpDir);
123 | const [exe, args] = getCreateRepoCommand(tmpDir, ['-v', '--update', '--no-database', '--deltas', './']);
124 | await cp.spawn(exe, args, {
125 | cwd: tmpDir,
126 | });
127 | await syncDirectoryToStore(
128 | store,
129 | storeKey,
130 | tmpDir,
131 | );
132 | await createRepoFile(store, app, channel);
133 | });
134 | };
135 |
--------------------------------------------------------------------------------
/src/files/utils/apt.ts:
--------------------------------------------------------------------------------
1 | import * as cp from 'child-process-promise';
2 | import * as fs from 'fs-extra';
3 | import * as path from 'path';
4 |
5 | import { gpgSign } from './gpg';
6 | import { syncDirectoryToStore, syncStoreToDirectory } from './sync';
7 | import { withTmpDir } from './tmp';
8 | import * as config from '../../config';
9 |
10 | const getScanPackagesCommand = (dir: string, args: string[]): [string, string[]] => {
11 | if (process.platform === 'linux') {
12 | return ['dpkg-scanpackages', args];
13 | }
14 | return [
15 | 'docker',
16 | ['run', '--rm', '-v', `${dir}:/root`, 'marshallofsound/dpkg-scanpackages', ...args],
17 | ];
18 | };
19 |
20 | const getScanSourcesCommand = (dir: string, args: string[]): [string, string[]] => {
21 | if (process.platform === 'linux') {
22 | return ['dpkg-scansources', args];
23 | }
24 | return [
25 | 'docker',
26 | ['run', '--rm', '-v', `${dir}:/root`, 'marshallofsound/dpkg-scansources', ...args],
27 | ];
28 | };
29 |
30 | const spawnAndGzip = async ([command, args]: [string, string[]], cwd: string): Promise<[Buffer, Buffer]> => {
31 | const result = await cp.spawn(command, args, {
32 | cwd,
33 | capture: ['stdout'],
34 | });
35 | const output: Buffer = result.stdout;
36 | return await withTmpDir(async (tmpDir: string) => {
37 | await fs.writeFile(path.resolve(tmpDir, 'file'), output);
38 | await cp.spawn('gzip', ['-9', 'file'], {
39 | cwd: tmpDir,
40 | capture: ['stdout'],
41 | });
42 | const content = await fs.readFile(path.resolve(tmpDir, 'file.gz'));
43 | return [output, content] as [Buffer, Buffer];
44 | });
45 | };
46 |
47 | const getAptFtpArchiveCommand = (dir: string, args: string[]): [string, string[]] => {
48 | if (process.platform === 'linux') {
49 | return ['apt-ftparchive', args];
50 | }
51 | return [
52 | 'docker',
53 | ['run', '--rm', '-v', `${dir}:/root`, 'marshallofsound/apt-ftparchive', ...args],
54 | ];
55 | };
56 |
57 | const generateReleaseFile = async (tmpDir: string, app: NucleusApp) => {
58 | const configFile = path.resolve(tmpDir, 'Release.conf');
59 | await fs.writeFile(configFile, `APT::FTPArchive::Release::Origin "${config.organization || 'Nucleus'}";
60 | APT::FTPArchive::Release::Label "${app.name}";
61 | APT::FTPArchive::Release::Suite "stable";
62 | APT::FTPArchive::Release::Codename "debian";
63 | APT::FTPArchive::Release::Architectures "i386 amd64";
64 | APT::FTPArchive::Release::Components "main";
65 | APT::FTPArchive::Release::Description "${app.name}";`);
66 | const [exe, args] = getAptFtpArchiveCommand(tmpDir, ['-c=Release.conf', 'release', '.']);
67 | const { stdout } = await cp.spawn(exe, args, {
68 | cwd: path.resolve(tmpDir),
69 | capture: ['stdout', 'stderr'],
70 | });
71 | await fs.writeFile(path.resolve(tmpDir, 'Release'), stdout);
72 | await gpgSign(path.resolve(tmpDir, 'Release'), path.resolve(tmpDir, 'Release.gpg'));
73 | await fs.remove(configFile);
74 | };
75 |
76 | const writeAptMetadata = async (tmpDir: string, app: NucleusApp) => {
77 | const packagesContent = await spawnAndGzip(getScanPackagesCommand(tmpDir, ['binary', '/dev/null']), tmpDir);
78 | await fs.writeFile(path.resolve(tmpDir, 'binary', 'Packages'), packagesContent[0]);
79 | await fs.writeFile(path.resolve(tmpDir, 'binary', 'Packages.gz'), packagesContent[1]);
80 | const sourcesContent = await spawnAndGzip(getScanSourcesCommand(tmpDir, ['binary', '/dev/null']), tmpDir);
81 | await fs.writeFile(path.resolve(tmpDir, 'binary', 'Sources'), sourcesContent[0]);
82 | await fs.writeFile(path.resolve(tmpDir, 'binary', 'Sources.gz'), sourcesContent[1]);
83 | await generateReleaseFile(path.resolve(tmpDir, 'binary'), app);
84 | };
85 |
86 | export const initializeAptRepo = async (store: IFileStore, app: NucleusApp, channel: NucleusChannel) => {
87 | await withTmpDir(async (tmpDir) => {
88 | await fs.mkdirs(path.resolve(tmpDir, 'binary'));
89 | await writeAptMetadata(tmpDir, app);
90 | await syncDirectoryToStore(
91 | store,
92 | path.posix.join(app.slug, channel.id, 'linux', 'debian'),
93 | tmpDir,
94 | );
95 | });
96 | };
97 |
98 | export const addFileToAptRepo = async (store: IFileStore, {
99 | app,
100 | channel,
101 | internalVersion,
102 | file,
103 | fileData,
104 | }: HandlePlatformUploadOpts) => {
105 | await withTmpDir(async (tmpDir) => {
106 | const storeKey = path.posix.join(app.slug, channel.id, 'linux', 'debian');
107 | await syncStoreToDirectory(
108 | store,
109 | storeKey,
110 | tmpDir,
111 | );
112 | await fs.mkdirs(path.resolve(tmpDir, 'binary'));
113 | const binaryPath = path.resolve(tmpDir, 'binary', `${internalVersion.name}-${file.fileName}`);
114 | if (await fs.pathExists(binaryPath)) {
115 | throw new Error('Uploaded a duplicate file');
116 | }
117 | await fs.writeFile(binaryPath, fileData);
118 | await writeAptMetadata(tmpDir, app);
119 | await syncDirectoryToStore(
120 | store,
121 | storeKey,
122 | tmpDir,
123 | );
124 | });
125 | };
126 |
--------------------------------------------------------------------------------
/src/__spec__/webhook_spec.ts:
--------------------------------------------------------------------------------
1 | import * as chai from 'chai';
2 |
3 | import * as helpers from './_helpers';
4 |
5 | const { expect } = chai;
6 |
7 | describe('webhook endpoints', () => {
8 | before(helpers.startTestNucleus);
9 |
10 | let app: NucleusApp;
11 |
12 | before(async () => {
13 | app = await helpers.createApp();
14 | });
15 |
16 | describe('/app/:id/webhook', () => {
17 | describe('POST', () => {
18 | it('should error if no url is provided', async () => {
19 | const response = await helpers.request
20 | .post(`/app/${app.id}/webhook`);
21 |
22 | expect(response).to.have.status(400);
23 | expect(response).to.be.json;
24 | expect(response.body.error).to.equal('Missing required body param: "url"');
25 | });
26 |
27 | it('should error if no secret is provided', async () => {
28 | const response = await helpers.request
29 | .post(`/app/${app.id}/webhook`)
30 | .send({
31 | url: 'fake',
32 | });
33 |
34 | expect(response).to.have.status(400);
35 | expect(response).to.be.json;
36 | expect(response.body.error).to.equal('Missing required body param: "secret"');
37 | });
38 |
39 | it('should error if a non-string url is provided', async () => {
40 | const response = await helpers.request
41 | .post(`/app/${app.id}/webhook`)
42 | .send({
43 | url: [],
44 | secret: 'cats',
45 | });
46 |
47 | expect(response).to.have.status(400);
48 | expect(response).to.be.json;
49 | expect(response.body.error).to.equal('Invalid URL provided');
50 | });
51 |
52 | it('should error if an invalid URL protocol is provided', async () => {
53 | const response = await helpers.request
54 | .post(`/app/${app.id}/webhook`)
55 | .send({
56 | url: 'file://magic',
57 | secret: 'cats',
58 | });
59 |
60 | expect(response).to.have.status(400);
61 | expect(response).to.be.json;
62 | expect(response.body.error).to.equal('Invalid URL provided');
63 | });
64 |
65 | it('should error if a localhost URL is provided', async () => {
66 | const response = await helpers.request
67 | .post(`/app/${app.id}/webhook`)
68 | .send({
69 | url: 'http://localhost',
70 | secret: 'cats',
71 | });
72 |
73 | expect(response).to.have.status(400);
74 | expect(response).to.be.json;
75 | expect(response.body.error).to.equal('Invalid URL provided');
76 | });
77 |
78 | it('should error if a 127.0.0.1 URL is provided', async () => {
79 | const response = await helpers.request
80 | .post(`/app/${app.id}/webhook`)
81 | .send({
82 | url: 'http://127.0.0.1',
83 | secret: 'cats',
84 | });
85 |
86 | expect(response).to.have.status(400);
87 | expect(response).to.be.json;
88 | expect(response.body.error).to.equal('Invalid URL provided');
89 | });
90 |
91 | it('should succeed if a valid url and secret are provided', async function () {
92 | this.timeout(4000);
93 |
94 | const response = await helpers.request
95 | .post(`/app/${app.id}/webhook`)
96 | .send({
97 | url: 'https://httpbin.org/post',
98 | secret: 'cats',
99 | });
100 |
101 | expect(response).to.have.status(200);
102 | expect(response).to.be.json;
103 | expect(response.body).to.have.property('success', true);
104 | expect(response.body.hook).to.have.property('secret', 'cats');
105 | expect(response.body.hook).to.have.property('url', 'https://httpbin.org/post');
106 | });
107 | });
108 | });
109 |
110 | describe('/app/:id/webhook/:webhookId', () => {
111 | describe('DELETE', () => {
112 | it('should error if the webhook does not exist', async function () {
113 | this.timeout(4000);
114 |
115 | const response = await helpers.request
116 | .del(`/app/${app.id}/webhook/100`);
117 |
118 | expect(response).to.have.status(404);
119 | expect(response).to.be.json;
120 | expect(response.body.error).to.equal('Not Found');
121 | });
122 |
123 | it('should unregister a valid webhook', async function () {
124 | this.timeout(4000);
125 |
126 | const response = await helpers.request
127 | .del(`/app/${app.id}/webhook/1`);
128 |
129 | expect(response).to.have.status(200);
130 | expect(response).to.be.json;
131 | expect(response.body).to.deep.equal({ success: true });
132 | });
133 |
134 | it('should fail to unregister an already unregistered webhook', async () => {
135 | const response = await helpers.request
136 | .del(`/app/${app.id}/webhook/1`);
137 |
138 | expect(response).to.have.status(404);
139 | expect(response).to.be.json;
140 | expect(response.body.error).to.equal('Not Found');
141 | });
142 | });
143 | });
144 |
145 | after(helpers.stopTestNucleus);
146 | });
147 |
--------------------------------------------------------------------------------
/public/components/CreateAppModal.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 | import { connect, Dispatch } from 'react-redux';
3 |
4 | import AkButton from '@atlaskit/button';
5 | import AkFieldBase, { Label as AkLabel } from '@atlaskit/field-base';
6 | import AkFieldTextExported from '@atlaskit/field-text';
7 | import AkModalDialog from '@atlaskit/modal-dialog';
8 | import AkSpinner from '@atlaskit/spinner';
9 |
10 | import { fetchApps, setApps } from '../actions/apps';
11 |
12 | import * as styles from './CreateAppModal.scss';
13 |
14 | /* tslint:disable */
15 | const AkFieldText = AkFieldTextExported as any;
16 | /* tslint:enable */
17 |
18 | interface CreateAppModalReduxProps {
19 | hasPendingMigration: boolean;
20 | }
21 | interface CreateAppModalReduxDispatchProps {
22 | setApps: (apps: NucleusApp[]) => any;
23 | }
24 | interface CreateAppModalComponentProps {
25 | onDismiss: () => void;
26 | isOpen: boolean;
27 | }
28 |
29 | class CreateAppModal extends React.PureComponent {
35 | private fileInput: HTMLInputElement;
36 | state = {
37 | name: '',
38 | creating: false,
39 | nameInvalid: false,
40 | fileInvalid: false,
41 | };
42 |
43 | close = () => {
44 | if (this.state.creating) return;
45 |
46 | this.props.onDismiss();
47 | this.setState({
48 | name: '',
49 | nameInvalid: false,
50 | fileInvalid: false,
51 | });
52 | }
53 |
54 | private isPng = (file: File) => {
55 | return file.type === 'image/png';
56 | }
57 |
58 | create = async () => {
59 | const fileInvalid = !this.fileInput.value || !this.fileInput.files.length
60 | || !this.fileInput.files[0] || !this.isPng(this.fileInput.files[0]);
61 | const nameInvalid = !this.state.name;
62 | this.setState({
63 | fileInvalid,
64 | nameInvalid,
65 | });
66 | if (!nameInvalid && !fileInvalid) {
67 | this.setState({
68 | creating: true,
69 | });
70 | const form = new FormData();
71 | form.append('name', this.state.name);
72 | form.append('icon', this.fileInput.files[0]);
73 | const response = await fetch('/rest/app', {
74 | credentials: 'include',
75 | method: 'POST',
76 | body: form,
77 | });
78 | if (response.status !== 200) {
79 | if (response.status !== 400) return this.props.onDismiss();
80 | alert((await response.json()).error);
81 | this.props.onDismiss();
82 | }
83 | const app = await response.json();
84 | this.setState({
85 | creating: false,
86 | });
87 | this.props.setApps(app);
88 | this.props.onDismiss();
89 | }
90 | }
91 |
92 | private nameChange = (e) => {
93 | this.setState({
94 | name: e.target.value,
95 | });
96 | }
97 |
98 | private refFile = (input) => {
99 | this.fileInput = input;
100 | }
101 |
102 | render() {
103 | return (
104 | New Application}
106 | footer={
107 |
Cancel
108 |
109 |
Create
110 |
}
111 | isOpen={this.props.isOpen}
112 | onDialogDismissed={this.close}
113 | >
114 |
115 | {
116 | this.state.creating
117 | ?
118 | : null
119 | }
120 |
129 |
130 |
131 |
136 |
137 |
138 |
139 |
140 |
141 | );
142 | }
143 | }
144 |
145 | const mapStateToProps = (state: AppState) => ({
146 | hasPendingMigration: state.migrations.hasPendingMigration,
147 | });
148 |
149 | const mapDispatchToProps = (dispatch: Dispatch) => ({
150 | setApps: (apps: NucleusApp[]) => dispatch(setApps(apps)),
151 | });
152 |
153 | export default connect(mapStateToProps, mapDispatchToProps)(CreateAppModal);
154 |
--------------------------------------------------------------------------------
/typings/index.d.ts:
--------------------------------------------------------------------------------
1 | interface GitHubOptions {
2 | clientID: string;
3 | clientSecret: string;
4 | }
5 |
6 | interface OpenIDOptions {
7 | realm: string;
8 | providerURL: string;
9 | stateless: boolean;
10 | profile: boolean;
11 | domain: string;
12 | photoResolver: (email: string) => string;
13 | }
14 |
15 | interface S3Options {
16 | init?: {
17 | endpoint?: string;
18 | s3ForcePathStyle?: boolean;
19 | }
20 |
21 | bucketName: string;
22 |
23 | cloudfront: {
24 | distributionId: string;
25 | publicUrl: string;
26 | } | null
27 | }
28 |
29 | interface LocalOptions {
30 | root: string;
31 | staticUrl: string;
32 | }
33 |
34 | interface SequelizeOptions {
35 | database: string;
36 | dialect: string;
37 | username: string;
38 | password: string;
39 | host: string;
40 | port: number;
41 | storage: string;
42 | }
43 |
44 | interface LocalUser {
45 | username: string;
46 | password: string;
47 | photo: string;
48 | displayName: string;
49 | }
50 |
51 | type LocalAuthOptions = LocalUser[];
52 |
53 | interface SessionConfig {
54 | type: 'redis' | null;
55 | secret: string;
56 |
57 | redis?: {
58 | host: string;
59 | port: number;
60 | }
61 | }
62 |
63 | interface IConfig {
64 | port: number;
65 | baseURL: string;
66 | fileStrategy: string;
67 | dbStrategy: string;
68 | authStrategy: string;
69 | github: GitHubOptions;
70 | openid: OpenIDOptions;
71 | adminIdentifiers: string[];
72 | s3: S3Options;
73 | local: LocalOptions;
74 | sequelize: SequelizeOptions;
75 | localAuth: LocalAuthOptions;
76 | sessionConfig: SessionConfig;
77 | organization?: string;
78 | gpgSigningKey: string;
79 | defaultRollout: number;
80 | }
81 |
82 | interface User {
83 | id: string;
84 | displayName: string;
85 | photos?: { value: string }[],
86 | isAdmin: boolean;
87 | }
88 |
89 | interface IErrorObject {
90 | [key: string]: string
91 | }
92 |
93 | type AppID = string;
94 | type ChannelID = string;
95 | type UserID = string;
96 | type NucleusPlatform = 'darwin' | 'win32' | 'linux';
97 | type FileType = 'installer' | 'update' | 'unkown';
98 |
99 | interface NucleusApp {
100 | id?: AppID;
101 | name: string;
102 | slug: string;
103 | iconUri: string;
104 | token: string;
105 | channels: NucleusChannel[];
106 | team: UserID[];
107 | webHooks: NucleusWebHook[];
108 | }
109 |
110 | interface NucleusWebHook {
111 | id: number;
112 | url: string;
113 | secret: string;
114 | registered: boolean;
115 | errors: NucleusWebHookError[]
116 | }
117 |
118 | interface NucleusWebHookError {
119 | id: number;
120 | message: string;
121 | responseCode: number;
122 | responseBody: string;
123 | }
124 |
125 | interface NucleusChannel {
126 | id?: ChannelID;
127 | name: string;
128 | versions: NucleusVersion[];
129 | }
130 |
131 | interface NucleusFile {
132 | id?: any;
133 | fileName: string;
134 | arch: string;
135 | platform: NucleusPlatform;
136 | type: FileType;
137 | sha1: string;
138 | sha256: string;
139 | }
140 |
141 | interface NucleusVersion {
142 | name: string;
143 | dead: boolean;
144 | rollout: number;
145 | files: NucleusFile[];
146 | }
147 |
148 | interface NucleusMigration {
149 | key: string;
150 | friendlyName: string;
151 | complete: boolean;
152 | }
153 |
154 | interface ITemporarySave {
155 | id: any;
156 | saveString: string;
157 | version: string;
158 | filenames: string[];
159 | arch: string;
160 | platform: NucleusPlatform;
161 | date: Date;
162 | cipherPassword: string;
163 | }
164 |
165 | interface HandlePlatformUploadOpts {
166 | app: NucleusApp;
167 | channel: NucleusChannel;
168 | internalVersion: NucleusVersion;
169 | file: NucleusFile;
170 | fileData: Buffer;
171 | }
172 |
173 | interface IFileStore {
174 | putFile(key: string, data: Buffer, overwriteExisting?: boolean): Promise<boolean>;
175 | hasFile(key: string): Promise<boolean>;
176 | getFile(key: string): Promise<Buffer>;
177 | getFileSize(key: string): Promise<number>;
178 | getPublicBaseUrl(): Promise<string>;
179 | deletePath(key: string): Promise<void>;
180 | listFiles(prefix: string): Promise<string[]>;
181 | }
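
A usage sketch for this interface: a hypothetical in-memory `IFileStore` (handy as a test double; not part of this repository) that mirrors the overwrite and empty-buffer semantics exercised in `LocalStore_spec.ts`:

```typescript
class MemoryStore implements IFileStore {
  private files = new Map<string, Buffer>();

  async putFile(key: string, data: Buffer, overwriteExisting = false) {
    // Like LocalStore, refuse to overwrite unless explicitly asked to
    if (this.files.has(key) && !overwriteExisting) return false;
    this.files.set(key, data);
    return true;
  }
  async hasFile(key: string) { return this.files.has(key); }
  async getFile(key: string) {
    // Missing keys default to an empty buffer, as the real stores do
    return this.files.get(key) || Buffer.from('');
  }
  async getFileSize(key: string) { return (await this.getFile(key)).length; }
  async getPublicBaseUrl() { return 'https://static.example.com'; }
  async deletePath(key: string) {
    for (const existing of Array.from(this.files.keys())) {
      if (existing === key || existing.startsWith(`${key}/`)) this.files.delete(existing);
    }
  }
  async listFiles(prefix: string) {
    return Array.from(this.files.keys()).filter(k => k.startsWith(prefix));
  }
}
```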
182 |
183 | interface HashSet {
184 | sha1: string;
185 | sha256: string;
186 | }
187 |
188 | declare namespace Express {
189 | interface Response {
190 | error(err: IErrorObject): void;
191 | download(path: string): void;
192 | status(code: number): Response;
193 | json(obj: any): Response;
194 | }
195 |
196 | interface Request {
197 | body: any;
198 | targetApp: NucleusApp;
199 | channel: NucleusChannel;
200 | migration: {
201 | internal: NucleusMigration;
202 | migrator: any;
203 | };
204 | }
205 | }
206 |
207 | declare module 'is-png' {
208 | type IsPng = (buffer: Buffer) => boolean;
209 | const foo: IsPng;
210 | export = foo;
211 | }
212 |
213 | declare module 'to-ico' {
214 | type ToIco = (files: Buffer[], options: {
215 | resize?: boolean;
216 | sizes: (16 | 24 | 32 | 48 | 64 | 128 | 256)[];
217 | }) => Promise<Buffer>;
218 | const foo: ToIco;
219 | export = foo;
220 | }
221 |
222 | declare module 'child-process-promise' {
223 | export const spawn: (exe: string, args: string[], opts?: {
224 | cwd?: string;
225 | capture?: string[];
226 | }) => Promise<{
227 | stdout: Buffer;
228 | stderr: Buffer;
229 | }> & { childProcess: any };
230 | }
231 |
--------------------------------------------------------------------------------
/public/assets/Logo.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 |
3 |
4 | export default class Logo extends React.PureComponent<{}, {}> {
5 | render() {
6 | return (
7 |
8 |
9 |
10 |
11 |
12 | );
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Nucleus Server
2 |
3 | [](https://travis-ci.org/atlassian/nucleus) [](https://www.npmjs.com/package/nucleus-server) 
4 |
5 | A configurable and versatile update server for all your Electron apps
6 |
7 | ## Features
8 |
9 | * Multiple applications
10 | * Multiple channels for each application
11 | * Companion publisher for [electron-forge](https://github.com/electron-userland/electron-forge) to greatly simplify release publishing
12 | * Backed by a static file store so minimal server costs
13 | * One command to run so insanely simple to set up
14 | * [Staged Rollouts](docs/Staged%20Rollouts.md)
15 | * macOS
16 | * Windows
17 | * [Latest Downloads](docs/Latest%20Releases.md) - Static URLs for downloading the latest version of your application
18 | * Platform Support:
19 | * macOS
20 | * Windows
21 | * Linux - RedHat
22 | * Linux - Debian
23 |
24 | ## Electron Version Requirements
25 |
26 | Please note that using Nucleus requires that you use Electron `>=2.0.0`.
27 |
28 | ## Setup
29 |
30 | ### Docker
31 |
32 | You'll need to build your own Docker image using a Dockerfile like the one below.
33 |
34 | ```docker
35 | FROM atlassian/nucleus
36 |
37 | COPY config.js /opt/service/config.js
38 | ```
39 |
40 | Then running your built Docker image will start Nucleus on port 8080.
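
For example, with the Dockerfile above sitting next to your `config.js` (the image tag is arbitrary):

```bash
docker build -t my-nucleus .
docker run --rm -p 8080:8080 my-nucleus
```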
41 |
42 | ### Manual
43 |
44 | ```bash
45 | git clone https://github.com/atlassian/nucleus.git nucleus-server
46 | cd nucleus-server
47 | cp config.template.js config.js
48 | yarn
49 | yarn dev
50 | ```
51 |
52 | This will launch Nucleus running on your local machine with a local
53 | file store and a SQLite database.
54 |
55 | ## Configuration
56 |
57 | All the config options are thoroughly documented and explained in the
58 | [config.template.js](config.template.js) file in this repository.
59 |
60 | ## Uploading Releases
61 |
62 | Release uploading is explained inside Nucleus itself, for more advanced
63 | information check out the [Uploading Docs](docs/Uploading.md).
64 |
65 | ## More Information
66 |
67 | Please see the following documents for more information on Nucleus and how it works.
68 |
69 | * [Internal Endpoints](docs/Endpoints.md)
70 | * [Uploading Releases](docs/Uploading.md)
71 | * [Architecture](docs/Architecture.md)
72 | * [Versioned Public API](docs/API.md)
73 | * [Staged Rollouts](docs/Staged%20Rollouts.md)
74 | * [Latest Releases](docs/Latest%20Releases.md)
75 |
76 | ## FAQ
77 |
78 | ### Why does this use a static file store, why not use a traditional update server?
79 |
80 | $$$, static file stores quite simply cost less to run than arrays of update servers
81 |
82 | ### Can I use CloudFront to speed up my downloads?
83 |
84 | Yes, check out the CloudFront section of the S3 config inside [config.template.js](config.template.js).
85 |
86 | ### How do I switch to this from Update Server X?
87 |
88 | Switching update servers in an Electron app is quite simple:
89 |
90 | 1. Modify your autoUpdater code to point to this server (follow the instructions
91 | on your app page inside Nucleus; a rough sketch is shown below)
92 | 2. Release a new update for your application on your existing update server with this change
93 | 3. Release all future updates on Nucleus :)
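
For step 1, here is a rough sketch of what that change can look like in the app's main process, using Electron's built-in `autoUpdater`. The base URL below is a placeholder; the real feed URLs for your app and channel are shown on your app page inside Nucleus.

```typescript
import { autoUpdater } from 'electron';

// Placeholder: copy the real base URL for your app and channel from Nucleus
const base = 'https://your.static.store/myapp/<channel-id>';

if (process.platform === 'darwin') {
  // Nucleus serves a Squirrel.Mac JSON feed (RELEASES.json) for macOS
  autoUpdater.setFeedURL({
    url: `${base}/darwin/${process.arch}/RELEASES.json`,
    serverType: 'json',
  });
} else if (process.platform === 'win32') {
  // On Windows, Squirrel.Windows expects the directory that contains RELEASES
  autoUpdater.setFeedURL({ url: `${base}/win32/${process.arch}` });
}

autoUpdater.checkForUpdates();
```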
94 |
95 | ### Is this really awesome?
96 |
97 | Pretty sure it is :D
98 |
99 | ### How do I set this up in a production environment?
100 |
101 | You can use the published version of this module `nucleus-server` which has
102 | an exported CLI command (`nucleus`). You then run the command with the first
103 | argument being a path to your config file. E.g.
104 |
105 | ```bash
106 | NODE_ENV=production nucleus path/to/config.js
107 | ```
108 |
109 | Please ensure you add redis session config and a proper (not local) authentication
110 | method when running in a production environment.
111 |
112 | To enable logging you need to set `DEBUG=nucleus*`.
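
For example, the session section of a production config might look like this (the Redis host and port are placeholders for your own instance):

```js
// Inside config.js - Redis-backed sessions for production
sessionConfig: {
  type: 'redis',
  secret: 'replace-with-a-long-random-secret',
  redis: {
    host: 'redis.internal.example.com',
    port: 6379
  }
},
```

Combined with logging, the production start command then becomes `DEBUG=nucleus* NODE_ENV=production nucleus path/to/config.js`.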
113 |
114 | ## System Requirements
115 |
116 | * Node >= 8
117 | * Yarn
118 | * Linux
119 | * `createrepo`
120 | * `rpmsign`
121 | * `dpkg-scanpackages`
122 | * `dpkg-scansources`
123 | * `gpg`
124 | * `apt-ftparchive`
125 | * macOS / Windows
126 | * `docker`
127 | * `gpg`
128 |
129 | ## Contributors
130 |
131 | Pull requests, issues and comments welcome. For pull requests:
132 |
133 | * Add tests for new features and bug fixes
134 | * Follow the existing style
135 | * Separate unrelated changes into multiple pull requests
136 |
137 | See the existing issues for good places to start contributing.
138 |
139 | For bigger changes, make sure you start a discussion first by creating
140 | an issue and explaining the intended change.
141 |
142 | Atlassian requires contributors to sign a Contributor License Agreement,
143 | known as a CLA. This serves as a record stating that the contributor is
144 | entitled to contribute the code/documentation/translation to the project
145 | and is willing to have it used in distributions and derivative works
146 | (or is willing to transfer ownership).
147 |
148 | Prior to accepting your contributions we ask that you please follow the appropriate
149 | link below to digitally sign the CLA. The Corporate CLA is for those who are
150 | contributing as a member of an organization and the individual CLA is for
151 | those contributing as an individual.
152 |
153 | * [CLA for corporate contributors](https://na2.docusign.net/Member/PowerFormSigning.aspx?PowerFormId=e1c17c66-ca4d-4aab-a953-2c231af4a20b)
154 | * [CLA for individuals](https://na2.docusign.net/Member/PowerFormSigning.aspx?PowerFormId=3f94fbdc-2fbe-46ac-b14c-5d152700ae5d)
155 |
156 | ## Team
157 |
158 | | [](https://samuelattard.com) |
159 | |---|
160 | | [Samuel Attard](https://samuelattard.com) |
161 |
162 | ## License
163 |
164 | Apache 2.0 © Atlassian Pty Ltd
165 |
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import 'colors';
4 | import * as bodyParser from 'body-parser';
5 | import * as compression from 'compression';
6 | import * as debug from 'debug';
7 | import * as express from 'express';
8 | import * as fs from 'fs-extra';
9 | import * as path from 'path';
10 |
11 | import { createA } from './utils/a';
12 | import { port, gpgSigningKey, localAuth } from './config';
13 | import driver from './db/driver';
14 | import store from './files/store';
15 | import adminRouter from './rest/admin';
16 | import appRouter from './rest/app';
17 | import migrationRouter from './rest/migration';
18 | import { authenticateRouter, setupApp } from './rest/auth';
19 | import { isGpgKeyValid } from './files/utils/gpg';
20 | import { registerMigrations } from './migrations';
21 | import { MigrationStore } from './migrations/BaseMigration';
22 |
23 | const d = debug('nucleus');
24 | const a = createA(d);
25 |
26 | const app = express();
27 |
28 | app.use(compression());
29 |
30 | app.use(express.static(path.resolve(__dirname, '..', 'public_out')));
31 |
32 | app.use(bodyParser.json());
33 |
34 | // THIS IS VERY DANGEROUS, WE USE IT TO BYPASS AUTH IN TESTING
35 | if (process.env.UNSAFELY_DISABLE_NUCLEUS_AUTH) {
36 | d('You have set UNSAFELY_DISABLE_NUCLEUS_AUTH. THIS IS VERY DANGEROUS');
37 | app.use((req, res, next) => {
38 | if (!req.user) {
39 | const user = localAuth[0];
40 | req.user = {
41 | id: user.username,
42 | displayName: user.displayName,
43 | isAdmin: true,
44 | photos: [
45 | { value: user.photo },
46 | ],
47 | };
48 | }
49 | next();
50 | });
51 | }
52 |
53 | app.use((req, res, next) => {
54 | res.error = (err) => {
55 | d('An error occurred inside Nucleus:', err);
56 | res.status(500).send();
57 | };
58 | next();
59 | });
60 |
61 | const restRouter = express();
62 | restRouter.get('/deepcheck', async (req, res) => {
63 | d('Running DeepCheck');
64 | const dead = (reason: string) => {
65 | res.status(500).json({ reason, alive: false });
66 | };
67 | // Ensure we can connect to the DB
68 | try {
69 | await driver.ensureConnected();
70 | } catch (err) {
71 | d('DeepCheck failed, could not connect to database', err);
72 | return dead('database');
73 | }
74 | // Ensure we can use the file store
75 | try {
76 | const content = `healthy_${Date.now()}`;
77 | await store.putFile('__deepcheck', Buffer.from(content), true);
78 | const fetchedContent = await store.getFile('__deepcheck');
79 | if (fetchedContent.toString() !== content) {
80 | d('DeepCheck failed, file store retrieved contents did not match put contents');
81 | return dead('file_store_logic');
82 | }
83 | await store.deletePath('__deepcheck');
84 | } catch (err) {
85 | d('DeepCheck failed, could not store, retrieve, or delete file from file store', err);
86 | return dead('file_store');
87 | }
88 | // All good here
89 | res.json({ alive: true });
90 | });
91 | restRouter.get('/healthcheck', (req, res) => res.json({ alive: true }));
92 | restRouter.use('/app', appRouter);
93 | restRouter.use('/auth', authenticateRouter);
94 | restRouter.use('/migration', migrationRouter);
95 | restRouter.use('/admin', (req, res, next) => {
96 | if (req.user && req.user.isAdmin) return next();
97 | return res.status(403).json({ error: 'Not an admin' });
98 | }, adminRouter);
99 | setupApp(app);
100 |
101 | restRouter.get('/config', a(async (req, res) => {
102 | const migrations = (await driver.getMigrations()).map(m => (m as any).get());
103 | for (const migration of migrations) {
104 | (migration as any).dependsOn = MigrationStore.get(migration.key)!.dependsOn;
105 | }
106 |
107 | res.json({
108 | migrations,
109 | user: req.user,
110 | baseUpdateUrl: await store.getPublicBaseUrl(),
111 | });
112 | }));
113 |
114 | app.use('/rest', restRouter);
115 |
116 | let contentPromise: Promise<string> | null;
117 |
118 | app.use('*', a(async (req, res) => {
119 | if (!contentPromise) {
120 | contentPromise = fs.readFile(path.resolve(__dirname, '../public_out/index.html'), 'utf8');
121 | }
122 | res.send(await contentPromise);
123 | }));
124 |
125 | restRouter.use('*', (req, res) => {
126 | res.status(404).json({
127 | error: 'Unknown Path',
128 | });
129 | });
130 |
131 | d('Setting up server');
132 | (async () => {
133 | d('Connecting to DB');
134 | try {
135 | await driver.ensureConnected();
136 | } catch (err) {
137 | d('Failed to connect to DB');
138 | d(err);
139 | return;
140 | }
141 | d('Checking GPG key');
142 | if (!await isGpgKeyValid()) {
143 | d('Bad gpg key, invalid');
144 | console.error('GPG key is invalid or missing, you must provide "config.gpgSigningKey"'.red);
145 | process.exit(1);
146 | }
147 | if (!gpgSigningKey.includes('-----BEGIN PGP PUBLIC KEY BLOCK-----')) {
148 | d('Bad gpg key, no public key');
149 | console.error('GPG key does not contain a public key, you must include both the public and private key in "config.gpgSigningKey"'.red);
150 | process.exit(1);
151 | }
152 | if (!gpgSigningKey.includes('-----BEGIN PGP PRIVATE KEY BLOCK-----')) {
153 | d('Bad gpg key, no private key');
154 | console.error('GPG key does not contain a private key, you must include both the public and private key in "config.gpgSigningKey"'.red);
155 | process.exit(1);
156 | }
157 | d('Good gpg key');
158 | d('Initializing public GPG key');
159 | await store.putFile(
160 | 'public.key',
161 | Buffer.from(gpgSigningKey.split('-----BEGIN PGP PRIVATE KEY BLOCK-----')[0]),
162 | true,
163 | );
164 | d('GPG key now public at:', `${await store.getPublicBaseUrl()}/public.key`);
165 | d('registering migrations');
166 | await registerMigrations();
167 | d('migrations all registered');
168 | app.listen(port, () => {
169 | d('Nucleus Server started on port:', port);
170 | });
171 | })().catch((err) => {
172 | if (typeof err === 'string') {
173 | console.error(err.red);
174 | } else {
175 | console.error(err);
176 | }
177 | process.exit(1);
178 | });
179 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "nucleus-server",
3 | "version": "1.1.3",
4 | "description": "Nucleus update server",
5 | "main": "lib/index.js",
6 | "bin": {
7 | "nucleus": "./lib/index.js"
8 | },
9 | "scripts": {
10 | "build:server": "tsc && rm -rf lib/__spec__ lib/**/__spec__",
11 | "build:fe:dev": "webpack --config webpack.config.js",
12 | "build:fe:prod": "webpack --config webpack.production.config.js",
13 | "clean": "rm -rf lib && rm -rf public_out",
14 | "commit": "git-cz",
15 | "dev": "concurrently --raw \"npm run start:fe\" \"npm run start:server:dev\" \"npm run start:static\"",
16 | "lint": "tslint -c tslint.json -p tsconfig.json --type-check && tslint -c tslint.json -p tsconfig.public.json --type-check",
17 | "prepublishOnly": "npm run clean && npm run lint && npm run build:server && npm run build:fe:prod",
18 | "release:patch": "changelog -p && git add CHANGELOG.md && git commit -m \"updated CHANGELOG.md\" && npm version patch && git push && git push --tags",
19 | "release:minor": "changelog -m && git add CHANGELOG.md && git commit -m \"updated CHANGELOG.md\" && npm version minor && git push && git push --tags",
20 | "release:major": "changelog -M && git add CHANGELOG.md && git commit -m \"updated CHANGELOG.md\" && npm version major && git push && git push --tags",
21 | "start": "npm run start:server:prod",
22 | "start:fe": "webpack-dev-server --progress --profile --colors --quiet --open",
23 | "start:server:dev": "nodemon --watch src --exec \"npm run build:server && cross-env DEBUG=nucleus* npm run start:server:prod\" -e ts",
24 | "start:server:prod": "node lib/index.js",
25 | "start:static": "make-dir .files && serve .files --port 9999 --silent",
26 | "test": "mocha --compilers ts:ts-node/register src/__spec__/rest.ts src/**/__spec__/*_spec.ts src/**/**/__spec__/*_spec.ts"
27 | },
28 | "author": "Samuel Attard",
29 | "license": "Apache-2.0",
30 | "dependencies": {
31 | "aws-sdk": "^2.119.0",
32 | "body-parser": "^1.17.2",
33 | "child-process-promise": "^2.2.1",
34 | "colors": "^1.2.5",
35 | "compression": "^1.7.1",
36 | "connect-redis": "^3.3.2",
37 | "debug": "^2.6.8",
38 | "express": "^4.15.3",
39 | "express-session": "^1.15.3",
40 | "fs-extra": "^3.0.1",
41 | "hat": "^0.0.3",
42 | "is-png": "^1.1.0",
43 | "multer": "^1.3.0",
44 | "mysql2": "^1.4.2",
45 | "node-fetch": "^1.7.3",
46 | "node-rsa": "^0.4.2",
47 | "passport": "^0.3.2",
48 | "passport-github": "^1.1.0",
49 | "passport-http": "^0.3.0",
50 | "passport-openid": "^0.4.0",
51 | "pg": "^7.3.0",
52 | "pg-hstore": "^2.3.2",
53 | "pify": "^3.0.0",
54 | "reflect-metadata": "^0.1.10",
55 | "semver": "^5.4.1",
56 | "sequelize": "^4.11.1",
57 | "sequelize-typescript": "^0.5.0",
58 | "sqlite3": "^3.1.12",
59 | "tedious": "^2.0.0",
60 | "to-ico": "^1.1.5"
61 | },
62 | "devDependencies": {
63 | "@atlaskit/avatar": "^4.0.5",
64 | "@atlaskit/banner": "^6.1.2",
65 | "@atlaskit/button": "^5.4.0",
66 | "@atlaskit/css-reset": "^1.1.4",
67 | "@atlaskit/droplist": "^4.0.1",
68 | "@atlaskit/field-base": "^8.1.1",
69 | "@atlaskit/field-text": "^4.0.1",
70 | "@atlaskit/icon": "^6.6.0",
71 | "@atlaskit/modal-dialog": "^2.6.0",
72 | "@atlaskit/multi-select": "^7.1.3",
73 | "@atlaskit/navigation": "^13.0.2",
74 | "@atlaskit/spinner": "^2.2.3",
75 | "@atlaskit/tabs": "^4.0.0",
76 | "@atlaskit/tooltip": "^6.2.0",
77 | "@navjobs/upload": "^3.1.3",
78 | "@types/body-parser": "^1.16.3",
79 | "@types/chai": "^4.0.4",
80 | "@types/chai-http": "^3.0.4",
81 | "@types/compression": "^0.0.34",
82 | "@types/connect-redis": "^0.0.6",
83 | "@types/debug": "0.0.29",
84 | "@types/express": "^4.0.35",
85 | "@types/express-session": "^1.15.0",
86 | "@types/fs-extra": "^3.0.2",
87 | "@types/mocha": "^2.2.43",
88 | "@types/multer": "^1.3.6",
89 | "@types/node": "^7.0.22",
90 | "@types/node-fetch": "^1.6.7",
91 | "@types/passport": "^0.3.3",
92 | "@types/passport-github": "^1.1.0",
93 | "@types/passport-http": "^0.3.2",
94 | "@types/pify": "0.0.28",
95 | "@types/react": "^16.0.20",
96 | "@types/react-dom": "^16.0.2",
97 | "@types/react-hot-loader": "^3.0.1",
98 | "@types/react-redux": "^4.4.41",
99 | "@types/react-router": "^2.0.0",
100 | "@types/redux": "^3.6.0",
101 | "@types/semver": "^5.4.0",
102 | "@types/sinon": "^2.3.5",
103 | "@types/webpack-env": "^1.13.0",
104 | "awesome-typescript-loader": "^3.1.3",
105 | "chai": "^4.1.2",
106 | "chai-http": "^4.0.0",
107 | "commitizen": "^2.9.6",
108 | "concurrently": "^3.5.0",
109 | "cross-env": "^5.0.0",
110 | "css-loader": "^0.28.4",
111 | "cz-customizable": "^5.2.0",
112 | "extract-text-webpack-plugin": "^2.1.0",
113 | "favicons-webpack-plugin": "^0.0.7",
114 | "file-loader": "^0.11.1",
115 | "generate-changelog": "^1.6.0",
116 | "highlight.js": "^9.12.0",
117 | "html-webpack-plugin": "^2.28.0",
118 | "make-dir-cli": "^1.0.0",
119 | "mocha": "^3.5.3",
120 | "node-sass": "^4.5.3",
121 | "nodemon": "^1.11.0",
122 | "postcss-loader": "^2.0.5",
123 | "react": "^15.5.4",
124 | "react-dom": "^15.5.4",
125 | "react-hot-loader": "3.0.0-beta.7",
126 | "react-redux": "^5.0.5",
127 | "react-router": "^2.6.0",
128 | "redux": "^3.6.0",
129 | "sass-loader": "^6.0.5",
130 | "serve": "^6.1.0",
131 | "serve-handler": "^3.2.2",
132 | "sinon": "^4.0.0",
133 | "style-loader": "^0.18.1",
134 | "styled-components": "1.4.6",
135 | "ts-node": "^3.3.0",
136 | "tslint": "^5.7.0",
137 | "tslint-config-airbnb": "^5.3.0",
138 | "typescript": "^3.1.6",
139 | "url-loader": "^0.5.8",
140 | "webpack": "^2.6.1",
141 | "webpack-bundle-analyzer": "^2.9.0",
142 | "webpack-cleanup-plugin": "^0.5.1",
143 | "webpack-dashboard": "^0.4.0",
144 | "webpack-dev-server": "^2.4.5"
145 | },
146 | "config": {
147 | "commitizen": {
148 | "path": "./node_modules/cz-customizable"
149 | },
150 | "cz-customizable": {
151 | "config": "./.cz.js"
152 | }
153 | }
154 | }
155 |
--------------------------------------------------------------------------------
/src/migrations/latest-installer/__spec__/LatestInstallerMigration_spec.ts:
--------------------------------------------------------------------------------
1 | import { expect } from 'chai';
2 | import * as fs from 'fs-extra';
3 | import * as os from 'os';
4 | import * as path from 'path';
5 | import { SinonStub, stub } from 'sinon';
6 |
7 | import LatestInstallerMigration from '../LatestInstallerMigration';
8 | import LocalStore from '../../../files/local/LocalStore';
9 |
10 | const fakeApp = {
11 | slug: 'app',
12 | name: 'App',
13 | channels: [{
14 | id: 'channel',
15 | versions: [{
16 | name: '0.0.1',
17 | rollout: 100,
18 | files: [{
19 | platform: 'darwin',
20 | arch: 'x64',
21 | fileName: 'Foo.pkg',
22 | type: 'installer',
23 | }, {
24 | platform: 'darwin',
25 | arch: 'x64',
26 | fileName: 'App1.dmg',
27 | type: 'installer',
28 | }],
29 | }, {
30 | name: '0.0.2',
31 | rollout: 100,
32 | files: [{
33 | platform: 'darwin',
34 | arch: 'x64',
35 | fileName: 'App2.dmg',
36 | type: 'installer',
37 | }, {
38 | platform: 'win32',
39 | arch: 'ia32',
40 | fileName: 'App.exe',
41 | type: 'installer',
42 | }, {
43 | platform: 'linux',
44 | arch: 'x64',
45 | fileName: 'App.deb',
46 | type: 'installer',
47 | }, {
48 | platform: 'darwin',
49 | arch: 'x64',
50 | fileName: 'Test.zip',
51 | type: 'update',
52 | }],
53 | }, {
54 | name: '0.0.3',
55 | rollout: 99,
56 | files: [{
57 | platform: 'win32',
58 | arch: 'ia32',
59 | fileName: 'App3.exe',
60 | type: 'installer',
61 | }],
62 | }],
63 | }],
64 | };
65 |
66 | describe('LatestInstallerMigration', () => {
67 | let dir: string;
68 | let store: LocalStore;
69 | let localConfig: LocalOptions;
70 | let migrator: LatestInstallerMigration;
71 | let fakeDriver: {
72 | getApps: SinonStub;
73 | };
74 |
75 | beforeEach(async () => {
76 | dir = await fs.mkdtemp(path.join(os.tmpdir(), '/'));
77 | localConfig = {
78 | root: dir,
79 | staticUrl: 'https://static.url.com/thing',
80 | };
81 | store = new LocalStore(localConfig);
82 | fakeDriver = {
83 | getApps: stub(),
84 | };
85 | migrator = new LatestInstallerMigration(store, fakeDriver as any);
86 | });
87 |
88 | afterEach(async () => {
89 | await fs.remove(dir);
90 | });
91 |
92 | describe('getItems', () => {
93 | it('should return an empty array when there are no files', async () => {
94 | fakeDriver.getApps.returns(Promise.resolve([]));
95 | expect(await migrator.getItems()).to.deep.equal([]);
96 | });
97 |
98 | it('should check for existence of ref files and mark as done appropriately', async () => {
99 | fakeDriver.getApps.returns(Promise.resolve([fakeApp]));
100 | const getFileStub = stub(store, 'getFile');
101 | getFileStub.returns(Promise.resolve(Buffer.from('0.0.0')));
102 | getFileStub.onCall(1).returns(Promise.resolve('0.0.2'));
103 | const items = await migrator.getItems();
104 | expect(items.length).to.equal(4);
105 | expect(items[0].done).to.equal(false);
106 | expect(items[1].done).to.equal(true, 'a file whose latest is already there should be marked as done');
107 | expect(items[2].done).to.equal(false);
108 | expect(items[3].done).to.equal(false);
109 | });
110 |
111 | it('should not use non-100 rollout files', async () => {
112 | fakeDriver.getApps.returns(Promise.resolve([fakeApp]));
113 | const getFileStub = stub(store, 'getFile');
114 | getFileStub.returns(Promise.resolve(Buffer.from('0.0.0')));
115 | const items = await migrator.getItems();
116 | expect(items.some(item => item.data.version === '0.0.3')).to.equal(false, 'should not use a non-100 rollout');
117 | });
118 |
119 | it('should use the latest version of an installer when there are duplicates', async () => {
120 | fakeDriver.getApps.returns(Promise.resolve([fakeApp]));
121 | const getFileStub = stub(store, 'getFile');
122 | getFileStub.returns(Promise.resolve(Buffer.from('0.0.0')));
123 | const items = await migrator.getItems();
124 | const dmgItem = items.find(item => item.data.latestKey.endsWith('.dmg'))!;
125 | expect(dmgItem).to.not.equal(null);
126 | expect(dmgItem.data).to.have.property('version', '0.0.2');
127 | });
128 |
129 | it('should not use any update type files', async () => {
130 | fakeDriver.getApps.returns(Promise.resolve([fakeApp]));
131 | const getFileStub = stub(store, 'getFile');
132 | getFileStub.returns(Promise.resolve(Buffer.from('0.0.0')));
133 | const items = await migrator.getItems();
134 | const zipItem = items.find(item => item.data.latestKey.endsWith('.zip'));
135 | expect(zipItem).to.equal(undefined);
136 | });
137 | });
138 |
139 | describe('runOnItem', () => {
140 | it('should do no work if the item is flagged as done', async () => {
141 | const getFile = stub(store, 'getFile');
142 | await migrator.runOnItem({
143 | done: true,
144 | data: {} as any,
145 | });
146 | expect(getFile.callCount).to.equal(0);
147 | });
148 |
149 | it('should copy the index file to the latest file if the item is flagged as not done', async () => {
150 | const getFile = stub(store, 'getFile');
151 | getFile.returns(Promise.resolve(Buffer.from('test 123')));
152 | const putFile = stub(store, 'putFile');
153 | putFile.returns(Promise.resolve(true));
154 | await migrator.runOnItem({
155 | done: false,
156 | data: {
157 | latestKey: 'latest/key/to/copy/to',
158 | indexKey: 'index/key/to/copy/from',
159 | version: '1.0.0',
160 | },
161 | });
162 | expect(getFile.callCount).to.equal(1);
163 | expect(getFile.firstCall.args[0]).to.equal('index/key/to/copy/from');
164 | expect(putFile.callCount).to.equal(2);
165 | expect(putFile.firstCall.args[0]).to.equal('latest/key/to/copy/to');
166 | expect(putFile.firstCall.args[1].toString()).to.equal('test 123');
167 | expect(putFile.secondCall.args[0]).to.equal('latest/key/to/copy/to.ref');
168 | expect(putFile.secondCall.args[1].toString()).to.equal('1.0.0');
169 | });
170 | });
171 | });
172 |
--------------------------------------------------------------------------------
/config.template.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 |
3 | module.exports = {
4 | /**
5 | * The port to run Nucleus Server on, if the port is in use the server will not start
6 | */
7 | port: 3030,
8 |
9 | /**
10 | * The fully qualified domain + path that Nucleus is being hosted at
11 | */
12 | baseURL: 'http://localhost:8888',
13 |
14 | /**
15 | * The data store to use when persisting plugins and versions. Current possible values
16 | * are "sequelize", ensure you also supply valid connection details for your
17 | * chosen strategy below.
18 | *
19 | * PRs welcome to add another data store.
20 | */
21 | dbStrategy: 'sequelize',
22 |
23 | /**
24 | * Sequelize connection information, please note all options are required
25 | *
26 | * database: The name of the database to connect to
27 | * dialect: The type of SQL database this is, check sequelize docs for more info
28 | * username: Username to use when connecting
29 | * password: Password to use when connecting
30 | * host: Hostname of database
31 | * port: Port to use when connecting
32 | * storage: Path to sqlite file, only used for sqlite dialect
33 | */
34 | sequelize: {
35 | dialect: 'sqlite',
36 | storage: path.resolve(__dirname, 'db.sqlite'),
37 | },
38 |
39 | /**
40 | * The file store to use when persisting update files and metadata. Current possible
41 | * values are "s3" and "local", ensure you also supply valid connection details if
42 | * required for your chosen strategy below.
43 | *
44 | * PRs welcome to add another file store.
45 | */
46 | fileStrategy: 'local',
47 |
48 | /**
49 | * Local file configuration
50 | *
51 | * root: Path on disk to the root of the static file store
52 | * staticUrl: The HTTP url to use to access the static file store remotely
53 | */
54 | local: {
55 | root: path.resolve(__dirname, '.files'),
56 | staticUrl: 'http://localhost:9999'
57 | },
58 |
59 | /**
60 | * There is actually no authentication config for s3; all config must be done through the standard AWS
61 | * environment variables or through EC2 IAM roles.
62 | *
63 | * See http://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/setting-credentials-node.html
64 | *
65 | * Bucket / Region / CloudFront config goes here though
66 | */
67 | s3: {
68 | // init: {
69 | // endpoint: '' // The alternate endpoint to reach the S3 instance at,
70 | // s3ForcePathStyle: true // Always use path style URLs
71 | // }
72 |
73 | bucketName: '', // The name for your S3 Bucket
74 |
75 | cloudfront: { // If you don't have CloudFront set up and just want to use the S3 bucket, set this to "null"
76 | distributionId: '', // The CloudFront distribution ID, used for invalidating files
77 | publicUrl: '', // Fully qualified URL for the root of the CloudFront proxy for the S3 bucket
78 | }
79 | },
80 |
81 | /**
82 | * The authentication strategy to use when logging users in. Current possible values are "local",
83 | * "openid" and "github". Make you also supply the required authentication details
84 | */
85 | authStrategy: 'local',
86 |
87 | /**
88 | * Local authentication details
89 | *
90 | * The `adminIdentifiers` array should be a list of usernames
91 | *
92 | * DISCLAIMER: This strategy should ONLY be used for local development and NEVER
93 | * used in production. Unicorns cry every time this setting is used in production.
94 | * Don't make the unicorns cry.
95 | *
96 | * displayName: The user friendly name of this user
97 | * username: A unique identifier to use when this user signs in, please note uniqueness is
98 | * not enforced
99 | * password: Well, uhhh, their password
100 | * photo: A URL for their profile, entirely optional, just makes things look nicer ;)
101 | */
102 | localAuth: [{
103 | displayName: 'Charlie',
104 | username: 'charlie',
105 | password: 'charlie',
106 | photo: 'https://pbs.twimg.com/profile_images/1219364727/charlie-support_400x400.png'
107 | }],
108 |
109 | /**
110 | * OpenID authentication details
111 | *
112 | * The `adminIdentifiers` array should be a list of email
113 | * addresses for users to consider admins
114 | *
115 | * realm: The domain that the server is hosted on
116 | * stateless: Stateless mode for openID
117 | * profile: Whether to fetch profile information, should normally be true
118 | * providerURL: Your openID provider URL
119 | * domain: Domain to restrict email addresses to
120 | */
121 | openid: {
122 | realm: 'http://localhost:8888',
123 | stateless: true,
124 | profile: true,
125 | providerURL: 'https://auth.myservice.com/openid/v2/op',
126 | domain: 'myservice.com'
127 | },
128 |
129 | /**
130 | * GitHub authentication details
131 | *
132 | * The `adminIdentifiers` array should be a list of GitHub usernames
133 | * to consider admins
134 | *
135 | * clientID: GitHub API client ID
136 | * clientSecret: GitHub API clientSecret
137 | * realm: The domain the server is hosted on
138 | */
139 | github: {
140 | clientID: '',
141 | clientSecret: ''
142 | },
143 |
144 | /**
145 | * See the documentation for your authentication strategy for what this array does
146 | */
147 | adminIdentifiers: ['admin@yourdomain.com', 'charlie'],
148 |
149 | /**
150 | * Session options; in development just leave this as the default.
151 | *
152 | * IN PRODUCTION PLEASE USE REDIS!
153 | *
154 | * type: Can be either "redis" or null
155 | *
156 | * redis:
157 | * host: The host URL for the redis instance
158 | * port: The port for the redis instance
159 | */
160 | sessionConfig: {
161 | type: null,
162 | secret: 'ThisIsNotSecret',
163 |
164 | redis: {
165 | host: '',
166 | port: ''
167 | }
168 | },
169 |
170 | organization: 'My Company Here',
171 |
172 | /**
173 | * GPG key to use when signing APT and YUM releases
174 | *
175 | * Requires to be unlocked (no password) and have both the private and
176 | * public key.
177 | */
178 | gpgSigningKey: 'GPG KEY HERE',
179 |
180 | /**
181 | * The default percentage rollout for new releases. The first release for
182 | * any channel will always be 100% but all future releases will have a
183 | * default rollout value of this setting
184 | */
185 | defaultRollout: 0
186 | };
187 |
--------------------------------------------------------------------------------
/src/db/sequelize/models/index.ts:
--------------------------------------------------------------------------------
1 | import { Table, Column, Model, HasMany, Unique, BelongsTo, Sequelize, DataType, ForeignKey } from 'sequelize-typescript';
2 |
3 | import * as debug from 'debug';
4 |
5 | import * as config from '../../../config';
6 | import { QueryInterface } from 'sequelize';
7 |
8 | @Table
9 | export class App extends Model<App> {
10 | @Column(DataType.STRING)
11 | name: string;
12 |
13 | @Column(DataType.STRING)
14 | slug: string;
15 |
16 | @Column(DataType.STRING)
17 | token: string;
18 |
19 | @HasMany(() => TeamMember)
20 | team: TeamMember[];
21 |
22 | @HasMany(() => Channel)
23 | channels: Channel[];
24 |
25 | @HasMany(() => WebHook)
26 | webHooks: WebHook[];
27 | }
28 |
29 | @Table
30 | export class WebHook extends Model<WebHook> {
31 | @Column(DataType.STRING)
32 | url: string;
33 |
34 | @Column(DataType.STRING)
35 | secret: string;
36 |
37 | @Column(DataType.BOOLEAN)
38 | registered: boolean;
39 |
40 | @ForeignKey(() => App)
41 | @Column(DataType.INTEGER)
42 | appId: number;
43 |
44 | @BelongsTo(() => App)
45 | app: App;
46 |
47 | @HasMany(() => WebHookError)
48 | errors: WebHookError[];
49 | }
50 |
51 | @Table
52 | export class WebHookError extends Model<WebHookError> {
53 | @Column(DataType.STRING(1000))
54 | message: string;
55 |
56 | @Column(DataType.INTEGER)
57 | responseCode: number;
58 |
59 | @Column(DataType.STRING(10000))
60 | responseBody: string;
61 |
62 | @ForeignKey(() => WebHook)
63 | @Column(DataType.INTEGER)
64 | webHookId: number;
65 |
66 | @BelongsTo(() => WebHook)
67 | webHook: WebHook;
68 | }
69 |
70 | @Table
71 | export class TeamMember extends Model<TeamMember> {
72 | @Column(DataType.STRING)
73 | userId: string;
74 |
75 | @ForeignKey(() => App)
76 | @Column(DataType.INTEGER)
77 | appId: number;
78 |
79 | @BelongsTo(() => App)
80 | app: App;
81 | }
82 |
83 | @Table
84 | export class Channel extends Model<Channel> {
85 | @Unique
86 | @Column(DataType.STRING)
87 | stringId: string;
88 |
89 | @Column(DataType.STRING)
90 | name: string;
91 |
92 | @ForeignKey(() => App)
93 | @Column(DataType.INTEGER)
94 | appId: number;
95 |
96 | @BelongsTo(() => App)
97 | app: App;
98 |
99 | @HasMany(() => Version)
100 | versions: Version[];
101 |
102 | @HasMany(() => TemporarySave)
103 | temporarySaves: TemporarySave[];
104 | }
105 | // version: string, filenames: string[], arch: string, platform: NucleusPlatform//
106 | @Table
107 | export class TemporarySave extends Model<TemporarySave> {
108 | @Unique
109 | @Column(DataType.STRING)
110 | saveString: string;
111 |
112 | @Column(DataType.STRING)
113 | version: string;
114 |
115 | @Column(DataType.STRING)
116 | arch: string;
117 |
118 | @Column(DataType.STRING)
119 | platform: string;
120 |
121 | @Column(DataType.DATE)
122 | date: Date;
123 |
124 | @Column(DataType.STRING)
125 | cipherPassword: string;
126 |
127 | @ForeignKey(() => Channel)
128 | @Column(DataType.INTEGER)
129 | channelId: number;
130 |
131 | @BelongsTo(() => Channel)
132 | channel: Channel;
133 |
134 | @HasMany(() => TemporarySaveFile)
135 | files: TemporarySaveFile[];
136 | }
137 |
138 | @Table
139 | export class TemporarySaveFile extends Model<TemporarySaveFile> {
140 | @Column(DataType.STRING)
141 | name: string;
142 |
143 | @ForeignKey(() => TemporarySave)
144 | @Column(DataType.INTEGER)
145 | temporarySaveId: number;
146 |
147 | @BelongsTo(() => TemporarySave)
148 | temporarySave: TemporarySave;
149 | }
150 |
151 | @Table
152 | export class Version extends Model<Version> {
153 | @Column(DataType.STRING)
154 | name: string;
155 |
156 | @Column(DataType.BOOLEAN)
157 | dead: boolean;
158 |
159 | @Column(DataType.INTEGER)
160 | rollout: number;
161 |
162 | @ForeignKey(() => Channel)
163 | @Column(DataType.INTEGER)
164 | channelId: number;
165 |
166 | @BelongsTo(() => Channel)
167 | channel: Channel;
168 |
169 | @HasMany(() => File)
170 | files: File[];
171 | }
172 |
173 | @Table
174 | export class File extends Model<File> {
175 | @Column(DataType.STRING)
176 | fileName: string;
177 |
178 | @Column(DataType.STRING)
179 | platform: string;
180 |
181 | @Column(DataType.STRING)
182 | arch: string;
183 |
184 | @Column(DataType.STRING)
185 | type: string;
186 |
187 | @Column(DataType.STRING({ length: 40 }))
188 | sha1: string;
189 |
190 | @Column(DataType.STRING({ length: 64 }))
191 | sha256: string;
192 |
193 | @ForeignKey(() => Version)
194 | @Column(DataType.INTEGER)
195 | versionId: number;
196 |
197 | @BelongsTo(() => Version)
198 | version: Version;
199 | }
200 |
201 | @Table
202 | export class Migration extends Model<Migration> implements NucleusMigration {
203 | @Column(DataType.STRING)
204 | key: string;
205 |
206 | @Column(DataType.STRING)
207 | friendlyName: string;
208 |
209 | @Column(DataType.BOOLEAN)
210 | complete: boolean;
211 | }
212 |
213 | const d = debug('nucleus:db:migrator');
214 |
215 | function createAddColumnMigration<T>(columnName: string, table: typeof Model, defaultValue: T) {
216 | return async function addColumnToTable(queryInterface: QueryInterface) {
217 | const description = await queryInterface.describeTable(table.getTableName());
218 | if (Object.keys(description).indexOf(columnName) === -1) {
219 | await queryInterface.addColumn(table.getTableName() as string, columnName, {
220 | type: (table as any).attributes[columnName].type,
221 | });
222 | await table.update({
223 | [columnName]: defaultValue,
224 | }, {
225 | where: {
226 | [columnName]: {
227 | $eq: null,
228 | },
229 | },
230 | });
231 | d(`adding the ${columnName} column to the ${table.getTableName()} table`);
232 | }
233 | };
234 | }
235 |
236 | const upwardsMigrations: ((queryInterface: QueryInterface) => Promise<void>)[] = [
237 | createAddColumnMigration('rollout', Version, 100),
238 | createAddColumnMigration('sha1', File, ''),
239 | createAddColumnMigration('sha256', File, ''),
240 | ];
241 |
242 | export default async function () {
243 | const sequelize = new Sequelize({
244 | database: config.sequelize.database,
245 | dialect: config.sequelize.dialect,
246 | username: config.sequelize.username,
247 | password: config.sequelize.password,
248 | host: config.sequelize.host,
249 | port: config.sequelize.port,
250 | storage: config.sequelize.storage,
251 | logging: false,
252 | });
253 |
254 | sequelize.addModels([
255 | File,
256 | Version,
257 | Channel,
258 | TeamMember,
259 | App,
260 | TemporarySave,
261 | TemporarySaveFile,
262 | WebHook,
263 | WebHookError,
264 | Migration,
265 | ]);
266 |
267 | await sequelize.authenticate();
268 | await sequelize.sync();
269 |
270 | const queryInterface = sequelize.getQueryInterface();
271 |
272 | for (const migrationFn of upwardsMigrations) {
273 | await migrationFn(queryInterface);
274 | }
275 |
276 | return sequelize;
277 | }
278 |
--------------------------------------------------------------------------------
/public/components/WebHookManagement.tsx:
--------------------------------------------------------------------------------
1 | import * as React from 'react';
2 |
3 | import AkButton from '@atlaskit/button';
4 | import { FieldTextStateless } from '@atlaskit/field-text';
5 | import AkSpinner from '@atlaskit/spinner';
6 | import AkToolTip from '@atlaskit/tooltip';
7 | import LockIcon from '@atlaskit/icon/glyph/lock';
8 | import WarningIcon from '@atlaskit/icon/glyph/warning';
9 |
10 | import WebHookLogsModal from './WebHookLogsModal';
11 |
12 | import * as styles from './WebHookManagement.scss';
13 |
14 | export interface WebHookManagementProps {
15 | app: NucleusApp;
16 | setApps: (apps: NucleusApp[]) => void;
17 | apps: NucleusApp[];
18 | hasPendingMigration: boolean;
19 | }
20 |
21 | export interface WebHookManagementState {
22 | creatingWebHook: boolean;
23 | deletingWebHooks: Set<number>;
24 | newWebHookURL: string;
25 | newWebHookSecret: string;
26 | showingLogsHook: boolean;
27 | showingLogsHookId: number | null;
28 | }
29 |
30 | export default class WebHookManagement extends React.PureComponent<WebHookManagementProps, WebHookManagementState> {
31 | state = {
32 | deletingWebHooks: new Set(),
33 | creatingWebHook: false,
34 | newWebHookURL: '',
35 | newWebHookSecret: '',
36 | showingLogsHook: false,
37 | showingLogsHookId: null,
38 | };
39 |
40 | private createWebHook = async () => {
41 | const { app } = this.props;
42 | if (!app) return;
43 | if (!this.state.newWebHookURL.match(/^https?:\/\/.+$/g)) {
44 | return alert('Invalid WebHook URL');
45 | }
46 | this.setState({
47 | creatingWebHook: true,
48 | });
49 | try {
50 | const { hook, success }: { hook: NucleusWebHook, success: boolean } = await (await fetch(
51 | `/rest/app/${app.id}/webhook`,
52 | {
53 | method: 'POST',
54 | credentials: 'include',
55 | body: JSON.stringify({
56 | url: this.state.newWebHookURL,
57 | secret: this.state.newWebHookSecret,
58 | }),
59 | headers: new Headers({
60 | 'Content-Type': 'application/json',
61 | }),
62 | },
63 | )).json();
64 | const newApp = Object.assign({}, app);
65 | newApp.webHooks.push(hook);
66 | this.props.setApps(this.props.apps.map((tApp) => {
67 | if (tApp.id !== newApp.id) return tApp;
68 | return newApp;
69 | }));
70 | } catch (err) {
71 | // Ignore
72 | } finally {
73 | this.setState({
74 | creatingWebHook: false,
75 | newWebHookURL: '',
76 | });
77 | }
78 | }
79 |
80 | private removeWebHook = (hook: NucleusWebHook) => async () => {
81 | const { app } = this.props;
82 | if (!app) return;
83 | if (this.state.deletingWebHooks.has(hook.id)) return;
84 | this.setState({
85 | deletingWebHooks: new Set(this.state.deletingWebHooks.add(hook.id)),
86 | });
87 | try {
88 | const { success, errors }: { success: boolean, errors: NucleusWebHookError[] } = await (await fetch(
89 | `/rest/app/${app.id}/webhook/${hook.id}`,
90 | {
91 | method: 'DELETE',
92 | credentials: 'include',
93 | },
94 | )).json();
95 | const newApp = Object.assign({}, app);
96 | if (success) {
97 | newApp.webHooks = newApp.webHooks.filter(tHook => tHook.id !== hook.id);
98 | } else {
99 | newApp.webHooks = newApp.webHooks.map((tHook) => {
100 | if (tHook.id !== hook.id) return tHook;
101 | tHook.errors = errors;
102 | return tHook;
103 | });
104 | }
105 | this.props.setApps(this.props.apps.map((tApp) => {
106 | if (tApp.id !== newApp.id) return tApp;
107 | return newApp;
108 | }));
109 | } catch (err) {
110 | // Ignore
111 | } finally {
112 | const newSet = new Set(this.state.deletingWebHooks);
113 | newSet.delete(hook.id);
114 | this.setState({
115 | deletingWebHooks: newSet,
116 | });
117 | }
118 | }
119 |
120 | private saveWebHookURL = (e) => {
121 | this.setState({
122 | newWebHookURL: e.target.value,
123 | });
124 | }
125 |
126 | private saveWebHookSecret = (e) => {
127 | this.setState({
128 | newWebHookSecret: e.target.value,
129 | });
130 | }
131 |
132 | private showWebHookLogs = (hook: NucleusWebHook) => () => {
133 | this.setState({
134 | showingLogsHook: true,
135 | showingLogsHookId: hook.id,
136 | });
137 | }
138 |
139 | private closeWebHookLogs = () => {
140 | this.setState({
141 | showingLogsHook: false,
142 | });
143 | }
144 |
145 | render() {
146 | const { app } = this.props;
147 | const logHook = app.webHooks.find(hook => hook.id === this.state.showingLogsHookId);
148 | return (
149 |
150 |
WebHooks
151 |
152 |
153 | {
154 | app.webHooks.map((hook, index) => (
155 |
156 | {
157 | !hook.registered
158 | ? (
159 |
160 |
161 |
162 | ) : null
163 | }
164 |
{hook.url}
165 |
Logs
166 |
167 | {
168 | this.state.deletingWebHooks.has(hook.id)
169 | ? (
170 |
173 | ) : 'Remove'
174 | }
175 |
176 |
177 | ))
178 | }
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 | {
188 | this.state.creatingWebHook
189 | ? (
190 |
193 | ) : 'Create'
194 | }
195 |
196 |
197 |
198 |
199 | );
200 | }
201 | }
202 |
--------------------------------------------------------------------------------