├── .cro.yml
├── .github
│   └── workflows
│       └── main.yml
├── .gitignore
├── .tom
│   ├── changes.raku
│   ├── commit.raku
│   ├── git-publish.raku
│   ├── git-summary.raku
│   ├── github-url-https-to-ssh.raku
│   ├── install.raku
│   ├── pull.raku
│   ├── push.raku
│   ├── release.raku
│   ├── restart.raku
│   ├── search.raku
│   ├── set-git.raku
│   ├── status.raku
│   ├── test.raku
│   ├── tomtit-pl6-to-raku.raku
│   ├── update-branch-list.raku
│   └── yaml-lint.raku
├── .travis.yml.off
├── Changes
├── Dockerfile
├── META6.json
├── README.md
├── bin
│   ├── sparky-runner.raku
│   ├── sparky-web.raku
│   ├── sparkyd
│   └── sparman.raku
├── conf
│   └── sparky-cluster.raku
├── css
│   └── style.css
├── db-init.raku
├── docs
│   ├── acl.md
│   ├── api.md
│   ├── auth.md
│   ├── cli.md
│   ├── database.md
│   ├── downstream.md
│   ├── env.md
│   ├── glossary.md
│   ├── job_api.md
│   ├── plugins.md
│   ├── sparman.md
│   ├── stp.md
│   └── ui.md
├── examples
│   ├── git-clone
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── hello-world
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── http-request
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── job-api-https
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── job-file-01
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── job-queue-cluster-many
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── job-queue-cluster
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── job-queue-override-attr
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── job-queue-predefined
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── job-queue-reqursive
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── job-queue-wait-child
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── job-queue-with-class
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── job-queue-workers
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── job-queue
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── job-stash-child-parent
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── job-stash-parent-child
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── raku-project
│   │   ├── sparky.yaml
│   │   └── sparrowfile
│   ├── request.json
│   └── scm-trigger
│       ├── sparky.yaml
│       └── sparrowfile
├── icons
│   ├── build-fail.png
│   ├── build-na.png
│   ├── build-pass.png
│   ├── build-queue.png
│   └── build-run.png
├── images
│   ├── sparky-web-ui-build-with-params.jpeg
│   ├── sparky-web-ui.png
│   ├── sparky-web-ui3.png
│   ├── sparky-web-ui4.png
│   ├── sparky-web-ui5.png
│   ├── sparky-web-ui6.png
│   └── task-result.png
├── js
│   ├── ansi_up.js
│   └── misc.js
├── lib
│   ├── Sparky.rakumod
│   └── Sparky
│       ├── HTML.rakumod
│       ├── Job.rakumod
│       ├── Security.rakumod
│       └── Utils.rakumod
├── logos
│   ├── sparky.png
│   ├── sparky.small.png
│   └── sparky.svg
├── misc
│   └── yaml-test.raku
├── systemd
│   ├── sparky-web.service
│   └── sparkyd.service
├── t
│   └── 00-run.t
├── templates
│   ├── about.crotmp
│   ├── build.crotmp
│   ├── builds.crotmp
│   ├── default_login.crotmp
│   ├── project.crotmp
│   ├── projects.crotmp
│   ├── queue.crotmp
│   └── report2.crotmp
├── utils
│   ├── generate-cert.sh
│   ├── install-sparky-single-node.raku
│   ├── install-sparky-web-systemd.raku
│   ├── install-sparky.raku
│   ├── install-sparky.sh
│   ├── install-sparkyd-systemd.raku
│   ├── update-sparky.raku
│   └── update-sparky.sh
└── watcher.sh

-------------------------------------------------------------------------------- /.cro.yml: --------------------------------------------------------------------------------
cro: 1
id: sparky
name: Sparky CI Web
entrypoint: bin/sparky-web.raku
ignore:
  - .cache
  - icons/
  - README.md
  - utils/
  - hosts
  - examples/
  - sparkyd.log

-------------------------------------------------------------------------------- /.github/workflows/main.yml: --------------------------------------------------------------------------------
name: Deps

on:
  push:
    branches:
      - '*'
    tags-ignore:
      - '*'
  pull_request:

jobs:
  raku:
    strategy:
      matrix:
        os:
          - ubuntu-latest
        raku-version:
          - "latest"
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v2
      - uses: Raku/setup-raku@v1
        with:
          raku-version: ${{ matrix.raku-version }}
      - name: Install Dependencies
        run: zef install . --/test

-------------------------------------------------------------------------------- /.gitignore: --------------------------------------------------------------------------------
lib/.precomp/
.precomp
*.tar.gz
.reports
db.sqlite3
.cache
.sparrowdo
.triggers
*.log

-------------------------------------------------------------------------------- /.tom/changes.raku: --------------------------------------------------------------------------------
#!raku

bash "head Changes";

-------------------------------------------------------------------------------- /.tom/commit.raku: --------------------------------------------------------------------------------
#!perl6

my $msg = prompt("message: ");

task-run "commit my changes", "git-commit", %( message => $msg );

-------------------------------------------------------------------------------- /.tom/git-publish.raku: --------------------------------------------------------------------------------
#!perl6

EVALFILE ".tom/commit.raku";
EVALFILE ".tom/push.raku";

-------------------------------------------------------------------------------- /.tom/git-summary.raku: --------------------------------------------------------------------------------
#!perl6

bash "git diff";
bash "git status";

-------------------------------------------------------------------------------- /.tom/github-url-https-to-ssh.raku: --------------------------------------------------------------------------------
#!raku

task-run "fix-https-ssh", "git-url-https-to-ssh";

-------------------------------------------------------------------------------- /.tom/install.raku: --------------------------------------------------------------------------------
bash "zef install . --force-install --/test";

-------------------------------------------------------------------------------- /.tom/pull.raku: --------------------------------------------------------------------------------
#!perl6

task-run "git pull", "git-pull";

-------------------------------------------------------------------------------- /.tom/push.raku: --------------------------------------------------------------------------------
#!perl6

task-run "git push", "git-push", %( confirm => "no" );

-------------------------------------------------------------------------------- /.tom/release.raku: --------------------------------------------------------------------------------
bash "fez upload";

-------------------------------------------------------------------------------- /.tom/restart.raku: --------------------------------------------------------------------------------
bash "sparman.raku worker_ui stop && sparman.raku worker_ui start";

-------------------------------------------------------------------------------- /.tom/search.raku: --------------------------------------------------------------------------------
my $ext = prompt("ext (rakumod): ");

$ext = "rakumod" unless $ext;

my $search1 = prompt("search1: ");

my $search2 = prompt("search2: ");

my $exclude = prompt("exclude: ");

say "find [$search1] [$search2] !$exclude in $ext";

task-run "find $search1 $search2 in $ext", "find", %(
    :$ext,
    :$search1,
    search2 => $search2 || "",
    exclude => $exclude || "",
);

-------------------------------------------------------------------------------- /.tom/set-git.raku: --------------------------------------------------------------------------------
#!perl6

task-run "set git", "git-base", %(
    email => 'melezhik@gmail.com',
    name => 'Alexey Melezhik',
    config_scope => 'local',
    set_credential_cache => 'on'
);

-------------------------------------------------------------------------------- /.tom/status.raku: --------------------------------------------------------------------------------
#!perl6

bash "git status";

-------------------------------------------------------------------------------- /.tom/test.raku: --------------------------------------------------------------------------------
task-run "check json files", "json-lint", %( path => "{$*CWD}" );
bash "zef test .";

-------------------------------------------------------------------------------- /.tom/tomtit-pl6-to-raku.raku: --------------------------------------------------------------------------------
task-run "renaming", "tomtit-pl6-to-raku", %(
    dir => ".tom/"
);

-------------------------------------------------------------------------------- /.tom/update-branch-list.raku: --------------------------------------------------------------------------------
#!perl6

bash "git remote update origin --prune";

-------------------------------------------------------------------------------- /.tom/yaml-lint.raku: --------------------------------------------------------------------------------
#!raku

task-run "lint modified files", "yaml-lint";

-------------------------------------------------------------------------------- /.travis.yml.off: --------------------------------------------------------------------------------
language: minimal

script:
  - curl -d thing=https://github.com/melezhik/Sparky http://rakudist.raku.org/ci -s | bash

-------------------------------------------------------------------------------- /Changes: --------------------------------------------------------------------------------
Revision history for Sparky

{{$NEXT}}

0.2.11 2025-04-04T21:51:00Z

    - Improved algorithm after v 0.2.10 fix

0.2.10 2025-04-04T21:18:00Z

    - Improved algorithm of sending data to browser via web socket

0.2.9 2025-02-20T14:50:00Z

    - Make sparman part of Sparky distribution

    - Comment some noisy logs

0.2.8 2024-07-14T12:30:00Z

    - Migrate ci pipeline to Sparky

    - Use img.shields.io to render project badges

    - Fix issues with a usage of group variables and template variables together

0.2.7 2024-06-14T13:30:00Z

    - Huge overhaul of documentation
    - UI - subtasks
    - UI - multiple select
    - Artifacts (not yet documented)

0.2.6 2024-05-31T11:20:00Z

    - Spruce up CSS

0.2.5 2024-05-30T13:30:00Z

    - Add default login/password for default authentication

0.2.4 2024-05-30T10:35:00Z

    - Switch to bulma 1.0, remove custom themes
    - Add default authentication protocol as a default authentication schema
    - Fix bug in sparkyd scheduler that prevented scm projects with crontab settings from running

0.2.3 2024-04-26T16:30:00Z

    - Change authentication protocol to oauth 2.0
    - Bump Sparrow6 version

0.2.2 2024-03-21T14:30:00Z

    - Fix issue with long reports hanging browsers (now web socket data is sent in chunks)

0.2.1 2024-03-08T21:20:00Z

    - Fix various issues with template vars
    - Fix excessive CPU consumption over web socket on the main page
    - Bump Sparrow6 deps version

0.2.0 2024-02-04T23:50:00Z

    - Huge refactoring of UI, web sockets support
    - Sparkyd no longer crashes on broken yamls
    - Add password UI control
    - Template variables support

0.1.16 2024-01-05T21:00:00Z

    - Support of new sparrowdo --image option to run docker containers from images

0.1.15 2024-01-03T17:50:00Z

    - Depends on the latest version of Sparrow6
    - #60 - Replace JSON::Tiny by JSON::Fast
    - #61 - Renaming *.pl6 to *.raku

0.1.14 2023-12-29T17:00:00Z

    - Build parameters, UI checkboxes support

0.1.13 2023-12-28T17:20:00Z

    - Introduce new logo
    - New feature - Add build runtime and default parameters
    - Small graphic design / UI changes

0.1.12 2023-12-26T21:20:00Z

    - Fix regression bug that stopped sparky-runner.raku from running

0.1.11 2023-11-25T23:00:00Z

    - Describe flappers protection mechanism
    - Allow disabling flappers protection mechanism using various methods

0.1.10 2023-02-28T23:10:00Z

    - Fix sort order of triggered jobs to support priority queues in Sparrow
    - Make the first git tag - #57 for people willing to self-host

0.1.9 2023-01-27T17:20:00Z

    - sparkyd - don't fail when a nonexistent branch is supplied with scm triggering
    - sparkyd - traverse trigger files in order of creation, to support queues with priorities

0.1.8 2022-09-20T14:20:00Z

    - Fix job files API (various issues)
    - UI - Set default theme to `slate`
    - Enable colorful output, now it looks much nicer!
    - Update Dockerfile (use latest version of Rakudo)
    - Enable zef deps report in .sparkyci.yaml

0.1.7 2022-08-05T22:40:00Z

    - New feature - http basic authentication support (enabled by default)
    - Bug fix - fix broken rebuild button
    - Security fix - rebuilding for a project without sparky.yaml is forbidden
    - New feature - use DBIish::Pool with mysql/postgresql drivers to deal with https://github.com/raku-community-modules/DBIish/issues/222
    - New feature - `SPARKY_MAX_JOBS` - threshold for the maximum number of concurrent jobs, to protect the Sparky server from overload
    - New feature - job files API
    - Bug fix - proper merge of sparky.yaml/sparrowdo.tags and .trigger/sparrowdo.tags
    - Improvement - add timeout for `git ls-remote` to prevent hanging for protected URLs
    - Improvement - flapping scm jobs protection (removal from scheduler)
    - Improvement - dead (finished spawned) jobs removed from scheduler
    - Improvement - speed/load optimization - get job statuses from file cache instead of database

0.1.6 2022-03-13T05:20:00Z

    - Improvement - cache database connections to avoid the "too many database connections" bug

    - Bug fix - calculate build_id using a job_id condition to avoid subtle / random bugs

    - Bug fix - various small fixes for mysql db engine

    - Breaking change, DB schema. builds.key column renamed to builds.job_id because mysql does not allow naming a column `key`

    - HTTP API - new endpoint - build-with-tags, to pass named parameters to a build

    - `SPARKY_HOST` variable now allows changing the web app tcp host

0.1.5 2022-02-12T00:00:00Z

    - New feature - JobApi
    - Minor fixes in web UI templates
    - Use Text::Markdown instead of a javascript library to parse README.md in /about page
    - Refactoring - don't use Hash::Merge anymore (rewritten using a better approach)
    - TLS support

0.1.4 2021-12-14T00:00:00Z

    - Support spaces in sparrowdo tags

0.1.3 2021-09-15T10:03:00Z

    - SPARKY_HTTP_ROOT bug fix

0.1.2 2021-09-07T05:34:00Z

    - Clean up readme again

0.1.1 2021-09-07T05:34:00Z

    - Clean up readme
    - Remove obsolete dependencies

0.1.0 2021-09-07T03:16:00Z

    - Move from Bailador to Cro
    - Minor css/html bug fixes
    - Slight improvement of documentation
    - Badges for projects ( based on @thibaultduponchelle pull request )
    - Sandstone is default bulma theme

0.0.30 2021-01-10T23:54:47Z

    - JS markdown parser and syntax highlighter in about page. Removing Text::Markdown
    - Highlight code on project page

0.0.29 2021-01-04T00:22:18Z

    - New feature: triggering builds on SCM changes
    - Lock files mechanism to prevent excessive crontab runs
    - Allow manual run feature
    - Fix: rebuild action now works properly
    - Use Hash::Merge 2.0.0+ version (see https://github.com/melezhik/sparky/pull/2)
    - Solar is default bulma theme
    - Update examples, remove obsolete code
    - Update README, make it more accurate
    - UI improvements, kudos to @thibaultduponchelle

0.0.28 2020-11-09T17:34:45Z

    - Add scripts missing during previous release

0.0.27 2020-11-04T19:06:12Z

    - change scripts extension: `pl6` -> `raku`
    - db-init.raku - Use HOME env variable to make the script work on all linux/mac platforms

0.0.26 2020-07-26T13:04:29-05:00

    - New feature - FTP - file triggering protocol
    - FTP changes - `conf` option is deprecated, more options are handled via `sparrowdo` section
    - Fix - handle path to sparrowdo configuration file

0.0.25 2018-12-04T22:01:02Z

    - Trigger build from UI
    - Switched to Bulma css framework
    - Projects pages - #8
    - Systemd install script

# 0.0.24

- Sparky modules support
- Skip bootstrap by using `bootstrap: false` in sparky.yaml file
- Ssh private key hotfix

# 0.0.23

- remove old debug code
- language correction in readme file

# 0.0.22

* Sparky distribution moved to CPAN
* sparky.yaml - respect for `docker` parameter
* docker exec does not allocate a pseudo tty
* format error fix

# 0.0.21

* SQL bug fix ( for project column )

# 0.0.20

* Database engine now is configurable. Yes, you can use MySQL and PostgreSQL!

# 0.0.19

* Use Data::Dump to dump config in a log

# 0.0.18

* Crontab entries check logic

# 0.0.17

* Refactoring, child processes logic improved

# 0.0.16

* Downstream projects
* Improved logging

# 0.0.15

Change the logic of the runners spawner, trying to deal with memory leaks

# 0.0.14

2017-07-31

* Improved web ui ( adjusted twitter bootstrap theme )

# 0.0.13

2017-07-28

* Improved sparky-runner.pl6 logic when run standalone

# 0.0.12

2017-07-28

* Refactoring - `--report-root` and `--stdout` options are abolished

# 0.0.11

2017-07-27

* Minor documentation improvements

# 0.0.10

2017-07-27

* Rewrote documentation, hid some internal stuff.

# 0.0.9

2017-07-27

* Add web-ui

# 0.0.8

2017-07-26

* sparrowdo/sparky config mess bugfix

# 0.0.7

2017-07-26

* First working version with purging old builds

# 0.0.4

2017-07-25

* Small improvements for sparky-runner default settings

# 0.0.3

2017-07-25

* Tweaked documentation a bit.

# 0.0.2

2017-07-19

* Changed logic of timeouts.
* Minor corrections in runner ( ignore crontab entries when run directly ).

# 0.0.1

2017-07-19

* Just a first version.

-------------------------------------------------------------------------------- /Dockerfile: --------------------------------------------------------------------------------
FROM alpine:latest
ENV PATH="/home/raku/.raku/bin:/opt/rakudo-pkg/bin:${PATH}"
RUN apk update && apk add openssl bash curl wget perl openssl-dev sqlite sqlite-dev sudo git
RUN apk add --no-cache bash
RUN curl -1sLf \
    'https://dl.cloudsmith.io/public/nxadm-pkgs/rakudo-pkg/setup.alpine.sh' \
    | bash
RUN apk add rakudo-pkg
RUN adduser -D -h /home/raku -s /bin/bash -G wheel raku
RUN echo '%wheel ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers
RUN addgroup raku wheel
RUN sudo echo
USER raku
RUN git clone https://github.com/ugexe/zef.git /tmp/zef && \
    cd /tmp/zef && \
    raku -I. bin/zef install . --/test --install-to=home
RUN zef update
RUN zef install --/test JSON::Unmarshal
RUN zef install --/test IO::Socket::Async::SSL
RUN zef install --/test JSON::Fast
RUN zef install --/test OO::Monitors
RUN zef install --/test Shell::Command
RUN zef install --/test Docker::File
RUN zef install --/test File::Ignore
RUN zef install --/test DBIish::Pool
RUN zef install --/test JSON::JWT
RUN zef install --/test HTTP::HPACK
RUN sudo apk add build-base libffi-dev
RUN zef install --/test Digest
RUN zef install --/test Cro::TLS
RUN zef install --/test Log::Timeline
RUN zef install --/test Text::Markdown
RUN zef install --/test Terminal::ANSIColor
RUN zef install --/test Base64
RUN zef install --/test Digest::SHA1::Native
RUN zef install --/test Crypt::Random
RUN zef install --/test IO::Socket::SSL
RUN echo OK1 && zef install --/test https://github.com/melezhik/Sparrow6.git
RUN echo OK2 && zef install --/test https://github.com/melezhik/sparrowdo.git
RUN echo OK3 && zef install --/test https://github.com/melezhik/Tomtit.git
RUN echo OK4 && zef install --/test https://github.com/melezhik/Tomty.git
RUN echo OK5 && zef install --/test --force-install https://github.com/melezhik/sparky-job-api.git
RUN echo OK6 && zef install --/test --force-install https://github.com/melezhik/sparky.git
RUN ls -l && git clone https://github.com/melezhik/sparky.git /home/raku/Sparky
WORKDIR /home/raku/Sparky
RUN ls -l && raku db-init.raku
EXPOSE 4000
RUN cp -r examples/hello-world/ /home/raku/.sparky/projects/
ENTRYPOINT nohup sparkyd 2>&1 & cro run

-------------------------------------------------------------------------------- /META6.json: --------------------------------------------------------------------------------
{
    "auth": "zef:melezhik",
    "authors" : [
        "Alexey Melezhik"
    ],
    "build-depends" : [ ],
    "depends" : [
        "YAMLish",
        "DBIish",
        "DBIish::Pool",
        "JSON::Fast",
        "HTTP::Tiny",
        "Sparrow6:ver<0.0.49+>",
        "Sparrowdo:ver<0.1.27+>",
        "Time::Crontab",
        "File::Directory::Tree",
        "cro",
        "Cro::HTTP:ver<0.8.9+>",
        "Cro::HTTP::Server",
        "Cro::HTTP::Router",
        "Cro::WebApp::Template",
        "Data::Dump:ver<0.0.12+>",
        "Text::Markdown"
    ],
    "description" : "Sparky is a flexible and minimalist continuous integration server and distributed tasks runner written in Raku",
    "license" : "Artistic-2.0",
    "name" : "Sparky",
    "perl" : "6.*",
    "provides" : {
        "Sparky" : "lib/Sparky.rakumod",
        "Sparky::Security" : "lib/Sparky/Security.rakumod",
        "Sparky::HTML" : "lib/Sparky/HTML.rakumod",
        "Sparky::Job" : "lib/Sparky/Job.rakumod",
        "Sparky::Utils" : "lib/Sparky/Utils.rakumod"
    },
    "resources" : [ ],
    "source-type" : "git",
    "source-url" : "https://github.com/melezhik/sparky.git",
    "support" : {
        "bugtracker" : "https://github.com/melezhik/sparky/issues",
        "source" : "https://github.com/melezhik/sparky.git"
    },
    "tags" : [ ],
    "test-depends" : [ ],
    "version" : "0.2.11"
}

-------------------------------------------------------------------------------- /README.md: --------------------------------------------------------------------------------
# SYNOPSIS

Sparky is a flexible and minimalist continuous integration server and distributed tasks runner written in Raku.

![Sparky Logo](https://raw.githubusercontent.com/melezhik/sparky/master/logos/sparky.small.png)

Sparky features:

* Defining job scheduling times in crontab style
* Triggering jobs using external APIs and custom logic
* Job scenarios are pure Raku code with additional support of the [Sparrow6](https://github.com/melezhik/Sparrow6/blob/master/documentation/dsl.md) automation framework
* Use of plugins in different programming languages
* Everything is kept in an SCM repository - easy to port, maintain and track changes
* Jobs get run in one of 3 flavors - 1) on localhost 2) on remote machines via ssh 3) on docker instances
* Nice web UI to run jobs and read reports
* Can be run in a peer-to-peer network fashion with distributed tasks support

# Build status

![Github actions](https://github.com/melezhik/sparky/actions/workflows/main.yml/badge.svg)
![SparrowCI](https://ci.sparrowhub.io/project/gh-melezhik-sparky/badge)

# Sparky workflow in 4 lines:

```bash
$ nohup sparkyd & # run Sparky daemon to trigger jobs
$ nohup cro run & # run Sparky CI UI to see job statuses and reports
$ nano ~/.sparky/projects/my-project/sparrowfile # write a job scenario
$ firefox 127.0.0.1:4000 # run jobs and get reports
```

# Installation

```bash
$ sudo apt-get install sqlite3
$ git clone https://github.com/melezhik/sparky.git
$ cd sparky && zef install .
```

## Database initialization

Sparky requires a database to operate.

Run the database initialization script to populate the database schema:

```bash
$ raku db-init.raku
```

# Sparky components

Sparky comprises several components:

* Jobs scheduler

* Jobs definitions

* Jobs workers (including remote jobs)

* Jobs UI

* CLI

## Job scheduler

To run the Sparky jobs scheduler (aka daemon), run in a console:

```bash
$ sparkyd
```

Scheduler logic:

* The Sparky daemon traverses sub directories found at the project root directory.

* For every directory found, it initiates a job run process by invoking the sparky worker ( `sparky-runner.raku` ).

* The Sparky root directory default location is `~/.sparky/projects`.

* Once all the sub directories are passed, the sparky daemon sleeps for $timeout seconds.

* A `timeout` option allows balancing the load on your system.

* You can change the timeout by applying the `--timeout` parameter when running the sparky daemon:

```bash
$ sparkyd --timeout=600 # sleep 10 minutes
```

* You can also set the timeout by using the `SPARKY_TIMEOUT` environment variable:

```bash
$ SPARKY_TIMEOUT=30 sparkyd ...
```

Running the job scheduler in daemonized mode:

```bash
$ nohup sparkyd &
```

To install sparkyd as a systemd unit:

```bash
$ nano utils/install-sparkyd-systemd.raku # change working directory and user
$ sparrowdo --sparrowfile=utils/install-sparkyd-systemd.raku --no_sudo --localhost
```

## Sparky Jobs UI

Sparky has a simple web UI that allows triggering jobs and viewing reports.

To run the Sparky UI web application:

```bash
$ cro run
```

To install the Sparky CI web app as a systemd unit:

```bash
$ nano utils/install-sparky-web-systemd.raku # change working directory, user and root directory
$ sparrowdo --sparrowfile=utils/install-sparky-web-systemd.raku --no_sudo --localhost
```

By default the Sparky UI application listens on host `0.0.0.0`, port `4000`; to override these settings, set `SPARKY_HOST` and `SPARKY_TCP_PORT` in the `~/sparky.yaml` configuration file:

```yaml
SPARKY_HOST: 127.0.0.1
SPARKY_TCP_PORT: 5000
```

## Sparky jobs definitions

A Sparky job needs a directory located under the Sparky root directory:

```bash
$ mkdir ~/.sparky/projects/teddy-bear-app
```

To create a job scenario, create a file named `sparrowfile` in the job directory.

Sparky uses pure [Raku](https://raku.org) for the job syntax, for example:

```bash
$ nano ~/.sparky/projects/hello-world/sparrowfile
```

```raku
#!raku
say "hello Sparky!";
```

To allow a job to be executed by the scheduler, one needs to create `sparky.yaml` - a yaml based job definition; the minimal form would be:

```bash
$ nano ~/.sparky/projects/hello-world/sparky.yaml
```

```yaml
allow_manual_run: true
```

## Extending scenarios with Sparrow automation framework

To extend core functions, Sparky is fully integrated with the [Sparrow](https://github.com/melezhik/Sparrow6) automation framework.
Here is an example of a job that uses Sparrow plugins to build a typical Raku project:

```bash
$ nano ~/.sparky/projects/raku-build/sparrowfile
```

```raku
directory "project";

# %*ENV<PWD> subscripts reconstructed - angle-bracket
# subscripts were lost in extraction
git-scm 'https://github.com/melezhik/rakudist-teddy-bear.git', %(
  to => "project",
);

zef "{%*ENV<PWD>}/project", %(
  depsonly => True
);

zef 'TAP::Harness App::Prove6';

bash 'prove6 -l', %(
  debug => True,
  cwd => "{%*ENV<PWD>}/project/"
);
```

The repository of Sparrow plugins is available at [https://sparrowhub.io](https://sparrowhub.io)

## Sparky workers

Sparky uses [Sparrowdo](https://github.com/melezhik/sparrowdo) to launch jobs in three fashions:

* on localhost ( the same machine where Sparky is installed, default )
* on a remote host with ssh
* in a docker container on localhost / a remote machine

```
/--------------------\ [ localhost ]
| Sparky on localhost| --> sparrowdo client --> job (sparrow) --> [ container ]
\--------------------/ [ ssh host ]
```

By default job scenarios get executed _on the same machine you run Sparky at_; to run jobs on a _remote host_, set the sparrowdo section in the `sparky.yaml` file:

```bash
$ nano ~/.sparky/projects/teddy-bear-app/sparky.yaml
```

```yaml
sparrowdo:
  host: '192.168.0.1'
  ssh_private_key: /path/to/ssh_private/key.pem
  ssh_user: sparky
  no_index_update: true
  sync: /tmp/repo
```

Follow the [sparrowdo cli](https://github.com/melezhik/sparrowdo#sparrowdo-cli) documentation for an explanation of the `sparrowdo` configuration section.

### Skip bootstrap

Sparrowdo client bootstrap might take some time.

To disable bootstrap, use the `bootstrap: false` option.

This is useful if the sparrowdo client is already installed on the target host.

```yaml
sparrowdo:
  bootstrap: false
```

### Purging old builds

To remove old job builds, set the `keep_builds` parameter in `sparky.yaml`:

```bash
$ nano ~/.sparky/projects/teddy-bear-app/sparky.yaml
```

Put the number of builds to keep:

```yaml
keep_builds: 10
```

That makes Sparky remove old builds and keep only the last `keep_builds` builds.

### Run jobs by cron

To run Sparky jobs periodically, set a `crontab` entry in the sparky.yaml file.

For example, to run a job every hour at minutes 30, 50 and 55:

```bash
$ nano ~/.sparky/projects/teddy-bear-app/sparky.yaml
```

```cron
crontab: "30,50,55 * * * *"
```

Follow the [Time::Crontab](https://github.com/ufobat/p6-time-crontab) documentation on the crontab entry format. A consolidated job definition is sketched below.
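Putting the pieces above together, a single job definition often combines manual runs, scheduling, build retention and a worker target. A minimal consolidated sketch (the host and values are illustrative, taken from the sections above):

```yaml
# ~/.sparky/projects/teddy-bear-app/sparky.yaml
allow_manual_run: true        # allow triggering from the web UI
crontab: "30,50,55 * * * *"   # run every hour at minutes 30, 50 and 55
keep_builds: 10               # purge all but the last 10 builds
sparrowdo:                    # run the job on a remote host over ssh
  host: '192.168.0.1'
  ssh_user: sparky
  bootstrap: false
```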
274 | 275 | ### Manual run 276 | 277 | To trigger job manually from web UI, use `allow_manual_run`: 278 | 279 | ```bash 280 | $ nano ~/.sparky/projects/teddy-bear-app/sparky.yaml 281 | ``` 282 | 283 | ```yaml 284 | allow_manual_run: true 285 | ``` 286 | 287 | ### Trigger job by SCM changes 288 | 289 | To trigger Sparky jobs on SCM changes, define `scm` section in `sparky.yaml` file: 290 | 291 | ```yaml 292 | scm: 293 | url: $SCM_URL 294 | branch: $SCM_BRANCH 295 | ``` 296 | 297 | Where: 298 | 299 | * `url` - git URL 300 | * `branch` - git branch, optional, default value is `master` 301 | 302 | For example: 303 | 304 | ```yaml 305 | scm: 306 | url: https://github.com/melezhik/rakudist-teddy-bear.git 307 | branch: master 308 | ``` 309 | 310 | Once a job is triggered respected SCM data is available via `tags()` function: 311 | 312 | ```raku 313 | directory "scm"; 314 | 315 | say "current commit is: {tags()}"; 316 | 317 | git-scm tags(), %( 318 | to => "scm", 319 | branch => tags 320 | ); 321 | 322 | bash "ls -l {%*ENV}/scm"; 323 | ``` 324 | 325 | To set default values for SCM_URL and SCM_BRANCH, use sparrowdo `tags`: 326 | 327 | `sparky.yaml`: 328 | 329 | ```yaml 330 | sparrowdo: 331 | tags: SCM_URL=https://github.com/melezhik/rakudist-teddy-bear.git,SCM_BRANCH=master 332 | ``` 333 | 334 | These is useful when trigger job manually. 335 | 336 | ### Flappers protection mechanism 337 | 338 | Flapper protection mechanism kicks out SCM urls that are timeouted (certain amount of time) during git connection, from scheduling, this mechanism protects sparkyd worker from stalling. 339 | 340 | To disable flappers protection mechanism, set `SPARKY_FLAPPERS_OFF` environment variable 341 | or adjust `~/sparky.yaml` configuration file: 342 | 343 | ```yaml 344 | worker: 345 | flappers_off: true 346 | ``` 347 | 348 | ### Disable jobs 349 | 350 | To prevent Sparky job from execution use `disable` option: 351 | 352 | ```bash 353 | $ nano ~/.sparky/projects/teddy-bear-app/sparky.yaml 354 | 355 | disabled: true 356 | ``` 357 | 358 | # Advanced topics 359 | 360 | Following are advanced topics covering some cool Sparky features. 361 | 362 | ## Job UIs 363 | 364 | Sparky UI DSL allows to grammatically describe UI for Sparky jobs 365 | and pass user input into a scenario as variables. 366 | 367 | Read more at [docs/ui.md](https://github.com/melezhik/sparky/blob/master/docs/ui.md) 368 | 369 | ## Downstream jobs 370 | 371 | Downstream jobs get run after some _main_ job has finished. 372 | 373 | Read more at [docs/downstream.md](https://github.com/melezhik/sparky/blob/master/docs/downstream.md) 374 | 375 | ## Sparky triggering protocol (STP) 376 | 377 | Sparky triggering protocol allows to trigger jobs automatically by creating files in special format. 378 | 379 | Read more at [docs/stp.md](https://github.com/melezhik/sparky/blob/master/docs/stp.md) 380 | 381 | ## Job API 382 | 383 | Job API allows to orchestrate multiple Sparky jobs. 384 | 385 | Read more at [docs/job_api.md](https://github.com/melezhik/sparky/blob/master/docs/job_api.md) 386 | 387 | ## Sparky plugins 388 | 389 | Sparky plugins is way to extend Sparky jobs by writing reusable plugins as Raku modules. 390 | 391 | Read more at [docs/plugins.md](https://github.com/melezhik/sparky/blob/master/docs/plugins.md) 392 | 393 | ## HTTP API 394 | 395 | Sparky HTTP API allows execute Sparky jobs remotely over HTTP. 
396 | 397 | Read more at [docs/api.md](https://github.com/melezhik/sparky/blob/master/docs/api.md) 398 | 399 | ## Security 400 | 401 | ### Authentication 402 | 403 | Sparky web server _comes with_ two authentication protocols, 404 | choose proper one depending on your requirements. 405 | 406 | Read more at [docs/auth.md](https://github.com/melezhik/sparky/blob/master/docs/auth.md) 407 | 408 | ### ACL 409 | 410 | Sparky ACL allows to create access control lists to manage role based access to Sparky resources. 411 | 412 | Read more at [docs/acl.md](https://github.com/melezhik/sparky/blob/master/docs/acl.md) 413 | 414 | ## Databases support 415 | 416 | Sparky keeps it's data in database, by default it uses sqlite, 417 | following databases are supported: 418 | 419 | * SQLite 420 | * MySQL/MariaDB 421 | * PostgreSQL 422 | 423 | Read more at [docs/database.md](https://github.com/melezhik/sparky/blob/master/docs/database.md) 424 | 425 | ## TLS Support 426 | 427 | Sparky web server may run on TLS. To enable this add a couple of parameters to `~/sparky.yaml` 428 | 429 | configuration file: 430 | 431 | ``` 432 | SPARKY_USE_TLS: true 433 | tls: 434 | private-key-file: '/home/user/.sparky/certs/www.example.com.key' 435 | certificate-file: '/home/user/.sparky/certs/www.example.com.cert' 436 | ``` 437 | 438 | `SPARKY_USE_TLS` enables SSL mode and `tls` section has paths to ssl certificate ( key and certificate parts ). 439 | 440 | # Additional topics 441 | 442 | ## Sparman 443 | 444 | Sparman is a cli to ease SparrowCI management. See [docs/sparman.md](docs/sparman.md) document. 445 | 446 | ## Sparky cli 447 | 448 | Sparky cli allows to trigger jobs in terminal. 449 | 450 | Read more at [docs/cli.md](https://github.com/melezhik/sparky/blob/master/docs/cli.md) 451 | 452 | ## Sparky Environment variables 453 | 454 | Use environment variables to tune Sparky configuration. 455 | 456 | Read more at [docs/env.md](https://github.com/melezhik/sparky/blob/master/docs/env.md) 457 | 458 | ## Glossary 459 | 460 | Some useful glossary. 461 | 462 | Read more at [docs/glossary.md](https://github.com/melezhik/sparky/blob/master/docs/glossary.md) 463 | 464 | ## CSS 465 | 466 | Sparky uses [Bulma](https://bulma.io/) as CSS framework for web UI. 467 | 468 | ## Sparky job examples 469 | 470 | Examples of various Sparky jobs could be found at [examples/](https://github.com/melezhik/sparky/tree/master/examples) folder. 
# See also

* [Cro](https://cro.services) - Raku Web Framework

* [Sparky-docker](https://github.com/melezhik/sparky-docker) - Run Sparky as Docker container

# Author

Alexey Melezhik

-------------------------------------------------------------------------------- /bin/sparky-runner.raku: --------------------------------------------------------------------------------
#!/usr/bin/env raku

# NOTE: angle-bracket hash subscripts (e.g. %config<sparrowdo>) were lost
# in extraction; they are reconstructed below from the surrounding flag
# strings and documentation - spots where the key is a guess are marked

use Sparky;
use Data::Dump;
use YAMLish;
use File::Directory::Tree;
use Sparky::Utils;

state $DIR;
state $MAKE-REPORT;

state %CONFIG;
state $SPARKY-BUILD-STATE;
state $SPARKY-PROJECT;
state $SPARKY-BUILD-ID;

sub MAIN (
    Str  :$dir = "$*CWD",
    Bool :$make-report = False,
    Str  :$marker,
    Str  :$trigger?,
)
{

    $DIR = $dir;

    $MAKE-REPORT = $make-report;

    my $project = $dir.IO.basename;

    $SPARKY-PROJECT = $project;

    my $reports-dir = "$dir/../.reports/$project".IO.absolute;

    my %config = read-config($dir);

    say "load sparky.yaml config from $dir ..";
    say ">>>", Dump(%config);
    mkdir $dir;

    my $build-cache-dir = "$dir/../../work/$project/.triggers".IO.absolute;
    my $build-state-dir = "$dir/../../work/$project/.states".IO.absolute;
    my $build-files-dir = "$dir/../../work/$project/.files".IO.absolute;

    mkdir $build-cache-dir; # cache dir for triggered builds
    mkdir $build-state-dir; # state dir for triggered builds
    mkdir $build-files-dir; # files dir for triggered builds

    my $build_id;

    my $dbh;

    my $run-first-time = False;

    my %trigger = Hash.new;

    my $job-id = $trigger ?? $trigger.IO.basename !! "cli_job";

    if $trigger {
        say "loading trigger $trigger into Raku ...";
        %trigger = EVALFILE($trigger);
    }

    if $make-report {

        mkdir $reports-dir;

        $dbh = get-dbh( $dir );

        my $description = %trigger<description>;

        my $sth = $dbh.prepare(q:to/STATEMENT/);
        INSERT INTO builds (project, state, description, job_id)
        VALUES ( ?,?,?,? )
        STATEMENT

        $sth.execute($project, 0, $description, $job-id);

        # SPEED optimization:
        # we use file cache instead of database
        # to return build states from http API (sparky-job-api calls f.e.)
        # states still exist in a database
        # but for the sake of speed and
        # not to overload database with requests
        # we would rather return states
        # by reading them from static files
        # not from database entries

        "{$build-state-dir}/{$job-id}".IO.spurt(0);

        $sth = $dbh.prepare(q:to/STATEMENT/);
        SELECT max(ID) AS build_id
        FROM builds where job_id = ?
        STATEMENT

        $sth.execute($job-id);

        my @rows = $sth.allrows();

        $build_id = @rows[0][0];

        $sth.finish;

        $SPARKY-BUILD-ID = $build_id;

        $sth = $dbh.prepare(q:to/STATEMENT/);
        SELECT count(*) AS build_cnt
        FROM builds
        WHERE project = ?
        STATEMENT

        $sth.execute($project);

        @rows = $sth.allrows();

        my $build_cnt = @rows[0][0];

        $sth.finish;

        if $build_cnt == 1 {
            $run-first-time = True;
            say "RUN BUILD $project" ~ '@' ~ $build_id ~ ' (first time)';
        } else {
            say "RUN BUILD $project" ~ '@' ~ $build_id;
        }

    } else {

        say "RUN BUILD <$project>";

    }

    my $sparrowdo-run = "sparrowdo --prefix=$project";

    my %sparrowdo-config = %config<sparrowdo> || Hash.new;

    my %shared-vars;
    my %host-vars;
    my $error;

    if "$dir/../../templates/vars.yaml".IO ~~ :f {

        say "templates: load shared vars from vars.yaml";

        try { %shared-vars = load-yaml("$dir/../../templates/vars.yaml".IO.slurp) };

        if $! {
            $error ~= $!;
            say "project/$project: error parsing $dir/../../templates/var.yaml";
            say $error;
        }

    }

    if "$dir/../../templates/hosts/{hostname()}/vars.yaml".IO ~~ :f {

        say "templates: load host vars from {hostname()}/vars.yaml";

        try { %host-vars = load-yaml("$dir/../../templates/hosts/{hostname()}/vars.yaml".IO.slurp) };

        if $! {
            $error ~= $!;
            say "project/$project: error parsing $dir/../../templates/hosts/{hostname()}/vars.yaml";
            say $error;
        }

    }

    if %trigger {
        for %trigger.keys -> $k {
            if $k eq "tags" {
                if %sparrowdo-config{$k} {
                    %sparrowdo-config{$k} ~= ",{%trigger{$k}}"
                } else {
                    %sparrowdo-config{$k} = %trigger{$k}
                }
            } else {
                %sparrowdo-config{$k} = %trigger{$k};
            }
        }
        # handle conflicting parameters
        if %trigger<docker> {
            %sparrowdo-config<host>:delete;
            %sparrowdo-config<localhost>:delete;
        } elsif %trigger<host> {
            %sparrowdo-config<docker>:delete;
            %sparrowdo-config<localhost>:delete;
        } elsif %trigger<localhost> {
            %sparrowdo-config<docker>:delete;
            %sparrowdo-config<host>:delete;
        }
        if %trigger<host> {
            # keys here are an assumption: an image conflicts
            # with an ssh host worker
            %sparrowdo-config<image>:delete;
        }
    }

    if %sparrowdo-config<docker> {
        $sparrowdo-run ~= " --docker=" ~ %sparrowdo-config<docker>;
    } elsif %sparrowdo-config<host> {
        $sparrowdo-run ~= " --host=" ~ %sparrowdo-config<host>;
    } else {
        %sparrowdo-config<localhost> = True;
        $sparrowdo-run ~= " --localhost";
    }

    if %sparrowdo-config<image> {
        $sparrowdo-run ~= " --image=" ~ %sparrowdo-config<image>;
    }

    if %sparrowdo-config<repo> {
        $sparrowdo-run ~= " --repo=" ~ %sparrowdo-config<repo>;
    }

    if %sparrowdo-config<sync> {
        $sparrowdo-run ~= " --sync=" ~ %sparrowdo-config<sync>;
    }

    if %sparrowdo-config<no_sudo> {
        $sparrowdo-run ~= " --no_sudo";
    }

    if %sparrowdo-config<conf> {
        $sparrowdo-run ~= " --conf=" ~ %sparrowdo-config<conf>;
    }

    if %sparrowdo-config<no_index_update> and ! $run-first-time {
        $sparrowdo-run ~= " --no_index_update";
    }

    if %sparrowdo-config<ssh_user> {
        $sparrowdo-run ~= " --ssh_user=" ~ %sparrowdo-config<ssh_user>;
    }

    if %sparrowdo-config<ssh_private_key> {
        $sparrowdo-run ~= " --ssh_private_key=" ~ %sparrowdo-config<ssh_private_key>;
    }

    if %sparrowdo-config<ssh_port> {
        $sparrowdo-run ~= " --ssh_port=" ~ %sparrowdo-config<ssh_port>;
    }


    if %sparrowdo-config<tags> {
        for %sparrowdo-config<tags> ~~ m:global/"%" (\S+?) "%"/ -> $c {
            my $var_id = $c[0].Str;
            # apply vars from host vars first
            my $host-var = get-template-var(%host-vars,$var_id);
            if defined($host-var) {
                if $host-var.isa(Str) or $host-var.isa(Rat) or $host-var.isa(Int) {
                    %sparrowdo-config<tags>.=subst("%{$var_id}%",$host-var,:g);
                } elsif $host-var.isa(Hash) {
                    my @tags;
                    for $host-var.keys.sort -> $v {
                        @tags.push: "$v={$host-var{$v}}"
                    }
                    %sparrowdo-config<tags> = @tags.join(",");
                }
                say "project/$project: sparrowdo.tags - insert tags %{$var_id}% from host vars";
                next;
            }
            my $shared-var = get-template-var(%shared-vars,$var_id);
            if defined($shared-var) {
                if $shared-var.isa(Str) or $shared-var.isa(Rat) or $shared-var.isa(Int) {
                    %sparrowdo-config<tags>.=subst("%{$var_id}%",$shared-var,:g);
                } elsif $shared-var.isa(Hash) {
                    my @tags;
                    for $shared-var.keys.sort -> $v {
                        @tags.push: "$v={$shared-var{$v}}" # was $host-var{$v} - apparent copy-paste bug
                    }
                    %sparrowdo-config<tags> = @tags.join(",");
                }
                say "project/$project: sparrowdo.tags - insert tags %{$var_id}% from shared vars";
                next;
            }
        }
        %sparrowdo-config<tags> ~= ",SPARKY_PROJECT={$project}";
        %sparrowdo-config<tags> ~= ",SPARKY_JOB_ID={$trigger.IO.basename}" if $trigger;
        %sparrowdo-config<tags> ~= ",SPARKY_WORKER=docker" if %sparrowdo-config<docker>;
        %sparrowdo-config<tags> ~= ",SPARKY_WORKER=localhost" if %sparrowdo-config<localhost>;
        %sparrowdo-config<tags> ~= ",SPARKY_WORKER=host" if %sparrowdo-config<host>;
        %sparrowdo-config<tags> ~= ",SPARKY_TCP_PORT={sparky-tcp-port()}";
        %sparrowdo-config<tags> ~= ",SPARKY_API_TOKEN={sparky-api-token()}" if sparky-api-token();
        %sparrowdo-config<tags> ~= ",SPARKY_USE_TLS=1" if sparky-use-tls();
        $sparrowdo-run ~= " --tags='{%sparrowdo-config<tags>}'";
    } elsif $trigger {
        $sparrowdo-run ~= " --tags=SPARKY_PROJECT={$project},SPARKY_JOB_ID={$trigger.IO.basename},SPARKY_TCP_PORT={sparky-tcp-port()}";
        $sparrowdo-run ~= ",SPARKY_WORKER=docker" if %sparrowdo-config<docker>;
        $sparrowdo-run ~= ",SPARKY_WORKER=localhost" if %sparrowdo-config<localhost>;
        $sparrowdo-run ~= ",SPARKY_WORKER=host" if %sparrowdo-config<host>;
        $sparrowdo-run ~= ",SPARKY_API_TOKEN={sparky-api-token()}" if sparky-api-token();
        $sparrowdo-run ~= ",SPARKY_USE_TLS" if sparky-use-tls();
    }

    if %sparrowdo-config<verbose> {
        $sparrowdo-run ~= " --verbose";
    }

    if %sparrowdo-config<debug> {
        $sparrowdo-run ~= " --debug";
    }

    $sparrowdo-run ~= " --color"; # enable color output

    # keys here are an assumption: docker workers are always bootstrapped
    %sparrowdo-config<bootstrap> = True if %sparrowdo-config<docker>;

    if %sparrowdo-config<bootstrap> {
        $sparrowdo-run ~= " --bootstrap";
    }

    say "merged sparrowdo configuration: {Dump(%sparrowdo-config)}";

    my $run-dir = $dir;

    if %trigger<cwd> { # key reconstructed - an assumption

        $run-dir = %trigger<cwd>;

    }

    if $trigger {
        say "moving trigger to {$build-cache-dir}/{$trigger.IO.basename} ...";
        my %t = EVALFILE($trigger);
        %t<sparrowdo> = %sparrowdo-config; # key reconstructed - an assumption
        unlink $trigger;
        "{$build-cache-dir}/{$trigger.IO.basename}".IO.spurt(%t.perl);
    }

    if $make-report {
        my $report-file = "$reports-dir/build-$build_id.txt";
        shell("export SP6_FORMAT_COLOR=1 && cd $run-dir && $sparrowdo-run 1>$report-file" ~ ' 2>&1');
    } else {
        shell("export SP6_FORMAT_COLOR=1 && cd $run-dir && $sparrowdo-run" ~ ' 2>&1');
    }


    if $make-report {
        $dbh.do("UPDATE builds SET state = 1 WHERE id = $build_id");
        say "BUILD SUCCEED $project" ~ '@' ~ $build_id;
        $SPARKY-BUILD-STATE="OK";
        "{$build-state-dir}/{$job-id}".IO.spurt(1);
    } else {
        say "BUILD SUCCEED <$project>";
        $SPARKY-BUILD-STATE="OK";
        "{$build-state-dir}/{$job-id}".IO.spurt(1);
    }

    CATCH {

        # will definitely catch all the exceptions
        default {
            warn .say;
            if $make-report {
                say "BUILD FAILED $project" ~ '@' ~ $build_id;
                $dbh.do("UPDATE builds SET state = -1 WHERE id = $build_id");
                $SPARKY-BUILD-STATE="FAILED";
                "{$build-state-dir}/{$job-id}".IO.spurt(-1);
            } else {
                say "BUILD FAILED <$project>";
                $SPARKY-BUILD-STATE="FAILED";
                "{$build-state-dir}/{$job-id}".IO.spurt(-1);
            }
        }

    }

    # remove old builds

    if %config<keep_builds> and $make-report {

        say "keep builds: " ~ %config<keep_builds>;

        my $sth = $dbh.prepare(q:to/STATEMENT/);
        SELECT id, job_id from builds where project = ? order by id asc
        STATEMENT

        $sth.execute($project);

        my @rows = $sth.allrows();

        my $all-builds = @rows.elems;

        $sth.finish;

        my $remove-builds = $all-builds - %config<keep_builds>;

        if $remove-builds > 0 {
            my $i=0;
            for @rows -> @r {
                $i++;
                my $bid = @r[0];
                my $job-id = @r[1];
                if $i <= $remove-builds {
                    if $dbh.do("delete from builds WHERE id = $bid") {
                        say "remove build database entry: $project" ~ '@' ~ $bid;
                    } else {
                        say "!!! can't remove build database entry: <$project>" ~ '@' ~ $bid;
                    }
                    if unlink "$reports-dir/build-$bid.txt".IO {
                        say "remove report: $reports-dir/build-$bid.txt";
                    } else {
                        say "!!! can't remove report: $reports-dir/build-$bid.txt";
                    }
                    if $job-id {
                        if unlink "{$build-cache-dir}/{$job-id}".IO {
                            say "remove trigger cache: {$build-cache-dir}/{$job-id}";
                        } else {
                            say "!!! can't remove trigger cache: {$build-cache-dir}/{$job-id}";
                        }
                        if unlink "{$build-state-dir}/{$job-id}".IO {
                            say "remove state cache: {$build-state-dir}/{$job-id}";
                        } else {
                            say "!!! can't remove state cache: {$build-state-dir}/{$job-id}";
                        }
                        if "{$build-files-dir}/{$job-id}".IO ~~ :d {
                            if rmtree "{$build-files-dir}/{$job-id}" {
                                say "remove files dir: {$build-files-dir}/{$job-id}";
                            } else {
                                say "!!! can't remove files dir: {$build-files-dir}/{$job-id}";
                            }
                        }
                    }

                }

            }

        }

    }


}

sub read-config ( $dir ) {

    my %config = Hash.new;

    if "$dir/sparky.yaml".IO ~~ :f {
        my $yaml-str = slurp "$dir/sparky.yaml";
        $yaml-str ~~ s:g/'%' BUILD '-' ID '%'/$SPARKY-BUILD-ID/ if $SPARKY-BUILD-ID;
        $yaml-str ~~ s:g/'%' BUILD '-' STATE '%'/$SPARKY-BUILD-STATE/ if $SPARKY-BUILD-STATE;
        $yaml-str ~~ s:g/'%' PROJECT '%'/$SPARKY-PROJECT/ if $SPARKY-PROJECT;
        %config = load-yaml($yaml-str);

    }

    return %config;

}

LEAVE {

    # Run Sparky plugins

    my %config = read-config($DIR);

    if %config<plugins> {
        for %config<plugins>.kv -> $plg-name, $plg-data {
            my %plg-params = $plg-data<parameters> || %();
            my $run-scope = $plg-data<run_scope> || 'anytime';

            #say "$plg-name, $run-scope, $SPARKY-BUILD-STATE";
            if ( $run-scope eq "fail" and $SPARKY-BUILD-STATE ne "FAILED" ) {
                next;
            }

            if ( $run-scope eq "success" and $SPARKY-BUILD-STATE ne "OK" ) {
                next;
            }

            say "Load Sparky plugin $plg-name ...";
            require ::($plg-name);
            say "Run Sparky plugin $plg-name ...";
            ::($plg-name ~ '::&run')(
                {
                    project => $SPARKY-PROJECT,
                    build-id => $SPARKY-BUILD-ID,
                    build-state => $SPARKY-BUILD-STATE,
                },
                %plg-params
            );

        }
    }

    say ">>>>>>>>>>>>>>>>>>>>>>>>>>>";
    say "BUILD SUMMARY";
    say "STATE: $SPARKY-BUILD-STATE";
    say "PROJECT: $SPARKY-PROJECT";
    say "CONFIG: " ~ Dump(%config, :color(!$MAKE-REPORT));
    say ">>>>>>>>>>>>>>>>>>>>>>>>>>>";


    # run downstream project
    if %config<downstream> {

        say "SCHEDULE BUILD for DOWNSTREAM project <" ~ %config<downstream> ~ "> ... \n";

        my $downstream_dir = ("$DIR/../" ~ %config<downstream>).IO.absolute;

        my $id = "{('a' .. 'z').pick(20).join('')}{$*PID}";

        mkdir "$downstream_dir/.triggers";

        "{$downstream_dir}/.triggers/{$id}".IO.spurt("%(
            description => 'triggered by {$SPARKY-PROJECT}\@{$SPARKY-BUILD-ID}',
        )");

        # fixme: we need to set --make-report
        # in the trigger file
        # so that the schedule-build function
        # inherits the make-report option
        # from the upstream build


    }

}

-------------------------------------------------------------------------------- /bin/sparkyd: --------------------------------------------------------------------------------
#!/usr/bin/env raku

use Sparky;

# %*ENV<HOME> / %*ENV<SPARKY_TIMEOUT> subscripts reconstructed -
# angle-bracket subscripts were lost in extraction
sub MAIN (
    Str :$root = %*ENV<HOME> ~ '/.sparky/projects',
    Str :$work-root = %*ENV<HOME> ~ '/.sparky/work',
    Int :$timeout = %*ENV<SPARKY_TIMEOUT> || 10,
)

{

    my %flappers;

    while True {

        for dir($root) -> $dir {

            next if "$dir".IO ~~ :f;
            next if $dir.basename eq '.git';
            next if $dir.basename eq '.reports';
            next if $dir.basename eq 'db.sqlite3-journal';
            next unless "$dir/sparrowfile".IO ~~ :f;

            if "$dir/sparky.yaml".IO !~~ :f {
                next unless "{$dir}/.triggers/".IO ~~ :d;
                my @a = dir("{$dir}/.triggers/");
                next unless @a.elems;
            }

            mkdir $root;
            mkdir $work-root;

            my $project = $dir.IO.basename;

            mkdir "$work-root/$project/.lock";

            if %flappers{$dir}:exists && %flappers{$dir} >= 5 && sparky-with-flapper() {
                say "{DateTime.now} --- scm: ! project {$dir.basename} marked as a flapper and skipped from scheduling";
            } else {
                my $status = schedule-build($dir);
                if defined($status) {
                    # mark the project a flapper
                    # if 5 or more timeouts have happened
                    if $status eq "124" {
                        %flappers{$dir} ||= 0;
                        %flappers{$dir}++;
                    }
                }
            }
        }

        sleep($timeout);

    }

}

-------------------------------------------------------------------------------- /bin/sparman.raku: --------------------------------------------------------------------------------
sub MAIN(
    Str $comp,
    Str $action,
    Bool :$verbose? = False,
    Str :$base?,
    Str :$env?,
) {

    say "Execute $action on $comp ...";

    die "unknown component" unless $comp ~~ /^^ (worker|worker_ui) $$/;

    my $c = _get_conf();
    my $vars = $env ?? $env.split(/","/).map({"export $_"}).join("\n") !! "";

    if $comp eq "worker_ui" {
        # $c<base> subscripts reconstructed - angle-bracket
        # subscripts were lost in extraction
        if ! $c<base> and $action eq "start" {
            say "worker ui base dir not found, tell me where to look it up:";
            say "sparman.raku --base /path/to/basedir worker_ui conf";
            exit(1)
        }
        if $action eq "start" {
            my $cmd = q[
                set -e
                pid=$(ps uax|grep "raku bin/sparky-web.raku"|grep -v grep | awk '{ print $2 }')
                if test -z $pid; then ] ~
                qq[\ncd {$c<base>}\n] ~
                q[mkdir -p ~/.sparky ] ~
                qq[\n$vars\n] ~
                q[nohup raku bin/sparky-web.raku 1>~/.sparky/sparky-web.log 2>&1 < /dev/null &
                echo "run [OK]"
                else
                echo "already running pid=$pid ..."
                fi
            ];
            say $cmd if $verbose;
            shell $cmd;
        } elsif $action eq "stop" {
            my $cmd = q[
                set -e
                pid=$(ps uax|grep "raku bin/sparky-web.raku"|grep -v grep | awk '{ print $2 }')
                if test -z $pid; then
                echo "already stopped"
                else
                echo "kill $pid ..."
                kill $pid
                echo "stop [OK] | pid=$pid"
                fi
            ];
            say $cmd if $verbose;
            shell $cmd;
        } elsif $action eq "conf" {
            if $base {
                $c<base> = $base;
                _update_conf($c);
            }
        } elsif $action eq "status" {
            my $cmd = q[
                set -e
                pid=$(ps uax|grep "raku bin/sparky-web.raku"|grep -v grep | awk '{ print $2 }')
                if test -z $pid; then
                echo "stop [OK]"
                else
                echo "run [OK] | pid=$pid"
                fi
            ];
            say $cmd if $verbose;
            shell $cmd;
        } else {
            die "unknown action"
        }
    }
    if $comp eq "worker" {
        if $action eq "start" {
            my $cmd = q[
                set -e
                pid=$(ps uax|grep bin/sparkyd|grep rakudo|grep -v grep | awk '{ print $2 }')
                if test -z $pid; then
                mkdir -p ~/.sparky/] ~
                qq[\n$vars\n] ~
                q[nohup sparkyd 1>~/.sparky/sparkyd.log 2>&1 < /dev/null &
                echo "run [OK]"
                else
                echo "already running pid=$pid ..."
                fi
            ];
            say $cmd if $verbose;
            shell $cmd;
        } elsif $action eq "stop" {
            my $cmd = q[
                set -e
                pid=$(ps uax|grep bin/sparkyd|grep rakudo|grep -v grep | awk '{ print $2 }')
                if test -z $pid; then
                echo "already stopped"
                else
                echo "kill $pid ..."
                kill $pid
                echo "stop [OK] | pid=$pid"
                fi
            ];
            say $cmd if $verbose;
            shell $cmd;
        } elsif $action eq "status" {
            my $cmd = q[
                set -e
                pid=$(ps uax|grep bin/sparkyd|grep rakudo|grep -v grep | awk '{ print $2 }')
                if test -z $pid; then
                echo "stop [OK]"
                else
                echo "run [OK] | pid=$pid"
                fi
            ];
            say $cmd if $verbose;
            shell $cmd;
        } else {
            die "unknown action"
        }
    }

}

sub _get_conf {
    # %*ENV<HOME> subscripts reconstructed - angle-bracket
    # subscripts were lost in extraction
    if "{%*ENV<HOME>}/.sparky/conf.raku".IO ~~ :e {
        EVALFILE "{%*ENV<HOME>}/.sparky/conf.raku"
    } else {
        return {}
    }
}

sub _update_conf (%c) {
    mkdir "{%*ENV<HOME>}/.sparky/";
    say "update {%*ENV<HOME>}/.sparky/conf.raku ...";
    "{%*ENV<HOME>}/.sparky/conf.raku".IO.spurt(%c.perl)
}

-------------------------------------------------------------------------------- /conf/sparky-cluster.raku: --------------------------------------------------------------------------------
%(
    workers => [
        %(
            name => "spk01",
            host => "spk01.eastus.cloudapp.azure.com",
            ssh-user => "sparky",
            api => "https://spk01.eastus.cloudapp.azure.com:4000"
        ),
    ],
)

-------------------------------------------------------------------------------- /css/style.css: --------------------------------------------------------------------------------
a[href] {
    color: black;
}

-------------------------------------------------------------------------------- /db-init.raku: --------------------------------------------------------------------------------
use v6;
use DBIish;
use Data::Dump;
use Sparky;

sub MAIN (
    Str :$root = %*ENV<HOME> ~ '/.sparky/projects',
)

{

    mkdir $root;

    my %conf = get-sparky-conf();

    my $dbh;
    my $engine;
    my $db-name;

    say "config: " ~ Dump(%conf);

    # %conf<database><...> subscripts reconstructed - angle-bracket
    # subscripts were lost in extraction; see docs/database.md for
    # the authoritative key names
    if %conf<database> && %conf<database><engine> && %conf<database><engine> !~~ / :i sqlite / {
        $engine = %conf<database><engine>;
        $db-name = %conf<database><name>;
        $dbh = DBIish.connect(
            $engine,
            host => %conf<database><host>,
            port => %conf<database><port>,
            database => %conf<database><name>,
            user => %conf<database><user>,
            password => %conf<database><pass>,
        );

    } else {

        $engine = 'SQLite';
        $db-name = "$root/db.sqlite3";
        $dbh = DBIish.connect("SQLite", database => $db-name );

    }

    $dbh.do(q:to/STATEMENT/);
    DROP TABLE IF EXISTS builds
    STATEMENT

    if $engine ~~ /:i sqlite/ {

        $dbh.do(q:to/STATEMENT/);
        CREATE TABLE builds (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            project varchar(255),
            job_id varchar(255),
            description TEXT,
            state int,
            dt datetime default current_timestamp
        )
        STATEMENT

    } elsif $engine ~~ /:i mysql/ {

        $dbh.do(q:to/STATEMENT/);
        CREATE TABLE builds (
            id int primary key auto_increment,
            project varchar(255),
            job_id varchar(255),
            description varchar(255),
            state int,
            dt timestamp default CURRENT_TIMESTAMP
        ) CHARACTER SET utf8
        STATEMENT

    } elsif $engine ~~ /:i pg/ {

        $dbh.do(q:to/STATEMENT/);
        CREATE TABLE builds (
            id SERIAL PRIMARY KEY,
            project varchar(255),
            job_id varchar(255),
            description varchar(255),
            state int,
            dt timestamp default CURRENT_TIMESTAMP
        )
        STATEMENT

    }

    say "$engine db populated as $db-name";

    $dbh.dispose;

}
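For a non-SQLite setup, the `%conf<database>` lookups above imply a `database` section in `~/sparky.yaml` roughly like the following. This is a hypothetical sketch: the key names mirror the reconstructed lookups in db-init.raku, and docs/database.md remains the authoritative reference:

```yaml
# hypothetical ~/sparky.yaml database section
database:
  engine: mysql
  host: 127.0.0.1
  port: 3306
  name: sparky
  user: sparky
  pass: secret
```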
-------------------------------------------------------------------------------- /docs/acl.md: -------------------------------------------------------------------------------- 1 | # Sparky ACL 2 | 3 | Sparky ACL allows you to create access control lists to manage role-based access to Sparky resources 4 | 5 | # Creation of ACL 6 | 7 | Create a list.yaml file located at the `SPARKY_ROOT/acl/list.yaml` path, for example: 8 | 9 | ```yaml 10 | global: 11 | allow: 12 | users: 13 | - alexey.melezhik 14 | - john.brown 15 | 16 | projects: 17 | hello-world: 18 | allow: 19 | users: 20 | - "*" 21 | service-logs: 22 | allow: 23 | users: 24 | - "*" 25 | ``` 26 | 27 | In this example we allow the users alexey.melezhik and john.brown to run any jobs, 28 | and allow _all_ users to run the jobs hello-world and service-logs. 29 | 30 | # ACL flow 31 | 32 | ACL flow is strict: if an action is not allowed explicitly, it's implicitly denied, 33 | for example in this case: 34 | 35 | ```yaml 36 | global: 37 | allow: 38 | users: 39 | - alexey.melezhik 40 | - john.brown 41 | ``` 42 | 43 | All users besides alexey.melezhik and john.brown are denied from running any project 44 | 45 | # User IDs 46 | 47 | User IDs are supplied by the OAuth provider during the authentication phase; 48 | usually these are user accounts on the external OAuth server. 49 | 50 | For example, in the case of the GitLab OAuth provider, user IDs are GitLab accounts 51 | 52 | # Host specific ACLs 53 | 54 | To _override_ the default ACL (located at `SPARKY_ROOT/acl/list.yaml`) one has 55 | to specify a list.yaml file located at `SPARKY_ROOT/acl/hosts/$host/list.yaml`, 56 | where $host is the hostname (output of the `hostname` command) of the host where Sparky 57 | runs; this allows maintaining multiple ACL configurations for many Sparky instances: 58 | 59 | ``` 60 | acl/hosts/host-foo/list.yaml 61 | acl/hosts/host-bar/list.yaml 62 | acl/hosts/host-baz/list.yaml 63 | ``` 64 | 65 | A host specific ACL overrides the default ACL and has the same semantics. 66 | 67 | # Explicit deny 68 | 69 | To explicitly deny a user from executing a job, use the deny directive: 70 | 71 | ``` 72 | projects: 73 | hello-world: 74 | allow: 75 | users: 76 | - "*" 77 | deny: 78 | users: 79 | - bad_guy 80 | ``` 81 | 82 | This code allows all users to execute the hello-world sparky project, besides a user with the login bad_guy 83 | 84 | # Access to everyone 85 | 86 | To allow any user to run any resource, just remove all list.yaml files from the Sparky configuration 87 | -------------------------------------------------------------------------------- /docs/api.md: -------------------------------------------------------------------------------- 1 | # HTTP API 2 | 3 | The Sparky HTTP API allows executing Sparky jobs remotely over HTTP 4 | 5 | ## Trigger Sparky job 6 | 7 | ```http 8 | POST /build/project/$project 9 | ``` 10 | 11 | Returns `$key` - a unique build identifier (aka Sparky Job ID) 12 | 13 | ## Trigger job with parameters 14 | 15 | ```http 16 | POST /build-with-tags/project/$project @json 17 | ``` 18 | 19 | For example: 20 | 21 | Request data - `request.json`: 22 | 23 | ```json 24 | { 25 | "description" : "test build", 26 | "tags" : "message=hello,from=Sparky" 27 | } 28 | ``` 29 | 30 | Request via curl: 31 | 32 | ```bash 33 | curl -k -H "Content-Type: application/json" \ 34 | --data "@request.json" \ 35 | https://127.0.0.1:4000/build-with-tags/project/hello-world 36 | ``` 37 | 38 | Will trigger a job `hello-world`, with named parameters `message` and `from`.
39 | 40 | Parameters are handled within the Sparky scenario as: 41 | 42 | ```raku 43 | my $message = tags()<message>; 44 | my $from = tags()<from>; 45 | ``` 46 | 47 | ## Job status 48 | 49 | Get the job status - the status of the last executed build: 50 | 51 | ```http 52 | GET /status/$project/$key 53 | ``` 54 | 55 | Returns `$status`: 56 | 57 | * `0` - build is running 58 | 59 | * `-1` - build failed 60 | 61 | * `1` - build finished successfully 62 | 63 | * `-2` - unknown state ( build does not exist or is placed in a queue ) 64 | 65 | ## Badges 66 | 67 | Get the job badge - an image with the status of the last executed build 68 | 69 | ```http 70 | GET /badge/$project 71 | ``` 72 | 73 | ## Job report 74 | 75 | Get a build report in raw text format 76 | 77 | ```http 78 | GET /report/raw/$project/$key 79 | ``` 80 | -------------------------------------------------------------------------------- /docs/auth.md: -------------------------------------------------------------------------------- 1 | # Auth 2 | 3 | Sparky authentication protocols 4 | 5 | # Synopsis 6 | 7 | Sparky comes with two authentication protocols: 8 | 9 | * Default 10 | 11 | * Oauth2 12 | 13 | ## Default protocol 14 | 15 | The default protocol is the simplest one, for when you don't need anything fancy 16 | but authentication; just add this section to your `~/sparky.yaml` file: 17 | 18 | ```yaml 19 | auth: 20 | default: true 21 | users: 22 | - 23 | login: admin 24 | password: 80ffc4a1fb71d117b0d74337c5943bf2 25 | - 26 | login: operator 27 | password: 223e2baafd4e70dcffe70420cfcca615 28 | ``` 29 | 30 | Here we have 2 logins - admin and operator - with the md5-summed passwords 31 | admin_password and operator_password; that is it, plain and simple. 32 | 33 | Now we can even set up ACL policies for those accounts using [ACL](https://github.com/melezhik/sparky/blob/master/docs/acl.md) 34 | 35 | ```yaml 36 | global: 37 | allow: 38 | users: 39 | - admin 40 | 41 | projects: 42 | maintain: 43 | allow: 44 | users: 45 | - operator 46 | ``` 47 | 48 | Default login and password (if the auth section is not set at all): 49 | 50 | * login - admin 51 | * password - admin 52 | 53 | ## OAUTH 2.0 protocol 54 | 55 | For a more secure scenario use the [oauth2](https://oauth.net/2/) authentication 56 | protocol.
57 | 58 | To enable oauth2, add the following section to the `~/sparky.yaml` configuration file ( 59 | example for the GitLab provider): 60 | 61 | ```yaml 62 | auth: 63 | provider: gitlab 64 | provider_url: https://gitlab.host/oauth # URL for authentication 65 | redirect_url: http://sparky.host:4000/oauth2 # should be something like your_sparky_host/oauth2 66 | user_api: https://gitlab.host/api/v4/user # API to fetch user data, example for gitlab 67 | scope: "openid email read_user" # scopes enabled for oauth token 68 | # generate client_id, client_secret when creating the sparky application in gitlab 69 | client_id: aaabbbcccdddeeefffggghhhiiijjjkkklllmmmnnnooopppqqqrrrssstttuuuvvvww 70 | client_secret: 01020102aa01020102bb01020102cc01020102dddd00ee00ff0010101ff0101f 71 | state: hellosparky # this is optional 72 | ``` 73 | 74 | For now, only the GitLab oauth2 provider is supported 75 | 76 | # See also 77 | 78 | ## ACL 79 | 80 | Sparky ACL allows you to create access control lists to manage role-based access to Sparky resources, see [docs/acl.md](https://github.com/melezhik/sparky/blob/master/docs/acl.md) 81 | -------------------------------------------------------------------------------- /docs/cli.md: -------------------------------------------------------------------------------- 1 | # Cli 2 | 3 | Command line client for Sparky 4 | 5 | ## Trigger jobs 6 | 7 | To trigger a Sparky job in the terminal, use the `sparky-runner.raku` cli: 8 | 9 | ```bash 10 | $ sparky-runner.raku --dir=/home/user/.sparky/projects/teddy-bear-app 11 | ``` 12 | 13 | Or just: 14 | 15 | ```bash 16 | $ cd ~/.sparky/projects/teddy-bear-app && sparky-runner.raku 17 | ``` 18 | 19 | ## Sparky runtime parameters 20 | 21 | Runtime parameters can be overridden on the command line ( `--root`, `--work-root` ) 22 | 23 | ### root directory 24 | 25 | Directory where the scheduler looks for job scenarios, by default: 26 | 27 | ```bash 28 | ~/.sparky/projects/ 29 | ``` 30 | 31 | ### work directory 32 | 33 | Directory where the scheduler keeps internal jobs data: 34 | 35 | ```bash 36 | ~/.sparky/work 37 | ``` 38 | -------------------------------------------------------------------------------- /docs/database.md: -------------------------------------------------------------------------------- 1 | # Databases 2 | 3 | Sparky keeps its data in a database; by default it uses SQLite. The 4 | following databases are supported: 5 | 6 | * SQLite 7 | * MySQL/MariaDB 8 | * PostgreSQL 9 | 10 | ## Configuring database 11 | 12 | The following is an example for a MySQL database; the same rules apply to other databases, 13 | like PostgreSQL, etc.
14 | 15 | ### Create Sparky configuration file 16 | 17 | You should define the database engine and connection parameters; say we want to use MySQL: 18 | 19 | ```bash 20 | $ nano ~/sparky.yaml 21 | ``` 22 | 23 | With content: 24 | 25 | ```yaml 26 | database: 27 | engine: mysql 28 | host: $dbhost 29 | port: $dbport 30 | name: $dbname 31 | user: $dbuser 32 | pass: $dbpassword 33 | ``` 34 | 35 | For example: 36 | 37 | ```yaml 38 | database: 39 | engine: mysql 40 | host: "127.0.0.1" 41 | port: 3306 42 | name: sparky 43 | user: sparky 44 | pass: "123" 45 | ``` 46 | 47 | ### Install dependencies 48 | 49 | Depending on the platform, this will be the client needed for your database API; for example, on a RedHat-based system we have to: 50 | 51 | ```bash 52 | $ sudo yum install mysql-client 53 | ``` 54 | 55 | ### Creating database user, password and schema 56 | 57 | The DB init script will generate the database schema, provided that the user defined in the sparky configuration file has access to 58 | the database: 59 | 60 | ```bash 61 | $ raku db-init.raku 62 | ``` 63 | 64 | That is it, now Sparky runs under MySQL! 65 | 66 | -------------------------------------------------------------------------------- /docs/downstream.md: -------------------------------------------------------------------------------- 1 | # Downstream jobs 2 | 3 | Downstream jobs get run after some _main_ job has finished. 4 | 5 | One defines a job as downstream by referencing the downstream job in the main job: 6 | 7 | ```yaml 8 | # definition of main job here 9 | # job named cleanup will be executed 10 | # after the main job 11 | downstream: cleanup 12 | ``` 13 | 14 | Downstream jobs can be chained, so one can define a downstream within another 15 | downstream job. For more advanced and effective job orchestration consider 16 | the Sparky Job API - [docs/job_api.md](https://github.com/melezhik/sparky/blob/master/docs/job_api.md) 17 | -------------------------------------------------------------------------------- /docs/env.md: -------------------------------------------------------------------------------- 1 | # Env 2 | 3 | Sparky environment variables 4 | 5 | ## SPARKY_SKIP_CRON 6 | 7 | You can disable the cron check to run projects forcefully, by setting the `SPARKY_SKIP_CRON` environment variable: 8 | 9 | ```bash 10 | $ export SPARKY_SKIP_CRON=1 && sparkyd 11 | ``` 12 | 13 | ## SPARKY_MAX_JOBS 14 | 15 | Threshold for the maximum number of concurrent jobs. Use it to protect a Sparky server from overload. 16 | 17 | (WARNING! This variable is not currently supported) 18 | 19 | ## SPARKY_FLAPPERS_OFF 20 | 21 | Disables the flappers mechanism, see the "Flappers mechanism" section. 22 | 23 | ## SPARKY_ROOT 24 | 25 | Sets the sparky root directory 26 | 27 | ## SPARKY_HTTP_ROOT 28 | 29 | Sets the sparky web application HTTP root. Useful when proxying the application through Nginx: 30 | 31 | SPARKY_HTTP_ROOT='/sparky' cro run 32 | 33 | ## SPARKY_TIMEOUT 34 | 35 | Sets the timeout for sparky workers, see the [Running daemon](#running-daemon) section. 36 | 37 | ## SPARKY_JOB_TIMEOUT 38 | 39 | How many seconds to wait until a job is considered timed out (used in Sparky Job API calls).
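For example, a combined sketch using the variables documented above (the values are illustrative only):

```bash
# custom root directory, 10 second worker polling, cron checks disabled
$ export SPARKY_ROOT=/var/lib/sparky/projects
$ export SPARKY_TIMEOUT=10
$ export SPARKY_SKIP_CRON=1
$ sparkyd
```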
-------------------------------------------------------------------------------- /docs/glossary.md: -------------------------------------------------------------------------------- 1 | # Glossary 2 | 3 | Some useful glossary terms 4 | 5 | # Job aka Sparky project 6 | 7 | A Raku scenario that gets executed on some event and does some useful job 8 | 9 | # Sparky scenario 10 | 11 | Implementation of a Sparky job, written in Raku 12 | 13 | # Build 14 | 15 | A specific instance of a Sparky job. Usually reports are visible through the UI and 16 | might have some artifacts 17 | 18 | # Report 19 | 20 | Log of a Sparky job execution 21 | 22 | # Artifact 23 | 24 | Some byproducts (technically, files) attached to a build and visible through the UI 25 | 26 | # UI aka Sparky web UI 27 | 28 | Web application to run Sparky jobs and get their reports 29 | 30 | # Sparrowdo 31 | 32 | A client to run Sparky jobs on (remote) hosts and docker containers 33 | 34 | # Sparrow 35 | 36 | Underlying automation framework to execute Sparky jobs 37 | 38 | # sparky.yaml 39 | 40 | YAML definition of Sparky job meta information, like input parameters, UI controls, 41 | triggering logic, etc. Every Sparky job has a sparky.yaml file 42 | 43 | 44 | # root directory 45 | 46 | Directory where the scheduler looks for job scenarios, by default: 47 | 48 | ```bash 49 | ~/.sparky/projects/ 50 | ``` 51 | 52 | # work directory 53 | 54 | Directory where the scheduler keeps internal jobs data: 55 | 56 | ```bash 57 | ~/.sparky/work 58 | ``` 59 | -------------------------------------------------------------------------------- /docs/job_api.md: -------------------------------------------------------------------------------- 1 | # Job API 2 | 3 | The Job API allows orchestrating multiple Sparky jobs. 4 | 5 | For example: 6 | 7 | ```raku 8 | if tags()<stage> eq "main" { 9 | 10 | use Sparky::JobApi; 11 | 12 | my $j = Sparky::JobApi.new; 13 | 14 | $j.queue({ 15 | description => "spawned job", 16 | tags => %( 17 | stage => "child", 18 | foo => 1, 19 | bar => 2, 20 | ), 21 | }); 22 | 23 | say "job info: ", $j.info.perl; 24 | 25 | } elsif tags()<stage> eq "child" { 26 | 27 | say "I am a child scenario"; 28 | say "config: ", config().perl; 29 | say "tags: ", tags().perl; 30 | 31 | } 32 | ``` 33 | 34 | In this example the same scenario runs for the main and the child job, but the 35 | code is conditionally branched off based on the `tags()<stage>` value: 36 | 37 | ```bash 38 | sparrowdo --localhost --no_sudo --with_sparky --tags=stage=main 39 | ``` 40 | 41 | ## Job attributes 42 | 43 | A child job inherits all the main job attributes, including the sparrowfile, tags, configuration file 44 | and sparrowdo configuration. 45 | 46 | To override some job configuration attributes, use the `sparrowdo` and `tags` parameters: 47 | 48 | ```raku 49 | my $j = Sparky::JobApi.new; 50 | $j.queue({ 51 | tags => %( 52 | stage => "child", 53 | foo => 1, 54 | bar => 2, 55 | ), 56 | sparrowdo => %( 57 | no_index_update => True, 58 | no_sudo => True, 59 | docker => "debian_bullseye" 60 | ) 61 | }); 62 | ``` 63 | 64 | Follow the [sparrowdo cli](https://github.com/melezhik/sparrowdo#sparrowdo-cli) documentation for an explanation of the `sparrowdo` parameters.
65 | 66 | ## Set a project for spawned job 67 | 68 | One can choose to set a job project either explicitly: 69 | 70 | ```raku 71 | my $j = Sparky::JobApi.new: :project<spawned_job>; 72 | $j.queue({ 73 | description => "spawned job", 74 | }); 75 | ``` 76 | 77 | The code will spawn a new job for a project called "spawned_job" 78 | 79 | Or implicitly, with an _auto generated_ project name: 80 | 81 | ```raku 82 | my $j = Sparky::JobApi.new; 83 | $j.queue({ 84 | description => "spawned job", 85 | }); 86 | ``` 87 | 88 | This code will spawn a new job on a project named `$current_project.spawned_$random_number` 89 | 90 | Where `$random_number` is a random integer taken from the default range - `1..4`. 91 | 92 | To increase the level of parallelism, use the `workers` parameter: 93 | 94 | ```raku 95 | for 1 .. 10 { 96 | my $j = Sparky::JobApi.new: :workers<10>; 97 | $j.queue({ 98 | description => "spawned job" 99 | }); 100 | } 101 | ``` 102 | 103 | In this case the random number will be taken from the range `1..10`. 104 | 105 | ## Asynchronous (non-blocking) wait of child jobs 106 | 107 | The main scenario can asynchronously wait for a child job 108 | using the Raku `supply|tap` approach: 109 | 110 | ```raku 111 | if tags()<stage> eq "main" { 112 | 113 | # spawns a child job 114 | 115 | use Sparky::JobApi; 116 | my $j = Sparky::JobApi.new: :project<spawned_job>; 117 | $j.queue({ 118 | description => "my spawned job", 119 | tags => %( 120 | stage => "child", 121 | foo => 1, 122 | bar => 2, 123 | ), 124 | }); 125 | 126 | say "queue spawned job, ",$j.info.perl; 127 | 128 | my $supply = supply { 129 | 130 | while True { 131 | 132 | my $status = $j.status; 133 | 134 | emit %( job-id => $j.info, status => $status ); 135 | 136 | done if $status eq "FAIL" or $status eq "OK"; 137 | sleep(1); 138 | } 139 | } 140 | 141 | $supply.tap( -> $v { 142 | say $v; 143 | }); 144 | } elsif tags()<stage> eq "child" { 145 | 146 | # child job here 147 | 148 | say "config: ", config().perl; 149 | say "tags: ", tags().perl; 150 | 151 | } 152 | ``` 153 | 154 | ## Recursive jobs 155 | 156 | Recursive jobs are when a child job spawns another job and so on. 157 | 158 | Be careful not to end up in endless recursion: 159 | 160 | ```raku 161 | use Sparky::JobApi; 162 | 163 | if tags()<stage> eq "main" { 164 | 165 | my $j = Sparky::JobApi.new: :project<spawned_01>; 166 | 167 | $j.queue({ 168 | description => "spawned job", 169 | tags => %( 170 | stage => "child", 171 | foo => 1, 172 | bar => 2, 173 | ), 174 | sparrowdo => %( 175 | no_index_update => True 176 | ) 177 | }); 178 | 179 | say "queue spawned job ", $j.info.perl; 180 | 181 | } elsif tags()<stage> eq "child" { 182 | 183 | say "I am a child scenario"; 184 | 185 | my $j = Sparky::JobApi.new: :project<spawned_02>; 186 | 187 | $j.queue({ 188 | description => "spawned job2. 02", 189 | tags => %( 190 | stage => "off", 191 | foo => 1, 192 | bar => 2, 193 | ), 194 | }); 195 | 196 | say "queue spawned job ",$j.info.perl; 197 | 198 | } elsif tags()<stage> eq "off" { 199 | 200 | say "I am off now, good bye!"; 201 | say "config: ", config().perl; 202 | say "tags: ", tags().perl; 203 | 204 | } 205 | ``` 206 | 207 | ## Predefined job IDs 208 | 209 | Explicitly passing a `job-id` allows waiting 210 | for jobs that have not yet started.
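For instance, a minimal sketch (the project and job ID here are purely illustrative): a handle constructed from a known project and job ID can be polled even before anything has queued that job:

```raku
use Sparky::JobApi;

# hypothetical, pre-agreed identifiers
my $project = "worker_1";
my $job-id  = "deadbeef_1";

# no job with this ID needs to exist yet -
# the handle can be created and polled ahead of time
my $j = Sparky::JobApi.new: :$project, :$job-id;

say $j.status;
```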
211 | 212 | Consider this scenario with recursive jobs: 213 | 214 | 215 | ```raku 216 | use Sparky::JobApi; 217 | 218 | sub wait-jobs(@q) { 219 | 220 | my @jobs; 221 | 222 | for @q -> $j { 223 | 224 | my $supply = supply { 225 | 226 | while True { 227 | 228 | my %info = $j.info; 229 | 230 | my $status = $j.status; 231 | 232 | %info<status> = $status; 233 | 234 | emit %info; 235 | 236 | done if $status eq "FAIL" or $status eq "OK"; 237 | 238 | sleep(1); 239 | 240 | } 241 | 242 | } 243 | 244 | $supply.tap( -> $v { 245 | push @jobs, $v if $v<status> eq "FAIL" or $v<status> eq "OK"; 246 | say $v; 247 | }); 248 | 249 | } 250 | 251 | say @jobs.grep({$_<status> eq "OK"}).elems, " jobs finished successfully"; 252 | say @jobs.grep({$_<status> eq "FAIL"}).elems, " jobs failed"; 253 | say @jobs.grep({$_<status> eq "TIMEOUT"}).elems, " jobs timeouted"; 254 | 255 | } 256 | 257 | if tags()<stage> eq "main" { 258 | 259 | my $rand = ('a' .. 'z').pick(20).join(''); 260 | 261 | my $job-id = "{$rand}_1"; 262 | 263 | Sparky::JobApi.new(:project<worker_1>,:$job-id).queue({ 264 | description => "spawned job. 03.1", 265 | tags => %( 266 | seed => $rand, 267 | stage => "child", 268 | i => 1, 269 | ), 270 | }); 271 | 272 | my @jobs; 273 | 274 | # wait for all 10 recursively launched jobs 275 | # that are not yet launched by that point 276 | # but will be launched recursively 277 | # in "child" jobs 278 | 279 | for 1 .. 10 -> $i { 280 | 281 | my $project = "worker_{$i}"; 282 | 283 | my $job-id = "{$rand}_{$i}"; 284 | 285 | my $j = Sparky::JobApi.new: :$project, :$job-id; 286 | 287 | @jobs.push: $j; 288 | 289 | } 290 | 291 | wait-jobs @jobs; 292 | 293 | } elsif tags()<stage> eq "child" { 294 | 295 | say "I am a child job!"; 296 | 297 | say tags().perl; 298 | 299 | if tags()<i> < 10 { 300 | 301 | my $i = tags()<i>.Int + 1; 302 | 303 | # do some useful stuff here 304 | # and launch another recursive job 305 | # with a predefined project and job ID; 306 | # the $i variable gets incremented 307 | # and all recursively launched jobs 308 | # get waited on in the "main" scenario, 309 | # function wait-jobs 310 | 311 | my $project = "worker_{$i}"; 312 | my $job-id = "{tags()<seed>}_{$i}"; 313 | 314 | Sparky::JobApi.new(:$project,:$job-id).queue({ 315 | description => "spawned job. 03.{$i}", 316 | tags => %( 317 | seed => tags()<seed>, 318 | stage => "child", 319 | i => $i, 320 | ), 321 | }); 322 | } 323 | } 324 | ``` 325 | 326 | So in this scenario job IDs get generated ahead of time, while jobs get launched recursively in 327 | subsequent jobs. 328 | 329 | The main scenario waits till all recursive jobs finish, in a non-blocking Raku `supply|tap` fashion. 330 | 331 | ## Job stash 332 | 333 | A stash is a piece of data a job can write or read. There are two ways to use stashes.
334 | 335 | When a child job writes data and then a parent job reads it: 336 | 337 | ```raku 338 | use Sparky::JobApi; 339 | 340 | if tags()<stage> eq "main" { 341 | 342 | # spawns a child job 343 | 344 | my $j = Sparky::JobApi.new(:project<spawned_job>); 345 | $j.queue({ 346 | description => "my spawned job", 347 | tags => %( 348 | stage => "child", 349 | foo => 1, 350 | bar => 2, 351 | ), 352 | }); 353 | 354 | say "queue spawned job, ",$j.info.perl; 355 | 356 | my $supply = supply { 357 | 358 | while True { 359 | 360 | my $status = $j.status; 361 | 362 | emit %( job-id => $j.info, status => $status ); 363 | 364 | done if $status eq "FAIL" or $status eq "OK"; 365 | 366 | sleep(5); 367 | 368 | } 369 | } 370 | 371 | $supply.tap( -> $v { 372 | say $v; 373 | }); 374 | 375 | # read data from the child job 376 | say $j.get-stash().perl; 377 | 378 | 379 | } elsif tags()<stage> eq "child" { 380 | 381 | # child job here 382 | 383 | say "config: ", config().perl; 384 | say "tags: ", tags().perl; 385 | 386 | my $j = Sparky::JobApi.new( mine => True ); 387 | 388 | # put data so that other jobs can read it 389 | $j.put-stash({ hello => "Sparky" }); 390 | 391 | } 392 | ``` 393 | 394 | When a parent job writes data to a child job (before it's spawned) and 395 | then the child job reads it: 396 | 397 | ```raku 398 | use Sparky::JobApi; 399 | 400 | if tags()<stage> eq "main" { 401 | 402 | # spawns a child job 403 | 404 | my $j = Sparky::JobApi.new(:project<spawned_job>); 405 | 406 | # prepare data for a child job 407 | # so that when it starts 408 | # it can read it 409 | 410 | $j.put-stash({ hello => "world" }); 411 | 412 | $j.queue({ 413 | description => "my spawned job", 414 | tags => %( 415 | stage => "child", 416 | foo => 1, 417 | bar => 2, 418 | ), 419 | }); 420 | 421 | say "queue spawned job, ",$j.info.perl; 422 | 423 | wait-jobs(($j,)) # wait till the job has finished 424 | 425 | } elsif tags()<stage> eq "child" { 426 | 427 | # child job here 428 | 429 | say "config: ", config().perl; 430 | say "tags: ", tags().perl; 431 | 432 | # read data prepared by the parent job 433 | 434 | my $j = Sparky::JobApi.new( mine => True ); 435 | 436 | say $j.get-stash().perl; 437 | 438 | } 439 | ``` 440 | 441 | In its general form, a job writes data to a stash by using the `put-stash` method: 442 | 443 | ```raku 444 | my $j = Sparky::JobApi.new(); 445 | $j.put-stash({ hello => "world", list => [ 1, 2, 3] }); 446 | $j.queue; # the job will be queued and gets access to the data via the `get-stash` method 447 | ``` 448 | 449 | The data written has to be any Raku data structure that can be 450 | converted into JSON format. 451 | 452 | To read data from the _current_ job, use the `mine => True` parameter of 453 | the Sparky::JobApi constructor. 454 | 455 | ```raku 456 | # read data from this job's stash 457 | my $j = Sparky::JobApi.new( mine => True ); 458 | $j.get-stash(); 459 | ``` 460 | 461 | To read data from a _specific_ job, specify `project` and `job-id` in 462 | the Sparky::JobApi constructor: 463 | 464 | ```raku 465 | # read data from a specific job's stash 466 | my $j = Sparky::JobApi.new( :$project, :$job-id ); 467 | $j.get-stash(); 468 | ``` 469 | 470 | ## Job files 471 | 472 | Job files are similar to the job stash, but are used to transfer files between jobs, rather than 473 | structured Raku hashes.
474 | 475 | Here is an example of how one can share a file between a child and a parent job: 476 | 477 | 478 | ```raku 479 | 480 | use Sparky::JobApi; 481 | 482 | class Pipeline 483 | 484 | does Sparky::JobApi::Role 485 | 486 | { 487 | 488 | method stage-main { 489 | 490 | say "hello from main ..."; 491 | 492 | my $j = self.new-job; 493 | 494 | $j.queue: %( 495 | tags => %( 496 | stage => "child" 497 | ) 498 | ); 499 | 500 | my $st = self.wait-job($j); 501 | 502 | die unless $st<OK>; 503 | 504 | say $j.get-file("README",:text); 505 | 506 | } 507 | 508 | method stage-child { 509 | 510 | say "hello from child"; 511 | 512 | my $j = Sparky::JobApi.new: mine => True; 513 | 514 | task-run "http/GET 1.png", "curl", %( 515 | args => [ 516 | %( 517 | 'output' => "{$*CWD}/README.md" 518 | ), 519 | [ 520 | 'silent', 521 | '-f', 522 | 'location' 523 | ], 524 | #'https://raw.githubusercontent.com/melezhik/images/master/1.png' 525 | 'https://raw.githubusercontent.com/melezhik/images/master/README.md' 526 | ] 527 | ); 528 | 529 | $j.put-file("{$*CWD}/README.md","README"); 530 | 531 | } 532 | 533 | } 534 | 535 | Pipeline.new.run; 536 | ``` 537 | 538 | In this example the child job copies a file back to the parent job using the `put-file` method: 539 | 540 | * `put-file($file-path,$file-name)` 541 | 542 | Where `$file-path` is a physical file path within the file system and `$file-name` is just the name 543 | under which the file will be accessible by other jobs. 544 | 545 | So when a file gets copied, the parent job can access it via: 546 | 547 | * the `get-file($file-name)` method, which returns the content (*) of the file. 548 | 549 | ```raku 550 | my $data = $job.get-file("data.tar.gz"); 551 | ``` 552 | 553 | `*` - the content will be returned as a binary string by default 554 | 555 | --- 556 | 557 | To force text mode, use the `:text` modifier: 558 | 559 | ```raku 560 | my $text = $job.get-file: "README.md", :text; 561 | ``` 562 | 563 | 564 | ## Class API 565 | 566 | For OOP lovers there is a Sparky::JobApi::Role that implements some Sparky::JobApi-ish methods, 567 | so one can write scenarios in OOP style: 568 | 569 | 570 | ```raku 571 | use Sparky::JobApi; 572 | 573 | class Pipeline 574 | 575 | does Sparky::JobApi::Role 576 | 577 | { 578 | 579 | method stage-main { 580 | 581 | my $j = self.new-job: :project<spawned_job>; 582 | 583 | $j.queue({ 584 | description => "spawned job.
01", 585 | tags => %( 586 | stage => "child", 587 | ), 588 | }); 589 | 590 | say "job info: ", $j.info.perl; 591 | 592 | my $st = self.wait-job($j); 593 | 594 | say $st.perl; 595 | 596 | die if $st; 597 | 598 | } 599 | 600 | method stage-child { 601 | 602 | say "I am a child scenario"; 603 | say "config: ", config().perl; 604 | say "tags: ", tags().perl; 605 | 606 | } 607 | 608 | } 609 | 610 | Pipeline.new.run; 611 | ``` 612 | 613 | To run pipeline: 614 | 615 | ```bash 616 | sparrowdo --localhost --no_sudo --with_sparky --tags=stage=main 617 | ``` 618 | 619 | Sparky::JobApi::Role methods: 620 | 621 | * `new-job(params)` 622 | 623 | Wrapper around Sparky::JobApi.new, takes the same parameters and return an instance of Sparky::JobApi class 624 | 625 | * `wait-jobs(@jobs,%args?)` 626 | 627 | Wait jobs and return state as Raku hash: 628 | 629 | ```raku 630 | %( 631 | OK => $number-of-successfully-finished jobs, 632 | FAIL => $number-of-failed jobs, 633 | ) 634 | ``` 635 | 636 | To set timeout for making http request to get job statues, use `%args`: 637 | 638 | ```raku 639 | self.wait-jobs(@jobs, %( timeout => 10)); 640 | ``` 641 | 642 | To enable debug mode: 643 | 644 | ```raku 645 | self.wait-jobs(@jobs, %( debug => True)); 646 | ``` 647 | 648 | * `wait-job($job,%args?)` 649 | 650 | The same as `wait-jobs(@jobs,%args?)`, but for a single job 651 | 652 | ## Cluster jobs 653 | 654 | One can have more then one Sparky instances and run jobs across them. 655 | 656 | This feature is called cluster jobs: 657 | 658 | ```raku 659 | use Sparky::JobApi; 660 | 661 | if tags() eq "main" { 662 | my $j = Sparky::JobApi.new(:api); 663 | $j.queue({ 664 | description => "child job" 665 | tags => %( 666 | stage => "child" 667 | ) 668 | }); 669 | } 670 | ``` 671 | 672 | The code above will run job on sparky instance located at `http://sparrowhub.io:4000` address. 673 | 674 | All what has been said before applies to cluster jobs, they are no different from your 675 | local Sparky jobs. 676 | 677 | For example one can run cluster on docker instance `alpine-with-raku` running on remote Sparky server: 678 | 679 | ```raku 680 | my $j = Sparky::JobApi.new(:api); 681 | $j.queue({ 682 | description => "child job" 683 | tags => %( 684 | stage => "child" 685 | ), 686 | sparrowdo => %( 687 | docker => "alpine-with-raku", 688 | no_sudo => True 689 | ), 690 | }); 691 | ``` 692 | 693 | For security reason Sparky server calling jobs on another Sparky server need to have the same 694 | security token. 695 | 696 | Set up `~/sparky.yaml` file on both local and remote Sparky servers: 697 | 698 | ```yaml 699 | SPARKY_API_TOKEN: secret123456 700 | ``` 701 | 702 | `SPARKY_API_TOKEN` should be any random string. 703 | 704 | Apparently one can have many Sparky servers logically combined into a cluster, and 705 | all servers within a group can run remote jobs on each other, the only requirement 706 | is they all have to share the same `SPARKY_API_TOKEN` 707 | 708 | For now `http/https` protocol are supported for cluster jobs URLs. 709 | 710 | See also "SSL support" section. 711 | -------------------------------------------------------------------------------- /docs/plugins.md: -------------------------------------------------------------------------------- 1 | ## Sparky plugins 2 | 3 | Sparky plugins are extensions points to add extra functionality to Sparky builds. 4 | 5 | These are Raku modules get run _after_ a Sparky project finishes or in other words when a build is completed. 
6 | 7 | To use Sparky plugins you should: 8 | 9 | * Install plugins as Raku modules 10 | 11 | * Configure plugins in the project's `sparky.yaml` file 12 | 13 | ## Install Sparky plugins 14 | 15 | You should install a module on the same server where you run Sparky. For instance: 16 | 17 | ```bash 18 | $ zef install Sparky::Plugin::Email # Sparky plugin to send email notifications 19 | ``` 20 | 21 | ## Configuring Sparky plugins 22 | 23 | In the project's `sparky.yaml` file define a plugins section; it should be a list of plugins and their configurations. 24 | 25 | For instance: 26 | 27 | ```bash 28 | $ cat sparky.yaml 29 | ``` 30 | 31 | That contains: 32 | 33 | ```yaml 34 | plugins: 35 | Sparky::Plugin::Email: 36 | parameters: 37 | subject: "I finished" 38 | to: "happy@user.email" 39 | text: "here will be log" 40 | Sparky::Plugin::Hello: 41 | parameters: 42 | name: Sparrow 43 | ``` 44 | 45 | ## Developing Sparky plugins 46 | 47 | Technically speaking, Sparky plugins should be just Raku modules. 48 | 49 | For instance, for the module Sparky::Plugin::Hello we might have these header lines: 50 | 51 | ```raku 52 | use v6; 53 | 54 | unit module Sparky::Plugin::Hello; 55 | ``` 56 | 57 | That is it. 58 | 59 | The module should have a `run` routine, which is invoked when Sparky processes a plugin: 60 | 61 | ```raku 62 | our sub run ( %config, %parameters ) { 63 | 64 | } 65 | ``` 66 | 67 | As we can see, the `run` routine consumes its parameters as Raku Hashes; these parameters are defined in the mentioned `sparky.yaml` file, 68 | in the plugin's `parameters:` section, so this is how you might handle them: 69 | 70 | ```raku 71 | sub run ( %config, %parameters ) { 72 | 73 | say "Hello " ~ %parameters<name>; 74 | 75 | } 76 | ``` 77 | 78 | You can use the `%config` Hash to access Sparky's guts: 79 | 80 | * `%config<project>` - the project name 81 | * `%config<build-id>` - the build number of the current project build 82 | * `%config<build-state>` - the state of the current build 83 | 84 | For example: 85 | 86 | ```raku 87 | sub run ( %config, %parameters ) { 88 | 89 | say "build id is: " ~ %config<build-id>; 90 | 91 | } 92 | ``` 93 | 94 | Alternatively, you may pass _some_ predefined parameters to plugins: 95 | 96 | * %PROJECT% - equivalent of `%config<project>` 97 | * %BUILD-STATE% - equivalent of `%config<build-state>` 98 | * %BUILD-ID% - equivalent of `%config<build-id>` 99 | 100 | For example: 101 | 102 | ```bash 103 | $ cat sparky.yaml 104 | ``` 105 | 106 | That contains: 107 | 108 | ```yaml 109 | plugins: 110 | Sparky::Plugin::Hello: 111 | parameters: 112 | name: Sparrow from project %PROJECT% 113 | ``` 114 | 115 | ## Limit plugin run scope 116 | 117 | You can define _when_ to run a plugin; here are 3 run scopes: 118 | 119 | * `anytime` - run a plugin irrespective of the build state.
This is the default value 120 | * `success` - run a plugin only if the build has succeeded 121 | * `fail` - run a plugin only if the build has failed 122 | 123 | Scopes are defined via the `run_scope:` parameter: 124 | 125 | ```yaml 126 | Sparky::Plugin::Hello: 127 | run_scope: fail 128 | parameters: 129 | name: Sparrow 130 | ``` 131 | 132 | ## Examples of Sparky plugins 133 | 134 | * [Sparky::Plugin::Hello](https://github.com/melezhik/sparky-plugin-hello) 135 | * [Sparky::Plugin::Notify::Email](https://github.com/melezhik/sparky-plugin-notify-email) 136 | -------------------------------------------------------------------------------- /docs/sparman.md: -------------------------------------------------------------------------------- 1 | # Sparman 2 | 3 | Sparman is a cli to run and configure Sparky components: 4 | 5 | - sparkyd (aka sparky worker) 6 | 7 | - web UI (aka sparky worker UI) 8 | 9 | # API 10 | 11 | ## Sparky Worker 12 | 13 | The Sparky worker is a background process performing all SparrowCI task execution 14 | 15 | ```bash 16 | sparman.raku worker start 17 | sparman.raku worker stop 18 | sparman.raku worker status 19 | ``` 20 | 21 | ## Sparky Worker UI 22 | 23 | The Sparky worker UI allows reading worker reports and managing worker jobs. This 24 | is intended for SparrowCI operations people 25 | 26 | ```bash 27 | sparman.raku worker_ui start 28 | sparman.raku worker_ui stop 29 | sparman.raku worker_ui status 30 | ``` 31 | 32 | ## Pass environment variables 33 | 34 | To pass environment variables to services, use the `--env var=val,var2=val ...` notation. 35 | 36 | For example, to set the worker polling timeout to 10 seconds and skip cron jobs: 37 | 38 | ```bash 39 | sparman.raku --env SPARKY_TIMEOUT=10,SPARKY_SKIP_CRON=1 worker start 40 | ``` 41 | 42 | ## Logs 43 | 44 | Logs are available at the following locations: 45 | 46 | Sparky Worker UI - `~/.sparky/sparky-web.log` 47 | 48 | Sparky Worker - `~/.sparky/sparkyd.log` 49 | 50 | -------------------------------------------------------------------------------- /docs/stp.md: -------------------------------------------------------------------------------- 1 | # Sparky triggering protocol (STP) 2 | 3 | The Sparky Triggering Protocol allows triggering jobs automatically by creating files in a special format. 4 | 5 | Consider an example. 6 | 7 | ```bash 8 | $ nano $project/.triggers/$key 9 | ``` 10 | 11 | The file has to be located in the project's `.triggers` directory. 12 | 13 | And `$key` should be a unique string identifying a build _within_ the directory (on a per-project basis). 14 | 15 | The content of the file should be Raku code returning a Raku Hash: 16 | 17 | ```raku 18 | { 19 | description => "web app build", 20 | cwd => "/path/to/working/directory", 21 | sparrowdo => %( 22 | localhost => True, 23 | no_sudo => True, 24 | conf => "/path/to/file.conf" 25 | ) 26 | } 27 | ``` 28 | 29 | The Sparky daemon parses files in `.triggers` and launches a build per every file, removing the file afterwards; 30 | this process is called file triggering. 31 | 32 | To separate different builds, just create trigger files with unique names inside the `$project/.triggers` directory. 33 | 34 | STP allows creating _supplemental_ APIs to implement more complex and custom build logic, while keeping Sparky itself simple. 35 | 36 | ## Trigger attributes 37 | 38 | These keys can be used in the trigger Hash. They are all optional. 39 | 40 | * `cwd` 41 | Directory where the sparrowfile is located; when a build gets run, the process will change to this directory.
42 | 43 | * `description` 44 | Arbitrary text description of a build 45 | 46 | * `sparrowdo` 47 | 48 | Options for the sparrowdo cli run, for example: 49 | 50 | ```raku 51 | sparrowdo => 52 | %( 53 | host => "foo.bar", 54 | ssh_user => "admin", 55 | tags => "prod,backend" 56 | ) 57 | 58 | ``` 59 | 60 | Follow the [sparrowdo cli](https://github.com/melezhik/sparrowdo#sparrowdo-cli) documentation for an explanation of the `sparrowdo` parameters. 61 | -------------------------------------------------------------------------------- /docs/ui.md: -------------------------------------------------------------------------------- 1 | # Job UIs 2 | 3 | Sparky UI DSL allows declaratively describing UIs for Sparky jobs 4 | and passing user input into a scenario as variables. 5 | 6 | ## Simple example 7 | 8 | For a job with input parameters such as: 9 | 10 | - Name 11 | - CV 12 | - Color 13 | 14 | Add a `vars` section to the sparky.yaml file: 15 | 16 | ```yaml 17 | vars: 18 | - 19 | name: Name 20 | default: Alexey 21 | type: input 22 | - 23 | name: CV 24 | default: I am a programmer 25 | type: textarea 26 | 27 | - 28 | name: Language 29 | values: [ Raku, Rust, Golang ] 30 | type: select 31 | default: Rust 32 | multiple: true 33 | 34 | - 35 | name: Color 36 | values: [ Red, Blue, Green ] 37 | type: select 38 | default: Blue 39 | 40 | - 41 | name: Debug 42 | type: checkbox 43 | default: true 44 | ``` 45 | 46 | The Sparky job now gets HTML controls for the input parameters: 47 | 48 | ![build parameters](https://raw.githubusercontent.com/melezhik/sparky/master/images/sparky-web-ui-build-with-params.jpeg) 49 | 50 | Within a scenario those parameters are available through the `tags()` function: 51 | 52 | ```raku 53 | say "Name param passed: ", tags()<Name>; 54 | say "CV param passed: ", tags()<CV>; 55 | say "Language param passed: ", tags()<Language>; 56 | say "Debug param passed: ", tags()<Debug>; 57 | ``` 58 | 59 | When the same job gets run bypassing user input (via the HTTP API), 60 | default values can be set via the `sparrowdo.tags` section: 61 | 62 | ```yaml 63 | sparrowdo: 64 | no_sudo: true 65 | no_index_update: true 66 | bootstrap: false 67 | format: default 68 | tags: > 69 | Language=Rakudo, 70 | Name=Alex, 71 | Occupation=devops 72 | ``` 73 | 74 | ## HTML UI controls supported 75 | 76 | Currently the following UI controls are supported: 77 | 78 | * text input 79 | 80 | * password 81 | 82 | * text area 83 | 84 | * select list (including multiple) 85 | 86 | * checkbox 87 | 88 | ## UI sub tasks 89 | 90 | UI sub tasks allow splitting a complex UI into smaller parts by specifying a `group` term. 91 | Consider this example: 92 | 93 | ``` 94 | vars: 95 | - 96 | name: Flavor 97 | default: "black" 98 | type: select 99 | values: [black, green] 100 | group: [ tea ] 101 | 102 | - 103 | name: Topic 104 | default: "milk" 105 | type: select 106 | values: [milk, cream] 107 | group: [ tea ] 108 | 109 | - 110 | name: Flavor 111 | default: "latte" 112 | type: select 113 | values: [espresso, americano, latte] 114 | group: [ coffee ] 115 | 116 | - 117 | name: Topic 118 | default: "milk" 119 | type: select 120 | values: [milk, cream, cinnamon] 121 | group: [ coffee ] 122 | multiple: true 123 | - 124 | name: Step3 125 | default: "boiled water" 126 | type: input 127 | group: [ tea, coffee ] 128 | 129 | group_vars: 130 | - tea 131 | - coffee 132 | ``` 133 | 134 | When a user clicks a job page, they'll get a choice of two separate pages, one for
136 | 137 | 138 | ## Templating UI variables 139 | 140 | One can template variables used in UI, by creating a global template file 141 | `SPARKY_ROOT/templates/vars.yaml` with variables: 142 | 143 | 144 | ```yaml 145 | vars: 146 | name: Alexey 147 | surname: Melezhik 148 | ``` 149 | 150 | Shared variables are inserted into job `sparky.yaml` file 151 | by using `%name%` syntax: 152 | 153 | ```yaml 154 | vars: 155 | - 156 | name: Name 157 | default: %name% 158 | type: input 159 | - 160 | name: LastName 161 | default: %surname% 162 | type: input 163 | ``` 164 | 165 | This approach allows to reduce code duplication when developing Sparky job UIs. 166 | 167 | To specify host (*) specific file, use template file located at 168 | `SPARKY_ROOT/templates/hosts/$hostname/` directory 169 | 170 | For example: 171 | 172 | `SPARKY_ROOT/templates/hosts/foo.bar` 173 | 174 | ```yaml 175 | vars: 176 | role: db_server 177 | ``` 178 | 179 | `*` Where `$hostame` is output of `hostame` command executed on the server that hosts Sparky, 180 | this variable could be overridden by `HOSTNAME` environment variable 181 | 182 | Host specific variables always override variable specified at `SPARKY_ROOT/templates/vars.yaml` 183 | 184 | --- 185 | 186 | To create nested variables use dot notation: 187 | 188 | `vars.yaml` 189 | 190 | ``` 191 | vars: 192 | user: 193 | name: Piter Pen 194 | ``` 195 | 196 | `sparky.yaml` 197 | 198 | ```yaml 199 | vars: 200 | - 201 | name: Name 202 | default: "%user.name%" 203 | type: input 204 | ``` 205 | 206 | ## Templating tag variables 207 | 208 | Tag variables in `sparky.yaml` could use template variables: 209 | 210 | `vars.yaml`: 211 | 212 | ``` 213 | user: 214 | name: Alex 215 | surname: Melezhik 216 | ``` 217 | 218 | ``` 219 | sparrowdo: 220 | tags: | 221 | Name=%user.name%, 222 | LastName=%user.surname% 223 | ``` 224 | -------------------------------------------------------------------------------- /examples/git-clone/sparky.yaml: -------------------------------------------------------------------------------- 1 | crontab: "2 * * * *" 2 | sparrowdo: 3 | no_sudo: true 4 | no_index_update: false 5 | bootstrap: false 6 | format: default 7 | disabled: false 8 | keep_builds: 5 9 | -------------------------------------------------------------------------------- /examples/git-clone/sparrowfile: -------------------------------------------------------------------------------- 1 | #!raku 2 | 3 | directory "project"; 4 | 5 | git-scm 'https://github.com/melezhik/rakudist-teddy-bear.git', %( 6 | to => "project" 7 | ); 8 | 9 | bash "ls -l {%*ENV}/project"; 10 | -------------------------------------------------------------------------------- /examples/hello-world/sparky.yaml: -------------------------------------------------------------------------------- 1 | #crontab: "2 * * * *" 2 | sparrowdo: 3 | no_sudo: true 4 | no_index_update: false 5 | bootstrap: false 6 | format: default 7 | disabled: false 8 | keep_builds: 5 9 | allow_manual_run: true 10 | -------------------------------------------------------------------------------- /examples/hello-world/sparrowfile: -------------------------------------------------------------------------------- 1 | #!raku 2 | 3 | bash "echo Hello World"; 4 | 5 | say config().perl; 6 | 7 | say tags().perl; 8 | 9 | -------------------------------------------------------------------------------- /examples/http-request/sparky.yaml: -------------------------------------------------------------------------------- 1 | #crontab: "2 * * * *" 2 | sparrowdo: 3 | no_sudo: true 4 | 
no_index_update: false 5 | bootstrap: false 6 | format: default 7 | disabled: false 8 | keep_builds: 5 9 | allow_manual_run: true 10 | -------------------------------------------------------------------------------- /examples/http-request/sparrowfile: -------------------------------------------------------------------------------- 1 | #!perl6 2 | 3 | http-ok 'http://raku.org', %( has-content => "'Raku'" ); 4 | 5 | -------------------------------------------------------------------------------- /examples/job-api-https/sparky.yaml: -------------------------------------------------------------------------------- 1 | sparrowdo: 2 | no_sudo: true 3 | no_index_update: false 4 | bootstrap: false 5 | format: default 6 | tags: stage=main 7 | disabled: false 8 | allow_manual_run: true 9 | keep_builds: 5 10 | -------------------------------------------------------------------------------- /examples/job-api-https/sparrowfile: -------------------------------------------------------------------------------- 1 | if tags() eq "main" { 2 | 3 | use Sparky::JobApi; 4 | 5 | my $j = Sparky::JobApi.new :workers<10>; 6 | 7 | $j.queue({ 8 | description => "spawned job (localhost)", 9 | tags => %( 10 | stage => "child", 11 | foo => 1, 12 | bar => 2, 13 | ), 14 | }); 15 | 16 | say "job info: ", $j.info.perl; 17 | 18 | $j = Sparky::JobApi.new: :api, :workers<10>; 19 | 20 | $j.queue({ 21 | description => "spawned job (api)", 22 | tags => %( 23 | stage => "child", 24 | foo => 1, 25 | bar => 2, 26 | ), 27 | }); 28 | 29 | say "job info: ", $j.info.perl; 30 | 31 | } elsif tags() eq "child" { 32 | 33 | say "I am a child scenario"; 34 | say "config: ", config().perl; 35 | say "tags: ", tags().perl; 36 | 37 | } 38 | -------------------------------------------------------------------------------- /examples/job-file-01/sparky.yaml: -------------------------------------------------------------------------------- 1 | sparrowdo: 2 | no_sudo: true 3 | no_index_update: false 4 | bootstrap: false 5 | format: default 6 | tags: stage=main 7 | disabled: false 8 | allow_manual_run: true 9 | keep_builds: 5 10 | -------------------------------------------------------------------------------- /examples/job-file-01/sparrowfile: -------------------------------------------------------------------------------- 1 | use Sparky::JobApi; 2 | 3 | class Pipeline 4 | 5 | does Sparky::JobApi::Role 6 | 7 | { 8 | 9 | method stage-main { 10 | 11 | say "hello from main ..."; 12 | 13 | my $j = self.new-job; 14 | 15 | $j.queue: %( 16 | tags => %( 17 | stage => "child" 18 | ) 19 | ); 20 | 21 | my $st = self.wait-job($j); 22 | 23 | die unless $st; 24 | 25 | say $j.get-file("README2.md"); 26 | 27 | } 28 | 29 | method stage-child { 30 | 31 | say "hello from child"; 32 | 33 | my $j = Sparky::JobApi.new: mine => True; 34 | 35 | task-run "http/GET 1.png", "curl", %( 36 | args => [ 37 | %( 38 | 'output' => "{$*CWD}/README.md" 39 | ), 40 | [ 41 | 'silent', 42 | '-f', 43 | 'location' 44 | ], 45 | #'https://raw.githubusercontent.com/melezhik/images/master/1.png' 46 | 'https://raw.githubusercontent.com/melezhik/images/master/README.md' 47 | ] 48 | ); 49 | 50 | $j.put-file("{$*CWD}/README.md","README2.md"); 51 | 52 | } 53 | 54 | } 55 | 56 | 57 | Pipeline.new.run; 58 | 59 | 60 | -------------------------------------------------------------------------------- /examples/job-queue-cluster-many/sparky.yaml: -------------------------------------------------------------------------------- 1 | sparrowdo: 2 | no_sudo: true 3 | no_index_update: false 4 | bootstrap: false 5 | 
format: default 6 | tags: stage=main 7 | disabled: false 8 | allow_manual_run: true 9 | keep_builds: 5 10 | -------------------------------------------------------------------------------- /examples/job-queue-cluster-many/sparrowfile: -------------------------------------------------------------------------------- 1 | use Sparky::JobApi; 2 | 3 | my @workers = 'http://sparrowhub.io:4000', 'https://sparky01.centralus.cloudapp.azure.com:4000', 'https://sparky02.centralus.cloudapp.azure.com:4000'; 4 | 5 | if tags() eq "main" { 6 | 7 | my @q; 8 | 9 | for @workers -> $w { 10 | 11 | my $j = Sparky::JobApi.new(api => $w, workers => 80); 12 | 13 | $j.queue({ 14 | description => "test job", 15 | tags => %( 16 | stage => "child", 17 | ), 18 | }); 19 | 20 | push @q, $j; 21 | 22 | } 23 | 24 | my @jobs; 25 | 26 | for @q -> $q { 27 | 28 | my $supply = supply { 29 | 30 | while True { 31 | 32 | my $status = $q.status; 33 | 34 | emit %( id => $q.info(), status => $status ); 35 | 36 | done if $status eq "FAIL" or $status eq "OK"; 37 | 38 | sleep(1); 39 | 40 | } 41 | 42 | } 43 | 44 | $supply.tap( -> $v { 45 | push @jobs, $v if $v eq "FAIL" or $v eq "OK"; 46 | say $v; 47 | }); 48 | 49 | } 50 | 51 | say @jobs.grep({$_ eq "OK"}).elems, " jobs finished successfully"; 52 | say @jobs.grep({$_ eq "FAIL"}).elems, " jobs failed"; 53 | say @jobs.grep({$_ eq "TIMEOUT"}).elems, " jobs timeouted"; 54 | 55 | } elsif tags() eq "child" { 56 | 57 | say "I am a child job!"; 58 | 59 | say tags().perl; 60 | 61 | } 62 | 63 | 64 | -------------------------------------------------------------------------------- /examples/job-queue-cluster/sparky.yaml: -------------------------------------------------------------------------------- 1 | sparrowdo: 2 | no_sudo: true 3 | no_index_update: false 4 | bootstrap: false 5 | format: default 6 | tags: stage=main 7 | disabled: false 8 | allow_manual_run: true 9 | keep_builds: 5 10 | -------------------------------------------------------------------------------- /examples/job-queue-cluster/sparrowfile: -------------------------------------------------------------------------------- 1 | if tags() eq "main" { 2 | 3 | # spawns a child job 4 | 5 | use Sparky::JobApi; 6 | my $j = Sparky::JobApi.new(:api); 7 | $j.queue({ 8 | description => "my spawned job", 9 | tags => %( 10 | stage => "child", 11 | foo => 1, 12 | bar => 2, 13 | ), 14 | #sparrowdo => %( 15 | # no_sudo => True, 16 | # bootstrap => False 17 | #) 18 | }); 19 | 20 | say "queue spawned job, ",$j.info.perl; 21 | 22 | my $supply = supply { 23 | 24 | while True { 25 | 26 | my $status = $j.status; 27 | 28 | emit %( job-id => $j.info, status => $status ); 29 | 30 | done if $status eq "FAIL" or $status eq "OK"; 31 | 32 | sleep(5); 33 | 34 | } 35 | } 36 | 37 | $supply.tap( -> $v { 38 | say $v; 39 | }); 40 | } elsif tags() eq "child" { 41 | 42 | # child job here 43 | 44 | sleep(10); 45 | 46 | say "config: ", config().perl; 47 | say "tags: ", tags().perl; 48 | 49 | } 50 | -------------------------------------------------------------------------------- /examples/job-queue-override-attr/sparky.yaml: -------------------------------------------------------------------------------- 1 | sparrowdo: 2 | no_sudo: true 3 | no_index_update: false 4 | bootstrap: false 5 | format: default 6 | tags: stage=main 7 | disabled: false 8 | allow_manual_run: true 9 | keep_builds: 5 10 | -------------------------------------------------------------------------------- /examples/job-queue-override-attr/sparrowfile: 
-------------------------------------------------------------------------------- 1 | use Sparky::JobApi; 2 | 3 | if tags() eq "main" { 4 | 5 | my $j = Sparky::JobApi.new(:project); 6 | 7 | $j.queue({ 8 | description => "spawned job. 022", 9 | tags => %( 10 | stage => "child", 11 | foo => 1, 12 | bar => 2, 13 | ), 14 | sparrowdo => %( 15 | no_index_update => False 16 | ) 17 | }); 18 | 19 | say "queue spawned job ", $j.info.perl; 20 | 21 | } elsif tags() eq "child" { 22 | 23 | say "I am a child scenario"; 24 | 25 | my $j = Sparky::JobApi.new(:project); 26 | 27 | $j.queue({ 28 | description => "spawned job2. 022", 29 | tags => %( 30 | stage => "off", 31 | foo => 1, 32 | bar => 2, 33 | ), 34 | sparrowdo => %( 35 | host => "sparrowhub.io", 36 | ssh_user => "root", 37 | ) 38 | }); 39 | 40 | say "queue spawned job ", $j.info.perl; 41 | 42 | } elsif tags() eq "off" { 43 | 44 | bash "hostname"; 45 | 46 | say "I am off now, good buy!"; 47 | say "config: ", config().perl; 48 | say "tags: ", tags().perl; 49 | 50 | } 51 | 52 | -------------------------------------------------------------------------------- /examples/job-queue-predefined/sparky.yaml: -------------------------------------------------------------------------------- 1 | sparrowdo: 2 | no_sudo: true 3 | no_index_update: false 4 | bootstrap: false 5 | format: default 6 | tags: stage=main 7 | disabled: false 8 | allow_manual_run: true 9 | keep_builds: 5 10 | -------------------------------------------------------------------------------- /examples/job-queue-predefined/sparrowfile: -------------------------------------------------------------------------------- 1 | use Sparky::JobApi; 2 | 3 | sub wait-jobs(@q) { 4 | 5 | my @jobs; 6 | 7 | for @q -> $j { 8 | 9 | my $supply = supply { 10 | 11 | while True { 12 | 13 | my %info = $j.info; 14 | 15 | my $status = $j.status; 16 | 17 | %info = $status; 18 | 19 | emit %info; 20 | 21 | done if $status eq "FAIL" or $status eq "OK"; 22 | 23 | sleep(1); 24 | 25 | } 26 | 27 | } 28 | 29 | $supply.tap( -> $v { 30 | push @jobs, $v if $v eq "FAIL" or $v eq "OK"; 31 | say $v; 32 | }); 33 | 34 | } 35 | 36 | say @jobs.grep({$_ eq "OK"}).elems, " jobs finished successfully"; 37 | say @jobs.grep({$_ eq "FAIL"}).elems, " jobs failed"; 38 | say @jobs.grep({$_ eq "TIMEOUT"}).elems, " jobs timeouted"; 39 | 40 | } 41 | 42 | if tags() eq "main" { 43 | 44 | my $rand = ('a' .. 'z').pick(20).join(''); 45 | 46 | my $job-id = "{$rand}_1"; 47 | 48 | Sparky::JobApi.new(:project,:$job-id).queue({ 49 | description => "spawned job. 03.1", 50 | tags => %( 51 | seed => $rand, 52 | stage => "child", 53 | i => 1, 54 | ), 55 | }); 56 | 57 | my @jobs; 58 | 59 | # wait all 10 recursively launched jobs 60 | # that are not yet launched by that point 61 | # but will be launched recursively 62 | # in "child" jobs 63 | 64 | for 1 .. 
10 -> $i { 65 | 66 | my $project = "worker_{$i}"; 67 | 68 | my $job-id = "{$rand}_{$i}"; 69 | 70 | my $j = Sparky::JobApi.new: :$project, :$job-id; 71 | 72 | @jobs.push: $j; 73 | 74 | } 75 | 76 | wait-jobs @jobs; 77 | 78 | } elsif tags() eq "child" { 79 | 80 | say "I am a child job!"; 81 | 82 | say tags().perl; 83 | 84 | if tags() < 10 { 85 | 86 | my $i = tags().Int + 1; 87 | 88 | # do some useful stuff here 89 | # and launch another recursive job 90 | # with predefined project and job ID 91 | # $i variable gets incremented 92 | # and all recursively launched jobs 93 | # get waited in a "main" scenario, 94 | # function wait-jobs 95 | 96 | my $project = "worker_{$i}"; 97 | my $job-id = "{tags()}_{$i}"; 98 | 99 | Sparky::JobApi.new(:$project,:$job-id).queue({ 100 | description => "spawned job. 03.{$i}", 101 | tags => %( 102 | seed => tags(), 103 | stage => "child", 104 | i => $i, 105 | ), 106 | }); 107 | } 108 | } 109 | 110 | -------------------------------------------------------------------------------- /examples/job-queue-reqursive/sparky.yaml: -------------------------------------------------------------------------------- 1 | sparrowdo: 2 | no_sudo: true 3 | no_index_update: false 4 | bootstrap: false 5 | format: default 6 | tags: stage=main 7 | disabled: false 8 | allow_manual_run: true 9 | keep_builds: 5 10 | -------------------------------------------------------------------------------- /examples/job-queue-reqursive/sparrowfile: -------------------------------------------------------------------------------- 1 | if tags() eq "main" { 2 | 3 | use Sparky::JobApi; 4 | 5 | my $project = "spawned_01"; 6 | 7 | my $j = Sparky::JobApi.new(:project); 8 | 9 | $j.queue({ 10 | description => "spawned job. 02", 11 | tags => %( 12 | stage => "child", 13 | foo => 1, 14 | bar => 2, 15 | ), 16 | sparrowdo => %( 17 | no_index_update => True 18 | ) 19 | }); 20 | 21 | say "queue spawned job ", $j.info.perl; 22 | 23 | } elsif tags() eq "child" { 24 | 25 | use Sparky::JobApi; 26 | 27 | say "I am a child scenario"; 28 | 29 | my $j = Sparky::JobApi.new(:project); 30 | 31 | $j.queue({ 32 | description => "spawned job2. 
02", 33 | tags => %( 34 | stage => "off", 35 | foo => 1, 36 | bar => 2, 37 | ), 38 | }); 39 | 40 | say "queue spawned job ",$j.info.perl; 41 | 42 | } elsif tags() eq "off" { 43 | 44 | say "I am off now, good buy!"; 45 | say "config: ", config().perl; 46 | say "tags: ", tags().perl; 47 | 48 | } 49 | 50 | -------------------------------------------------------------------------------- /examples/job-queue-wait-child/sparky.yaml: -------------------------------------------------------------------------------- 1 | sparrowdo: 2 | no_sudo: true 3 | no_index_update: false 4 | bootstrap: false 5 | format: default 6 | tags: stage=main 7 | disabled: false 8 | allow_manual_run: true 9 | keep_builds: 5 10 | -------------------------------------------------------------------------------- /examples/job-queue-wait-child/sparrowfile: -------------------------------------------------------------------------------- 1 | if tags() eq "main" { 2 | 3 | # spawns a child job 4 | 5 | use Sparky::JobApi; 6 | my $j = Sparky::JobApi.new(:project); 7 | $j.queue({ 8 | description => "my spawned job", 9 | tags => %( 10 | stage => "child", 11 | foo => 1, 12 | bar => 2, 13 | ), 14 | }); 15 | 16 | say "queue spawned job, ",$j.info.perl; 17 | 18 | my $supply = supply { 19 | 20 | while True { 21 | 22 | my $status = $j.status; 23 | 24 | emit %( job-id => $j.info, status => $status ); 25 | 26 | done if $status eq "FAIL" or $status eq "OK"; 27 | 28 | sleep(5); 29 | 30 | } 31 | } 32 | 33 | $supply.tap( -> $v { 34 | say $v; 35 | }); 36 | } elsif tags() eq "child" { 37 | 38 | # child job here 39 | 40 | sleep(10); 41 | 42 | say "config: ", config().perl; 43 | say "tags: ", tags().perl; 44 | 45 | } 46 | -------------------------------------------------------------------------------- /examples/job-queue-with-class/sparky.yaml: -------------------------------------------------------------------------------- 1 | sparrowdo: 2 | no_sudo: true 3 | no_index_update: false 4 | bootstrap: false 5 | format: default 6 | tags: stage=main 7 | disabled: false 8 | allow_manual_run: true 9 | keep_builds: 5 10 | -------------------------------------------------------------------------------- /examples/job-queue-with-class/sparrowfile: -------------------------------------------------------------------------------- 1 | use Sparky::JobApi; 2 | 3 | class Pipeline 4 | 5 | does Sparky::JobApi::Role 6 | 7 | { 8 | 9 | method stage-main { 10 | 11 | my $j = self.new-job: :project; 12 | 13 | $j.queue({ 14 | description => "spawned job. 
01", 15 | tags => %( 16 | stage => "child", 17 | foo => 1, 18 | bar => 2, 19 | ), 20 | }); 21 | 22 | say "job info: ", $j.info.perl; 23 | 24 | my $st = self.wait-job($j); 25 | 26 | say $st.perl; 27 | 28 | die if $st; 29 | 30 | } 31 | 32 | method stage-child { 33 | 34 | say "I am a child scenario"; 35 | say "config: ", config().perl; 36 | say "tags: ", tags().perl; 37 | 38 | } 39 | 40 | } 41 | 42 | Pipeline.new.run; 43 | -------------------------------------------------------------------------------- /examples/job-queue-workers/sparky.yaml: -------------------------------------------------------------------------------- 1 | sparrowdo: 2 | no_sudo: true 3 | no_index_update: false 4 | bootstrap: false 5 | format: default 6 | tags: stage=main 7 | disabled: false 8 | allow_manual_run: true 9 | keep_builds: 5 10 | -------------------------------------------------------------------------------- /examples/job-queue-workers/sparrowfile: -------------------------------------------------------------------------------- 1 | if tags() eq "main" { 2 | 3 | use Sparky::JobApi; 4 | 5 | my $j = Sparky::JobApi.new(:workers<90>); 6 | 7 | $j.queue({ 8 | description => "spawned job", 9 | tags => %( 10 | stage => "child", 11 | foo => 1, 12 | bar => 2, 13 | ), 14 | }); 15 | 16 | say "queue spawned job ", $j.info; 17 | 18 | } elsif tags() eq "child" { 19 | 20 | say "I am a child scenario"; 21 | say "config: ", config().perl; 22 | say "tags: ", tags().perl; 23 | 24 | } 25 | -------------------------------------------------------------------------------- /examples/job-queue/sparky.yaml: -------------------------------------------------------------------------------- 1 | sparrowdo: 2 | no_sudo: true 3 | no_index_update: false 4 | bootstrap: false 5 | format: default 6 | tags: stage=main 7 | disabled: false 8 | allow_manual_run: true 9 | keep_builds: 5 10 | -------------------------------------------------------------------------------- /examples/job-queue/sparrowfile: -------------------------------------------------------------------------------- 1 | if tags() eq "main" { 2 | 3 | use Sparky::JobApi; 4 | 5 | my $j = Sparky::JobApi.new(:project); 6 | 7 | $j.queue({ 8 | description => "spawned job. 
01", 9 | tags => %( 10 | stage => "child", 11 | foo => 1, 12 | bar => 2, 13 | ), 14 | }); 15 | 16 | say "job info: ", $j.info.perl; 17 | 18 | } elsif tags() eq "child" { 19 | 20 | say "I am a child scenario"; 21 | say "config: ", config().perl; 22 | say "tags: ", tags().perl; 23 | 24 | } 25 | -------------------------------------------------------------------------------- /examples/job-stash-child-parent/sparky.yaml: -------------------------------------------------------------------------------- 1 | sparrowdo: 2 | no_sudo: true 3 | no_index_update: false 4 | bootstrap: false 5 | format: default 6 | tags: stage=main 7 | disabled: false 8 | allow_manual_run: true 9 | keep_builds: 5 10 | -------------------------------------------------------------------------------- /examples/job-stash-child-parent/sparrowfile: -------------------------------------------------------------------------------- 1 | use Sparky::JobApi; 2 | 3 | if tags() eq "main" { 4 | 5 | # spawns a child job 6 | 7 | my $j = Sparky::JobApi.new(:project); 8 | $j.queue({ 9 | description => "my spawned job", 10 | tags => %( 11 | stage => "child", 12 | foo => 1, 13 | bar => 2, 14 | ), 15 | }); 16 | 17 | say "queue spawned job, ",$j.info.perl; 18 | 19 | my $supply = supply { 20 | 21 | while True { 22 | 23 | my $status = $j.status; 24 | 25 | emit %( job-id => $j.info, status => $status ); 26 | 27 | done if $status eq "FAIL" or $status eq "OK"; 28 | 29 | sleep(5); 30 | 31 | } 32 | } 33 | 34 | $supply.tap( -> $v { 35 | say $v; 36 | }); 37 | 38 | say $j.get-stash().perl; 39 | 40 | 41 | } elsif tags() eq "child" { 42 | 43 | # child job here 44 | 45 | say "config: ", config().perl; 46 | say "tags: ", tags().perl; 47 | 48 | my $j = Sparky::JobApi.new( mine => True ); 49 | 50 | $j.put-stash({ hello => "Sparky" }); 51 | 52 | } 53 | -------------------------------------------------------------------------------- /examples/job-stash-parent-child/sparky.yaml: -------------------------------------------------------------------------------- 1 | sparrowdo: 2 | no_sudo: true 3 | no_index_update: false 4 | bootstrap: false 5 | format: default 6 | tags: stage=main 7 | disabled: false 8 | allow_manual_run: true 9 | keep_builds: 5 10 | -------------------------------------------------------------------------------- /examples/job-stash-parent-child/sparrowfile: -------------------------------------------------------------------------------- 1 | use Sparky::JobApi; 2 | 3 | if tags() eq "main" { 4 | 5 | # spawns a child job 6 | 7 | my $j = Sparky::JobApi.new(:project); 8 | 9 | $j.put-stash({ hello => "world" }); 10 | 11 | $j.queue({ 12 | description => "my spawned job", 13 | tags => %( 14 | stage => "child", 15 | foo => 1, 16 | bar => 2, 17 | ), 18 | }); 19 | 20 | say "queue spawned job, ",$j.info.perl; 21 | 22 | my $supply = supply { 23 | 24 | while True { 25 | 26 | my $status = $j.status; 27 | 28 | emit %( job-id => $j.info, status => $status ); 29 | 30 | done if $status eq "FAIL" or $status eq "OK"; 31 | 32 | sleep(5); 33 | 34 | } 35 | } 36 | 37 | $supply.tap( -> $v { 38 | say $v; 39 | }); 40 | 41 | } elsif tags() eq "child" { 42 | 43 | # child job here 44 | 45 | say "config: ", config().perl; 46 | say "tags: ", tags().perl; 47 | 48 | my $j = Sparky::JobApi.new( mine => True ); 49 | 50 | say $j.get-stash().perl; 51 | 52 | } 53 | -------------------------------------------------------------------------------- /examples/raku-project/sparky.yaml: -------------------------------------------------------------------------------- 1 | #crontab: "* * * * *" 2 | 
sparrowdo: 3 | no_sudo: true 4 | no_index_update: false 5 | bootstrap: false 6 | allow_manual_run: true 7 | #disabled: false 8 | keep_builds: 5 9 | 10 |
-------------------------------------------------------------------------------- /examples/raku-project/sparrowfile: -------------------------------------------------------------------------------- 1 | directory "project"; 2 | 3 | git-scm 'https://github.com/melezhik/rakudist-teddy-bear.git', %( 4 | to => "project", 5 | ); 6 | 7 | zef "{%*ENV<PWD>}/project", %( depsonly => True ); 8 | 9 | zef 'TAP::Harness App::Prove6'; 10 | 11 | bash 'prove6 -l', %( 12 | debug => True, 13 | cwd => "{%*ENV<PWD>}/project/" 14 | ); 15 |
-------------------------------------------------------------------------------- /examples/request.json: -------------------------------------------------------------------------------- 1 | { 2 | "description" : "test build", 3 | "tags" : "message=hello,from=Sparky" 4 | } 5 |
-------------------------------------------------------------------------------- /examples/scm-trigger/sparky.yaml: -------------------------------------------------------------------------------- 1 | scm: 2 | url: https://github.com/melezhik/rakudist-teddy-bear.git 3 | branch: test111 4 | sparrowdo: 5 | no_sudo: true 6 | no_index_update: false 7 | bootstrap: false 8 | allow_manual_run: true 9 | keep_builds: 5 10 |
-------------------------------------------------------------------------------- /examples/scm-trigger/sparrowfile: -------------------------------------------------------------------------------- 1 | say tags().perl; 2 | 3 | directory "scm"; 4 | 5 | git-scm tags()<SCM_URL>, %( 6 | to => "scm", 7 | branch => tags()<SCM_BRANCH> 8 | ); 9 | 10 | bash "ls -l {%*ENV<PWD>}/scm"; 11 |
-------------------------------------------------------------------------------- /icons/build-fail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melezhik/sparky/554356891dcbf7e5460d7209891ca62515406ca2/icons/build-fail.png -------------------------------------------------------------------------------- /icons/build-na.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melezhik/sparky/554356891dcbf7e5460d7209891ca62515406ca2/icons/build-na.png -------------------------------------------------------------------------------- /icons/build-pass.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melezhik/sparky/554356891dcbf7e5460d7209891ca62515406ca2/icons/build-pass.png -------------------------------------------------------------------------------- /icons/build-queue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melezhik/sparky/554356891dcbf7e5460d7209891ca62515406ca2/icons/build-queue.png -------------------------------------------------------------------------------- /icons/build-run.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melezhik/sparky/554356891dcbf7e5460d7209891ca62515406ca2/icons/build-run.png -------------------------------------------------------------------------------- /images/sparky-web-ui-build-with-params.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melezhik/sparky/554356891dcbf7e5460d7209891ca62515406ca2/images/sparky-web-ui-build-with-params.jpeg
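A minimal sketch of how a payload like examples/request.json above might be posted to a running Sparky instance to trigger a build; the route shown is an assumption for illustration only (docs/api.md and docs/job_api.md describe the real HTTP endpoints), and the port follows the 4000 default used by the web UI:

  # hypothetical invocation against a local instance; adjust host, route and project name
  curl -k -X POST -H "Content-Type: application/json" \
    -d @examples/request.json \
    https://127.0.0.1:4000/build/project/my-project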
-------------------------------------------------------------------------------- /images/sparky-web-ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melezhik/sparky/554356891dcbf7e5460d7209891ca62515406ca2/images/sparky-web-ui.png -------------------------------------------------------------------------------- /images/sparky-web-ui3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melezhik/sparky/554356891dcbf7e5460d7209891ca62515406ca2/images/sparky-web-ui3.png -------------------------------------------------------------------------------- /images/sparky-web-ui4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melezhik/sparky/554356891dcbf7e5460d7209891ca62515406ca2/images/sparky-web-ui4.png -------------------------------------------------------------------------------- /images/sparky-web-ui5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melezhik/sparky/554356891dcbf7e5460d7209891ca62515406ca2/images/sparky-web-ui5.png -------------------------------------------------------------------------------- /images/sparky-web-ui6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melezhik/sparky/554356891dcbf7e5460d7209891ca62515406ca2/images/sparky-web-ui6.png -------------------------------------------------------------------------------- /images/task-result.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melezhik/sparky/554356891dcbf7e5460d7209891ca62515406ca2/images/task-result.png -------------------------------------------------------------------------------- /js/ansi_up.js: -------------------------------------------------------------------------------- 1 | /* ansi_up.js 2 | * author : Dru Nelson 3 | * license : MIT 4 | * http://github.com/drudru/ansi_up 5 | */ 6 | (function (root, factory) { 7 | if (typeof define === 'function' && define.amd) { 8 | // AMD. Register as an anonymous module. 
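// (clarifying note) Standard UMD wrapper: register with an AMD loader when one
// is present, fall back to CommonJS-style exports, and otherwise expose the
// library as the browser global `AnsiUp`.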
9 | define(['exports'], factory); 10 | } else if (typeof exports === 'object' && typeof exports.nodeName !== 'string') { 11 | // CommonJS 12 | factory(exports); 13 | } else { 14 | // Browser globals 15 | var exp = {}; 16 | factory(exp); 17 | root.AnsiUp = exp.default; 18 | } 19 | }(this, function (exports) { 20 | "use strict"; 21 | var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) { 22 | if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } 23 | return cooked; 24 | }; 25 | var PacketKind; 26 | (function (PacketKind) { 27 | PacketKind[PacketKind["EOS"] = 0] = "EOS"; 28 | PacketKind[PacketKind["Text"] = 1] = "Text"; 29 | PacketKind[PacketKind["Incomplete"] = 2] = "Incomplete"; 30 | PacketKind[PacketKind["ESC"] = 3] = "ESC"; 31 | PacketKind[PacketKind["Unknown"] = 4] = "Unknown"; 32 | PacketKind[PacketKind["SGR"] = 5] = "SGR"; 33 | PacketKind[PacketKind["OSCURL"] = 6] = "OSCURL"; 34 | })(PacketKind || (PacketKind = {})); 35 | var AnsiUp = (function () { 36 | function AnsiUp() { 37 | this.VERSION = "5.1.0"; 38 | this.setup_palettes(); 39 | this._use_classes = false; 40 | this.bold = false; 41 | this.italic = false; 42 | this.underline = false; 43 | this.fg = this.bg = null; 44 | this._buffer = ''; 45 | this._url_whitelist = { 'http': 1, 'https': 1 }; 46 | } 47 | Object.defineProperty(AnsiUp.prototype, "use_classes", { 48 | get: function () { 49 | return this._use_classes; 50 | }, 51 | set: function (arg) { 52 | this._use_classes = arg; 53 | }, 54 | enumerable: false, 55 | configurable: true 56 | }); 57 | Object.defineProperty(AnsiUp.prototype, "url_whitelist", { 58 | get: function () { 59 | return this._url_whitelist; 60 | }, 61 | set: function (arg) { 62 | this._url_whitelist = arg; 63 | }, 64 | enumerable: false, 65 | configurable: true 66 | }); 67 | AnsiUp.prototype.setup_palettes = function () { 68 | var _this = this; 69 | this.ansi_colors = 70 | [ 71 | [ 72 | { rgb: [0, 0, 0], class_name: "ansi-black" }, 73 | { rgb: [187, 0, 0], class_name: "ansi-red" }, 74 | { rgb: [0, 187, 0], class_name: "ansi-green" }, 75 | { rgb: [187, 187, 0], class_name: "ansi-yellow" }, 76 | { rgb: [0, 0, 187], class_name: "ansi-blue" }, 77 | { rgb: [187, 0, 187], class_name: "ansi-magenta" }, 78 | { rgb: [0, 187, 187], class_name: "ansi-cyan" }, 79 | { rgb: [255, 255, 255], class_name: "ansi-white" } 80 | ], 81 | [ 82 | { rgb: [85, 85, 85], class_name: "ansi-bright-black" }, 83 | { rgb: [255, 85, 85], class_name: "ansi-bright-red" }, 84 | { rgb: [0, 255, 0], class_name: "ansi-bright-green" }, 85 | { rgb: [255, 255, 85], class_name: "ansi-bright-yellow" }, 86 | { rgb: [85, 85, 255], class_name: "ansi-bright-blue" }, 87 | { rgb: [255, 85, 255], class_name: "ansi-bright-magenta" }, 88 | { rgb: [85, 255, 255], class_name: "ansi-bright-cyan" }, 89 | { rgb: [255, 255, 255], class_name: "ansi-bright-white" } 90 | ] 91 | ]; 92 | this.palette_256 = []; 93 | this.ansi_colors.forEach(function (palette) { 94 | palette.forEach(function (rec) { 95 | _this.palette_256.push(rec); 96 | }); 97 | }); 98 | var levels = [0, 95, 135, 175, 215, 255]; 99 | for (var r = 0; r < 6; ++r) { 100 | for (var g = 0; g < 6; ++g) { 101 | for (var b = 0; b < 6; ++b) { 102 | var col = { rgb: [levels[r], levels[g], levels[b]], class_name: 'truecolor' }; 103 | this.palette_256.push(col); 104 | } 105 | } 106 | } 107 | var grey_level = 8; 108 | for (var i = 0; i < 24; ++i, grey_level += 10) { 109 | var gry = { rgb: [grey_level, grey_level, grey_level], 
class_name: 'truecolor' }; 110 | this.palette_256.push(gry); 111 | } 112 | }; 113 | AnsiUp.prototype.escape_txt_for_html = function (txt) { 114 | return txt.replace(/[&<>"']/gm, function (str) { 115 | if (str === "&") 116 | return "&"; 117 | if (str === "<") 118 | return "<"; 119 | if (str === ">") 120 | return ">"; 121 | if (str === "\"") 122 | return """; 123 | if (str === "'") 124 | return "'"; 125 | }); 126 | }; 127 | AnsiUp.prototype.append_buffer = function (txt) { 128 | var str = this._buffer + txt; 129 | this._buffer = str; 130 | }; 131 | AnsiUp.prototype.get_next_packet = function () { 132 | var pkt = { 133 | kind: PacketKind.EOS, 134 | text: '', 135 | url: '' 136 | }; 137 | var len = this._buffer.length; 138 | if (len == 0) 139 | return pkt; 140 | var pos = this._buffer.indexOf("\x1B"); 141 | if (pos == -1) { 142 | pkt.kind = PacketKind.Text; 143 | pkt.text = this._buffer; 144 | this._buffer = ''; 145 | return pkt; 146 | } 147 | if (pos > 0) { 148 | pkt.kind = PacketKind.Text; 149 | pkt.text = this._buffer.slice(0, pos); 150 | this._buffer = this._buffer.slice(pos); 151 | return pkt; 152 | } 153 | if (pos == 0) { 154 | if (len == 1) { 155 | pkt.kind = PacketKind.Incomplete; 156 | return pkt; 157 | } 158 | var next_char = this._buffer.charAt(1); 159 | if ((next_char != '[') && (next_char != ']')) { 160 | pkt.kind = PacketKind.ESC; 161 | pkt.text = this._buffer.slice(0, 1); 162 | this._buffer = this._buffer.slice(1); 163 | return pkt; 164 | } 165 | if (next_char == '[') { 166 | if (!this._csi_regex) { 167 | this._csi_regex = rgx(__makeTemplateObject(["\n ^ # beginning of line\n #\n # First attempt\n (?: # legal sequence\n \u001B[ # CSI\n ([<-?]?) # private-mode char\n ([d;]*) # any digits or semicolons\n ([ -/]? # an intermediate modifier\n [@-~]) # the command\n )\n | # alternate (second attempt)\n (?: # illegal sequence\n \u001B[ # CSI\n [ -~]* # anything legal\n ([\0-\u001F:]) # anything illegal\n )\n "], ["\n ^ # beginning of line\n #\n # First attempt\n (?: # legal sequence\n \\x1b\\[ # CSI\n ([\\x3c-\\x3f]?) # private-mode char\n ([\\d;]*) # any digits or semicolons\n ([\\x20-\\x2f]? 
# an intermediate modifier\n [\\x40-\\x7e]) # the command\n )\n | # alternate (second attempt)\n (?: # illegal sequence\n \\x1b\\[ # CSI\n [\\x20-\\x7e]* # anything legal\n ([\\x00-\\x1f:]) # anything illegal\n )\n "])); 168 | } 169 | var match = this._buffer.match(this._csi_regex); 170 | if (match === null) { 171 | pkt.kind = PacketKind.Incomplete; 172 | return pkt; 173 | } 174 | if (match[4]) { 175 | pkt.kind = PacketKind.ESC; 176 | pkt.text = this._buffer.slice(0, 1); 177 | this._buffer = this._buffer.slice(1); 178 | return pkt; 179 | } 180 | if ((match[1] != '') || (match[3] != 'm')) 181 | pkt.kind = PacketKind.Unknown; 182 | else 183 | pkt.kind = PacketKind.SGR; 184 | pkt.text = match[2]; 185 | var rpos = match[0].length; 186 | this._buffer = this._buffer.slice(rpos); 187 | return pkt; 188 | } 189 | if (next_char == ']') { 190 | if (len < 4) { 191 | pkt.kind = PacketKind.Incomplete; 192 | return pkt; 193 | } 194 | if ((this._buffer.charAt(2) != '8') 195 | || (this._buffer.charAt(3) != ';')) { 196 | pkt.kind = PacketKind.ESC; 197 | pkt.text = this._buffer.slice(0, 1); 198 | this._buffer = this._buffer.slice(1); 199 | return pkt; 200 | } 201 | if (!this._osc_st) { 202 | this._osc_st = rgxG(__makeTemplateObject(["\n (?: # legal sequence\n (\u001B\\) # ESC | # alternate\n (\u0007) # BEL (what xterm did)\n )\n | # alternate (second attempt)\n ( # illegal sequence\n [\0-\u0006] # anything illegal\n | # alternate\n [\b-\u001A] # anything illegal\n | # alternate\n [\u001C-\u001F] # anything illegal\n )\n "], ["\n (?: # legal sequence\n (\\x1b\\\\) # ESC \\\n | # alternate\n (\\x07) # BEL (what xterm did)\n )\n | # alternate (second attempt)\n ( # illegal sequence\n [\\x00-\\x06] # anything illegal\n | # alternate\n [\\x08-\\x1a] # anything illegal\n | # alternate\n [\\x1c-\\x1f] # anything illegal\n )\n "])); 203 | } 204 | this._osc_st.lastIndex = 0; 205 | { 206 | var match_1 = this._osc_st.exec(this._buffer); 207 | if (match_1 === null) { 208 | pkt.kind = PacketKind.Incomplete; 209 | return pkt; 210 | } 211 | if (match_1[3]) { 212 | pkt.kind = PacketKind.ESC; 213 | pkt.text = this._buffer.slice(0, 1); 214 | this._buffer = this._buffer.slice(1); 215 | return pkt; 216 | } 217 | } 218 | { 219 | var match_2 = this._osc_st.exec(this._buffer); 220 | if (match_2 === null) { 221 | pkt.kind = PacketKind.Incomplete; 222 | return pkt; 223 | } 224 | if (match_2[3]) { 225 | pkt.kind = PacketKind.ESC; 226 | pkt.text = this._buffer.slice(0, 1); 227 | this._buffer = this._buffer.slice(1); 228 | return pkt; 229 | } 230 | } 231 | if (!this._osc_regex) { 232 | this._osc_regex = rgx(__makeTemplateObject(["\n ^ # beginning of line\n #\n \u001B]8; # OSC Hyperlink\n [ -:<-~]* # params (excluding ;)\n ; # end of params\n ([!-~]{0,512}) # URL capture\n (?: # ST\n (?:\u001B\\) # ESC | # alternate\n (?:\u0007) # BEL (what xterm did)\n )\n ([ -~]+) # TEXT capture\n \u001B]8;; # OSC Hyperlink End\n (?: # ST\n (?:\u001B\\) # ESC | # alternate\n (?:\u0007) # BEL (what xterm did)\n )\n "], ["\n ^ # beginning of line\n #\n \\x1b\\]8; # OSC Hyperlink\n [\\x20-\\x3a\\x3c-\\x7e]* # params (excluding ;)\n ; # end of params\n ([\\x21-\\x7e]{0,512}) # URL capture\n (?: # ST\n (?:\\x1b\\\\) # ESC \\\n | # alternate\n (?:\\x07) # BEL (what xterm did)\n )\n ([\\x20-\\x7e]+) # TEXT capture\n \\x1b\\]8;; # OSC Hyperlink End\n (?: # ST\n (?:\\x1b\\\\) # ESC \\\n | # alternate\n (?:\\x07) # BEL (what xterm did)\n )\n "])); 233 | } 234 | var match = this._buffer.match(this._osc_regex); 235 | if (match === null) { 236 | pkt.kind = 
PacketKind.ESC; 237 | pkt.text = this._buffer.slice(0, 1); 238 | this._buffer = this._buffer.slice(1); 239 | return pkt; 240 | } 241 | pkt.kind = PacketKind.OSCURL; 242 | pkt.url = match[1]; 243 | pkt.text = match[2]; 244 | var rpos = match[0].length; 245 | this._buffer = this._buffer.slice(rpos); 246 | return pkt; 247 | } 248 | } 249 | }; 250 | AnsiUp.prototype.ansi_to_html = function (txt) { 251 | this.append_buffer(txt); 252 | var blocks = []; 253 | while (true) { 254 | var packet = this.get_next_packet(); 255 | if ((packet.kind == PacketKind.EOS) 256 | || (packet.kind == PacketKind.Incomplete)) 257 | break; 258 | if ((packet.kind == PacketKind.ESC) 259 | || (packet.kind == PacketKind.Unknown)) 260 | continue; 261 | if (packet.kind == PacketKind.Text) 262 | blocks.push(this.transform_to_html(this.with_state(packet))); 263 | else if (packet.kind == PacketKind.SGR) 264 | this.process_ansi(packet); 265 | else if (packet.kind == PacketKind.OSCURL) 266 | blocks.push(this.process_hyperlink(packet)); 267 | } 268 | return blocks.join(""); 269 | }; 270 | AnsiUp.prototype.with_state = function (pkt) { 271 | return { bold: this.bold, italic: this.italic, underline: this.underline, fg: this.fg, bg: this.bg, text: pkt.text }; 272 | }; 273 | AnsiUp.prototype.process_ansi = function (pkt) { 274 | var sgr_cmds = pkt.text.split(';'); 275 | while (sgr_cmds.length > 0) { 276 | var sgr_cmd_str = sgr_cmds.shift(); 277 | var num = parseInt(sgr_cmd_str, 10); 278 | if (isNaN(num) || num === 0) { 279 | this.fg = this.bg = null; 280 | this.bold = false; 281 | this.italic = false; 282 | this.underline = false; 283 | } 284 | else if (num === 1) { 285 | this.bold = true; 286 | } 287 | else if (num === 3) { 288 | this.italic = true; 289 | } 290 | else if (num === 4) { 291 | this.underline = true; 292 | } 293 | else if (num === 22) { 294 | this.bold = false; 295 | } 296 | else if (num === 23) { 297 | this.italic = false; 298 | } 299 | else if (num === 24) { 300 | this.underline = false; 301 | } 302 | else if (num === 39) { 303 | this.fg = null; 304 | } 305 | else if (num === 49) { 306 | this.bg = null; 307 | } 308 | else if ((num >= 30) && (num < 38)) { 309 | this.fg = this.ansi_colors[0][(num - 30)]; 310 | } 311 | else if ((num >= 40) && (num < 48)) { 312 | this.bg = this.ansi_colors[0][(num - 40)]; 313 | } 314 | else if ((num >= 90) && (num < 98)) { 315 | this.fg = this.ansi_colors[1][(num - 90)]; 316 | } 317 | else if ((num >= 100) && (num < 108)) { 318 | this.bg = this.ansi_colors[1][(num - 100)]; 319 | } 320 | else if (num === 38 || num === 48) { 321 | if (sgr_cmds.length > 0) { 322 | var is_foreground = (num === 38); 323 | var mode_cmd = sgr_cmds.shift(); 324 | if (mode_cmd === '5' && sgr_cmds.length > 0) { 325 | var palette_index = parseInt(sgr_cmds.shift(), 10); 326 | if (palette_index >= 0 && palette_index <= 255) { 327 | if (is_foreground) 328 | this.fg = this.palette_256[palette_index]; 329 | else 330 | this.bg = this.palette_256[palette_index]; 331 | } 332 | } 333 | if (mode_cmd === '2' && sgr_cmds.length > 2) { 334 | var r = parseInt(sgr_cmds.shift(), 10); 335 | var g = parseInt(sgr_cmds.shift(), 10); 336 | var b = parseInt(sgr_cmds.shift(), 10); 337 | if ((r >= 0 && r <= 255) && (g >= 0 && g <= 255) && (b >= 0 && b <= 255)) { 338 | var c = { rgb: [r, g, b], class_name: 'truecolor' }; 339 | if (is_foreground) 340 | this.fg = c; 341 | else 342 | this.bg = c; 343 | } 344 | } 345 | } 346 | } 347 | } 348 | }; 349 | AnsiUp.prototype.transform_to_html = function (fragment) { 350 | var txt = fragment.text; 351 
| if (txt.length === 0) 352 | return txt; 353 | txt = this.escape_txt_for_html(txt); 354 | if (!fragment.bold && !fragment.italic && !fragment.underline && fragment.fg === null && fragment.bg === null) 355 | return txt; 356 | var styles = []; 357 | var classes = []; 358 | var fg = fragment.fg; 359 | var bg = fragment.bg; 360 | if (fragment.bold) 361 | styles.push('font-weight:bold'); 362 | if (fragment.italic) 363 | styles.push('font-style:italic'); 364 | if (fragment.underline) 365 | styles.push('text-decoration:underline'); 366 | if (!this._use_classes) { 367 | if (fg) 368 | styles.push("color:rgb(" + fg.rgb.join(',') + ")"); 369 | if (bg) 370 | styles.push("background-color:rgb(" + bg.rgb + ")"); 371 | } 372 | else { 373 | if (fg) { 374 | if (fg.class_name !== 'truecolor') { 375 | classes.push(fg.class_name + "-fg"); 376 | } 377 | else { 378 | styles.push("color:rgb(" + fg.rgb.join(',') + ")"); 379 | } 380 | } 381 | if (bg) { 382 | if (bg.class_name !== 'truecolor') { 383 | classes.push(bg.class_name + "-bg"); 384 | } 385 | else { 386 | styles.push("background-color:rgb(" + bg.rgb.join(',') + ")"); 387 | } 388 | } 389 | } 390 | var class_string = ''; 391 | var style_string = ''; 392 | if (classes.length) 393 | class_string = " class=\"" + classes.join(' ') + "\""; 394 | if (styles.length) 395 | style_string = " style=\"" + styles.join(';') + "\""; 396 | return "" + txt + ""; 397 | }; 398 | ; 399 | AnsiUp.prototype.process_hyperlink = function (pkt) { 400 | var parts = pkt.url.split(':'); 401 | if (parts.length < 1) 402 | return ''; 403 | if (!this._url_whitelist[parts[0]]) 404 | return ''; 405 | var result = "" + this.escape_txt_for_html(pkt.text) + ""; 406 | return result; 407 | }; 408 | return AnsiUp; 409 | }()); 410 | function rgx(tmplObj) { 411 | var subst = []; 412 | for (var _i = 1; _i < arguments.length; _i++) { 413 | subst[_i - 1] = arguments[_i]; 414 | } 415 | var regexText = tmplObj.raw[0]; 416 | var wsrgx = /^\s+|\s+\n|\s*#[\s\S]*?\n|\n/gm; 417 | var txt2 = regexText.replace(wsrgx, ''); 418 | return new RegExp(txt2); 419 | } 420 | function rgxG(tmplObj) { 421 | var subst = []; 422 | for (var _i = 1; _i < arguments.length; _i++) { 423 | subst[_i - 1] = arguments[_i]; 424 | } 425 | var regexText = tmplObj.raw[0]; 426 | var wsrgx = /^\s+|\s+\n|\s*#[\s\S]*?\n|\n/gm; 427 | var txt2 = regexText.replace(wsrgx, ''); 428 | return new RegExp(txt2, 'g'); 429 | } 430 | //# sourceMappingURL=ansi_up.js.map 431 | Object.defineProperty(exports, "__esModule", { value: true }); 432 | exports.default = AnsiUp; 433 | })); 434 | -------------------------------------------------------------------------------- /js/misc.js: -------------------------------------------------------------------------------- 1 | function openTab(evt, tabName) { 2 | var i, x, tablinks; 3 | x = document.getElementsByClassName("content-tab"); 4 | for (i = 0; i < x.length; i++) { 5 | x[i].style.display = "none"; 6 | } 7 | tablinks = document.getElementsByClassName("tab"); 8 | for (i = 0; i < x.length; i++) { 9 | tablinks[i].className = tablinks[i].className.replace(" is-active", ""); 10 | } 11 | document.getElementById(tabName).style.display = "block"; 12 | evt.currentTarget.className += " is-active"; 13 | } 14 | 15 | function getSelectValues(elm) { 16 | var result = []; 17 | var options = elm && elm.options; 18 | var opt; 19 | for (var i=0, iLen=options.length; i; 4 | use YAMLish; 5 | use DBIish; 6 | use Time::Crontab; 7 | 8 | my $root = %*ENV || %*ENV ~ '/.sparky/projects'; 9 | my %conf; 10 | 11 | sub sparky-http-root is 
export { 12 | 13 | %*ENV || ""; 14 | 15 | } 16 | 17 | sub sparky-host is export { 18 | 19 | get-sparky-conf() || "0.0.0.0"; 20 | 21 | } 22 | 23 | sub sparky-use-tls is export { 24 | 25 | get-sparky-conf(); 26 | 27 | } 28 | 29 | sub sparky-tls-settings is export { 30 | get-sparky-conf() 31 | } 32 | 33 | sub sparky-tcp-port is export { 34 | 35 | get-sparky-conf() || 4000; 36 | 37 | } 38 | 39 | sub sparky-api-token is export { 40 | 41 | get-sparky-conf(); 42 | 43 | } 44 | 45 | sub sparky-auth is export { 46 | get-sparky-conf() || %( 47 | default => True, 48 | users => [ 49 | { 50 | login => "admin", 51 | # default password is admin 52 | password => "456b7016a916a4b178dd72b947c152b7" # md5sum('admin') 53 | }, 54 | ] 55 | ); 56 | } 57 | 58 | sub sparky-with-flapper is export { 59 | 60 | ! ( get-sparky-conf() || False ) && 61 | ! %*ENV 62 | 63 | } 64 | 65 | sub sparky-allow-rebuild-spawn is export { 66 | 67 | get-sparky-conf() || False; 68 | 69 | } 70 | 71 | sub get-sparky-conf is export { 72 | 73 | return %conf if %conf; 74 | 75 | my $conf-file = %*ENV ~ '/sparky.yaml'; 76 | 77 | # say ">>> ", $conf-file.IO.slurp; 78 | 79 | # say ">>> parse sparky yaml config from: $conf-file"; 80 | 81 | %conf = $conf-file.IO ~~ :f ?? load-yaml($conf-file.IO.slurp) !! Hash.new; 82 | 83 | return %conf; 84 | 85 | } 86 | 87 | sub get-database-engine is export { 88 | 89 | my %conf = get-sparky-conf(); 90 | 91 | if %conf && %conf { 92 | return %conf 93 | } else { 94 | return "sqlite" 95 | } 96 | } 97 | 98 | multi sub get-dbh ( $dir ) is export { 99 | 100 | #return $dbh if $dbh; 101 | 102 | my $dbh; 103 | 104 | my %conf = get-sparky-conf(); 105 | 106 | if %conf && %conf && %conf !~~ / :i sqlite / { 107 | 108 | $dbh = DBIish.connect( 109 | %conf, 110 | host => %conf, 111 | port => %conf, 112 | database => %conf, 113 | user => %conf, 114 | password => %conf, 115 | ); 116 | 117 | #say "load {%conf} dbh"; 118 | 119 | } else { 120 | 121 | $dbh = DBIish.connect("SQLite", database => "$dir/../db.sqlite3".IO.absolute ); 122 | 123 | say "{DateTime.now} --- load sqlite dbh for: " ~ ("$dir/../db.sqlite3".IO.absolute); 124 | 125 | } 126 | 127 | return $dbh 128 | 129 | } 130 | 131 | 132 | multi sub get-dbh { 133 | 134 | #return $dbh if $dbh; 135 | 136 | my $dbh; 137 | 138 | my %conf = get-sparky-conf(); 139 | 140 | if %conf && %conf && %conf !~~ / :i sqlite / { 141 | 142 | $dbh = DBIish.connect( 143 | %conf, 144 | host => %conf, 145 | port => %conf, 146 | database => %conf, 147 | user => %conf, 148 | password => %conf, 149 | ); 150 | 151 | } else { 152 | 153 | my $db-name = "$root/db.sqlite3"; 154 | $dbh = DBIish.connect("SQLite", database => $db-name ); 155 | 156 | } 157 | 158 | return $dbh; 159 | 160 | } 161 | 162 | sub build-is-running ( $dir ) { 163 | 164 | my $project = $dir.IO.basename; 165 | 166 | my @proc-check-cmd = ("bash", "-c", "ps aux | grep sparky-runner.raku | grep '\\--marker=$project ' | grep -v grep"); 167 | 168 | my $proc-run = run @proc-check-cmd, :out; 169 | 170 | if $proc-run.exitcode == 0 { 171 | 172 | $proc-run.out.get ~~ m/(\d+)/; 173 | 174 | my $pid = $0; 175 | 176 | say "{DateTime.now} --- [$project] build already running, pid: $pid SKIP ... 
"; 177 | 178 | return True 179 | 180 | } else { 181 | 182 | return False 183 | } 184 | 185 | } 186 | 187 | sub builds-running-cnt { 188 | 189 | my @proc-check-cmd = ("bash", "-c", "ps aux | grep sparky-runner.raku | grep -v grep | wc -l"); 190 | 191 | my $proc-run = run @proc-check-cmd, :out; 192 | 193 | if $proc-run.exitcode == 0 { 194 | 195 | $proc-run.out.get ~~ m/(\d+)/; 196 | 197 | my $cnt = $0; 198 | 199 | say "{DateTime.now} --- sparky jobs running, cnt: $cnt"; 200 | 201 | return $cnt 202 | 203 | } else { 204 | 205 | return 0 206 | } 207 | 208 | } 209 | 210 | sub schedule-build ( $dir, %opts? ) is export { 211 | 212 | my $project = $dir.IO.basename; 213 | 214 | my %config = Hash.new; 215 | 216 | #my $jobs-cnt = builds-running-cnt(); 217 | 218 | #if %*ENV { 219 | # if $jobs-cnt >= %*ENV { 220 | # say "{DateTime.now} --- $jobs-cnt builds run, SPARKY_MAX_JOBS={%*ENV}, SKIP ... "; 221 | # return; 222 | # } 223 | #} 224 | 225 | if "$dir/sparky.yaml".IO ~~ :f { 226 | 227 | say "{DateTime.now} --- sparkyd: parse sparky job yaml config from: $dir/sparky.yaml"; 228 | 229 | try { %config = load-yaml(slurp "$dir/sparky.yaml") }; 230 | 231 | if $! { 232 | my $error = $!; 233 | say "{DateTime.now} --- sparkyd: error parsing $dir/sparky.yaml"; 234 | say $error; 235 | return "{DateTime.now} --- sparkyd: remove build from schedulling" 236 | } 237 | 238 | } 239 | 240 | if %config { 241 | say "{DateTime.now} --- [$project] build is disabled, SKIP ... "; 242 | return; 243 | } 244 | 245 | # check triggered jobs 246 | 247 | my $trigger-file; 248 | my $run-by-trigger = False; 249 | 250 | if "{$dir}/.triggers/".IO ~~ :d { 251 | for dir("{$dir}/.triggers/".sort({.IO.changed})) -> $file { 252 | $run-by-trigger = True; 253 | $trigger-file = $file.IO.absolute; 254 | last; 255 | } 256 | } 257 | 258 | if $run-by-trigger { 259 | 260 | say "{DateTime.now} --- [$project] build trigerred by file trigger <$trigger-file> ..."; 261 | 262 | if ! build-is-running($dir) { 263 | 264 | Proc::Async.new( 265 | 'sparky-runner.raku', 266 | "--marker=$project", 267 | "--dir=$dir", 268 | "--trigger=$trigger-file", 269 | "--make-report" 270 | ).start; 271 | 272 | } 273 | 274 | } 275 | 276 | # schedulling cron jobs 277 | 278 | if %config and ! %*ENV and ! %opts { 279 | 280 | my $crontab = %config; 281 | 282 | my $tc = Time::Crontab.new(:$crontab); 283 | 284 | if $tc.match(DateTime.now, :truncate(True)) { 285 | 286 | my $cron-lock-file = "{$dir}/../../work/{$project}/.lock/cron"; 287 | 288 | if $cron-lock-file.IO ~~ :f && ( now - "{$cron-lock-file}".IO.modified ).Int < 60 { 289 | say "{DateTime.now} --- [$project] cron lock file exists with an age less then 60 secs, SKIP ..."; 290 | next; 291 | } 292 | 293 | say "{DateTime.now} --- [$project] build queued by cron trigger: <$crontab> ..."; 294 | 295 | mkdir "{$dir}/../../work/{$project}/.lock/" unless "{$dir}/../../work/{$project}/.lock/".IO ~~ :d; 296 | 297 | $cron-lock-file.IO.spurt(""); 298 | 299 | my $id = "{('a' .. 'z').pick(20).join('')}{$*PID}"; 300 | 301 | mkdir "$dir/.triggers"; 302 | 303 | spurt "$dir/.triggers/$id", "%( 304 | description => 'triggered by cron' 305 | )"; 306 | } elsif %config { 307 | say "{DateTime.now} --- [$project] build is skipped by cron, by will be tried on scm basis"; 308 | } else { 309 | say "{DateTime.now} --- [$project] build is skipped by cron: $crontab ... 
"; 310 | return; 311 | } 312 | } 313 | 314 | # schedulling scm jobs 315 | 316 | if %config { 317 | 318 | my $scm-url = %config; 319 | 320 | my $scm-branch = %config || 'master'; 321 | 322 | my $scm-dir = "{$dir}/../../work/{$project}/.scm"; 323 | 324 | mkdir $scm-dir unless $scm-dir.IO ~~ :d; 325 | 326 | say "{DateTime.now} --- scm: fetch commits from {$scm-url} {$scm-branch} ..."; 327 | 328 | shell("timeout 10 git ls-remote {$scm-url} {$scm-branch} 1>{$scm-dir}/data; echo \$? > {$scm-dir}/exit-code"); 329 | 330 | my $ex-code = "{$scm-dir}/exit-code".IO.slurp.chomp; 331 | 332 | if $ex-code ne "0" { 333 | say "{DateTime.now} --- scm: {$scm-url} {$scm-branch} - bad exit code - {$ex-code}"; 334 | return $ex-code; 335 | } else { 336 | say "{DateTime.now} --- scm: {$scm-url} {$scm-branch} - good exit code - {$ex-code}"; 337 | } 338 | 339 | my $commit-data = "{$scm-dir}/data".IO.slurp.chomp; 340 | 341 | my $current-commit; 342 | 343 | if $commit-data ~~ /^^ (\S+) / { 344 | $current-commit = "{$0}"; 345 | } 346 | 347 | my $current-commit-short = ($current-commit ~~ /\S/) ?? $current-commit.chop(32) !! "HEAD"; 348 | 349 | if $current-commit ~~ /\S/ { 350 | 351 | my $last-commit; 352 | 353 | my $trigger-build = False; 354 | 355 | if "{$scm-dir}/last.commit".IO ~~ :f { 356 | $last-commit = "{$scm-dir}/last.commit".IO.slurp; 357 | if $current-commit ne $last-commit { 358 | $trigger-build = True; 359 | "{$scm-dir}/last.commit".IO.spurt($current-commit); 360 | } 361 | 362 | } else { 363 | "{$scm-dir}/last.commit".IO.spurt($current-commit); 364 | $trigger-build = True; 365 | } 366 | 367 | if $trigger-build { 368 | 369 | my $id = "{('a' .. 'z').pick(20).join('')}{$*PID}"; 370 | 371 | mkdir "$dir/.triggers"; 372 | 373 | my %trigger = %( 374 | description => "run by scm {$scm-branch} [{$current-commit-short}]" 375 | ); 376 | 377 | %trigger = %( 378 | tags => "SCM_SHA={$current-commit-short},SCM_URL={$scm-url},SCM_BRANCH={$scm-branch}" 379 | ); 380 | 381 | spurt "$dir/.triggers/$id", %trigger.perl; 382 | 383 | } 384 | 385 | } 386 | 387 | return; 388 | 389 | } 390 | 391 | # handle other jobs (none crontab and scm) 392 | 393 | if !%config && !%config { 394 | say "{DateTime.now} --- [$project] neither crontab nor scm setup found, consider manual start, SKIP ... "; 395 | return; 396 | } 397 | 398 | 399 | } 400 | 401 | sub find-triggers ($root) is export { 402 | 403 | my @triggers; 404 | 405 | for dir($root) -> $dir { 406 | 407 | next if "$dir".IO ~~ :f; 408 | next if $dir.basename eq '.git'; 409 | next if $dir.basename eq '.reports'; 410 | next if $dir.basename eq 'db.sqlite3-journal'; 411 | next unless "$dir/sparrowfile".IO ~~ :f; 412 | 413 | my $project = $dir.IO.basename; 414 | 415 | if "{$dir}/.triggers/".IO ~~ :d { 416 | for dir("{$dir}/.triggers/") -> $file { 417 | say ">> load trigger from file $file ..."; 418 | my %trigger = EVALFILE($file); 419 | %trigger = $project; 420 | %trigger = $file; 421 | %trigger
= $file.IO.modified.DateTime; 422 | %trigger = $file.IO.slurp; 423 | push @triggers, %trigger; 424 | } 425 | } 426 | 427 | } 428 | 429 | return @triggers; 430 | } 431 | 432 | sub trigger-exists ($root,$project,$job-id) is export { 433 | 434 | if "{$root}/$project/.triggers/{$job-id}".IO ~~ :f { 435 | return True 436 | } else { 437 | return False 438 | } 439 | 440 | } 441 | 442 | sub job-state-exists ($root,$project,$job-id) is export { 443 | 444 | if "{$root}/../work/$project/.states/$job-id".IO ~~ :f { 445 | return True 446 | } else { 447 | return False 448 | } 449 | 450 | } 451 | 452 | sub job-state ($root,$project,$job-id) is export { 453 | 454 | "{$root}/../work/$project/.states/$job-id".IO.slurp 455 | 456 | } 457 | 458 | sub cache-root is export { 459 | 460 | "{%*ENV}/.sparky/"; 461 | 462 | } 463 | -------------------------------------------------------------------------------- /lib/Sparky/HTML.rakumod: -------------------------------------------------------------------------------- 1 | unit module Sparky::HTML; 2 | 3 | use Sparky; 4 | use Sparky::Security; 5 | 6 | my $bulma-version = "1.0.1"; 7 | 8 | sub css () is export { 9 | 10 | my %conf = get-sparky-conf(); 11 | 12 | qq:to /HERE/ 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | HERE 22 | 23 | } 24 | 25 | sub navbar (Mu $user?, Mu $token?) is export { 26 | 27 | qq:to /HERE/ 28 | 70 | HERE 71 | 72 | } 73 | 74 | -------------------------------------------------------------------------------- /lib/Sparky/Job.rakumod: -------------------------------------------------------------------------------- 1 | #!raku 2 | 3 | unit module Sparky::Job; 4 | use JSON::Fast; 5 | 6 | sub job-queue-fs (%config,%trigger,$sparrowfile,$sparrowdo-config) is export { 7 | 8 | my $project = %config; 9 | 10 | my $job-id = %config; 11 | 12 | my $sparky-project-dir = "{%*ENV}/.sparky/projects/{$project}"; 13 | 14 | mkdir "{$sparky-project-dir}/.triggers" unless "{$sparky-project-dir}/.triggers".IO ~~ :d; 15 | 16 | unless "{$sparky-project-dir}/sparrowfile".IO ~~ :f { 17 | spurt "{$sparky-project-dir}/sparrowfile", "# dummy file, generated by sparrowdo"; 18 | } 19 | 20 | my $cache-dir = "{%*ENV}/.sparky/.cache/$job-id/"; 21 | 22 | mkdir $cache-dir; 23 | 24 | "{$cache-dir}/config.pl6".IO.spurt($sparrowdo-config.perl); 25 | 26 | %trigger = $cache-dir; 27 | 28 | # override parent job sparrowdo configuration 29 | # by %config 30 | 31 | %trigger ||= {}; 32 | 33 | if %config { 34 | for %config.keys -> $k { 35 | %trigger{$k} = %config{$k}; 36 | } 37 | # handle conflicting parameters 38 | if %config { 39 | %trigger:delete; 40 | %trigger:delete; 41 | } elsif %config { 42 | %trigger:delete; 43 | %trigger:delete; 44 | } elsif %config { 45 | %trigger:delete; 46 | %trigger:delete; 47 | } 48 | if %config { 49 | %trigger:delete; 50 | } 51 | } 52 | 53 | 54 | %trigger = %config || "spawned job"; 55 | 56 | # override sparrowdo tags by %config 57 | 58 | if %config { 59 | %trigger = %config.map({ 60 | my $k = $_.key; 61 | my $v = $_.value; 62 | $v = "" unless defined($v); 63 | my $v-safe = $v.subst(',',"___comma___",:g).subst('=','___eq___',:g); 64 | "{$k}={$v-safe}" 65 | }).join(","); 66 | } 67 | 68 | %trigger = "config.pl6"; 69 | 70 | say "job-queue-fs: create trigger file: {$sparky-project-dir}/.triggers/$job-id"; 71 | 72 | "{$sparky-project-dir}/.triggers/$job-id".IO.spurt(%trigger.perl); 73 | 74 | "{$cache-dir}/sparrowfile".IO.spurt($sparrowfile); 75 | 76 | return { project => $project, job-id => $job-id }; 77 | 78 | } 79 | 80 | 81 | sub put-job-stash (%config,%data) is export { 
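# Persist %data as JSON under the project's .stash directory, keyed by job id,
# so a parent job and its spawned jobs can exchange state
# (see examples/job-stash-parent-child and examples/job-stash-child-parent).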
82 | 83 | my $project = %config; 84 | 85 | my $job-id = %config; 86 | 87 | my $sparky-project-dir = "{%*ENV}/.sparky/projects/{$project}"; 88 | 89 | mkdir "{$sparky-project-dir}/.stash" unless "{$sparky-project-dir}/.stash".IO ~~ :d; 90 | 91 | say "put-job-stash: put data to stash - {$sparky-project-dir}/.stash/$job-id ..."; 92 | 93 | "{$sparky-project-dir}/.stash/$job-id".IO.spurt(to-json(%data)); 94 | 95 | return { path => "{$sparky-project-dir}/.stash/$job-id" }; 96 | } 97 | 98 | sub put-job-file ($project,$job-id,$filename,$data) is export { 99 | 100 | my $sparky-project-dir = "{%*ENV}/.sparky/projects/{$project}"; 101 | 102 | mkdir "{$sparky-project-dir}/.files" unless "{$sparky-project-dir}/.files".IO ~~ :d; 103 | 104 | mkdir "{$sparky-project-dir}/.files/{$job-id}" unless "{$sparky-project-dir}/.files/{$job-id}".IO ~~ :d; 105 | 106 | say "put-job-file: create job file - {$sparky-project-dir}/.files/{$job-id}/{$filename} ..."; 107 | 108 | "{$sparky-project-dir}/.files/{$job-id}/{$filename}".IO.spurt($data); 109 | 110 | return { path => "{$sparky-project-dir}/.files/{$job-id}/$filename" }; 111 | 112 | } 113 | 114 | sub get-job-stash ($project,$job-id) is export { 115 | 116 | my $sparky-project-dir = "{%*ENV}/.sparky/projects/{$project}"; 117 | 118 | return 119 | "{$sparky-project-dir}/.stash/$job-id".IO ~~ :f ?? 120 | "{$sparky-project-dir}/.stash/$job-id".IO.slurp !! 121 | '{}' 122 | } 123 | 124 | sub get-job-file ($project,$job-id,$filename) is export { 125 | 126 | my $sparky-project-dir = "{%*ENV}/.sparky/projects/{$project}"; 127 | 128 | return "{$sparky-project-dir}/.files/{$job-id}/{$filename}"; 129 | 130 | } 131 | 132 | sub get-job-dir ($project,$job-id) is export { 133 | 134 | my $sparky-project-dir = "{%*ENV}/.sparky/projects/{$project}"; 135 | 136 | return "{$sparky-project-dir}/.files/{$job-id}"; 137 | 138 | } 139 | -------------------------------------------------------------------------------- /lib/Sparky/Security.rakumod: -------------------------------------------------------------------------------- 1 | unit module Sparky::Security; 2 | use Sparky; 3 | use Sparky::Utils; 4 | use JSON::Fast; 5 | use YAMLish; 6 | 7 | sub gen-token is export { 8 | 9 | ("a".."z","A".."Z",0..9).flat.roll(8).join 10 | 11 | } 12 | 13 | sub check-user (Mu $user, Mu $token, $project?) 
is export { 14 | 15 | # anonymous access denied 16 | 17 | return False unless $user; 18 | 19 | return False unless $token; 20 | 21 | my $any-user = "*"; 22 | 23 | if "{cache-root()}/users/{$user}/tokens/{$token}".IO ~ :f { 24 | #say "user $user, token - $token - validation passed"; 25 | return True unless $project; 26 | my $list = load-acl-list(); 27 | # in case no ACL, allow all authenticated users to do all 28 | unless $list { 29 | say "check-user: no ACL found, allow user [$user] on default basis"; 30 | return True 31 | } 32 | 33 | say "check-user: ACL loaded: {$list.perl}"; 34 | 35 | if $list{$project} && 36 | $list{$project}.isa(List) && 37 | $list{$project}.Set{$user} { 38 | say "check-user: deny user [$user] build project [$project] on project deny basis"; 39 | return False; 40 | } elsif $list && 41 | $list.isa(List) && 42 | $list.Set{$user} { 43 | say "check-user: deny user [$user] build project [$project] on global deny basis"; 44 | return False; 45 | } elsif $list{$project} && 46 | $list{$project}.isa(List) && 47 | ( 48 | $list{$project}.Set{$user} or 49 | $list{$project}.Set{$any-user} 50 | ) { 51 | say "check-user: allow user [$user] to build project [$project] on project allow basis"; 52 | return True; 53 | } elsif $list && 54 | $list.isa(List) && 55 | ( 56 | $list.Set{$user} or 57 | $list.Set{$any-user} 58 | ) 59 | { 60 | say "check-user: allow user [$user] build project [$project] on global allow basis"; 61 | return True; 62 | } else { 63 | say "check-user: deny user [$user] build project [$project] on default basis"; 64 | return False 65 | } 66 | } else { 67 | say "check-user: user $user, token - $token - validation failed"; 68 | return False 69 | } 70 | } 71 | 72 | sub user-create-account (Mu $user, $data = {}) is export { 73 | 74 | mkdir "{cache-root()}/users"; 75 | 76 | mkdir "{cache-root()}/users/{$user}"; 77 | 78 | mkdir "{cache-root()}/users/{$user}/tokens"; 79 | 80 | "{cache-root()}/users/{$user}/meta.json".IO.spurt( 81 | to-json($data) 82 | ); 83 | 84 | say "auth: save user data to {cache-root()}/users/{$user}/meta.json"; 85 | 86 | my $tk = gen-token(); 87 | 88 | "{cache-root()}/users/$user/tokens/{$tk}".IO.spurt(""); 89 | 90 | say "auth: set user token to {$tk}"; 91 | 92 | return $tk; 93 | 94 | } 95 | 96 | 97 | sub load-acl-list { 98 | 99 | if "{%*ENV}/.sparky/acl/hosts/{hostname()}/list.yaml".IO ~~ :e { 100 | say "acl: load acl from {%*ENV}/.sparky/acl/hosts/{hostname()}/list.yaml"; 101 | return load-yaml("{%*ENV}/.sparky/acl/hosts/{hostname()}/list.yaml".IO.slurp); 102 | } elsif "{%*ENV}/.sparky/acl/list.yaml".IO ~~ :e { 103 | say "acl: load acl from {%*ENV}/.sparky/acl/list.yaml"; 104 | return load-yaml("{%*ENV}/.sparky/acl/list.yaml".IO.slurp); 105 | } else { 106 | return 107 | } 108 | 109 | } 110 | -------------------------------------------------------------------------------- /lib/Sparky/Utils.rakumod: -------------------------------------------------------------------------------- 1 | unit module Sparky::Utils; 2 | 3 | sub hostname () is export { 4 | 5 | return %*ENV ?? 6 | %*ENV !! 
7 | qx[hostname].chomp; 8 | 9 | } 10 | 11 | 12 | sub get-template-var ($data,$path) is export { 13 | 14 | return unless $data; 15 | return unless $path; 16 | 17 | my $search = $data; 18 | 19 | for $path.split('.') -> $i { 20 | if $search{$i}:exists && $search{$i}.isa(Hash) { 21 | say "get-template-var: $i - enter new path"; 22 | $search = $search{$i} 23 | } elsif $search{$i}:exists { 24 | say "get-template-var: $i - found OK"; 25 | return $search{$i}; 26 | } else { 27 | say "get-template-var: $i - found FAIL"; 28 | return 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /logos/sparky.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melezhik/sparky/554356891dcbf7e5460d7209891ca62515406ca2/logos/sparky.png -------------------------------------------------------------------------------- /logos/sparky.small.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/melezhik/sparky/554356891dcbf7e5460d7209891ca62515406ca2/logos/sparky.small.png -------------------------------------------------------------------------------- /misc/yaml-test.raku: -------------------------------------------------------------------------------- 1 | use YAMLish; 2 | 3 | my $data = load-yaml('plugins: 4 | Sparky::Plugin::Email: 5 | parameters: 6 | subject: "I finished" 7 | to: "happy@user.email" 8 | text: "here will be log" 9 | Sparky::Plugin::Hello: 10 | parameters: 11 | name: Sparrow 12 | '); 13 | 14 | say $data.perl; 15 | 16 | for $data<>.kv -> $k, $v { 17 | say $k; 18 | } 19 | -------------------------------------------------------------------------------- /systemd/sparky-web.service: -------------------------------------------------------------------------------- 1 | 2 | [Unit] 3 | Description=sparky.web 4 | After=network.target 5 | 6 | [Service] 7 | Type=simple 8 | User=sparky 9 | WorkingDirectory=/home/sparky/sparky 10 | ExecStart=/usr/bin/bash --login -c "cd /home/sparky/sparky && perl6 bin/sparky-web.pl6" 11 | Restart=on-abort 12 | MemoryLimit=500M 13 | 14 | [Install] 15 | WantedBy=multi-user.target 16 | 17 | -------------------------------------------------------------------------------- /systemd/sparkyd.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=sparkyd 3 | After=network.target 4 | 5 | [Service] 6 | Type=simple 7 | User=sparky 8 | ExecStart=/usr/bin/bash --login -c 'sparkyd' 9 | Restart=on-abort 10 | #MemoryLimit=400M 11 | 12 | [Install] 13 | WantedBy=multi-user.target 14 | 15 | -------------------------------------------------------------------------------- /t/00-run.t: -------------------------------------------------------------------------------- 1 | use v6; 2 | use Test; 3 | plan 1; 4 | ok 1, "it's ok so far"; 5 | 6 | -------------------------------------------------------------------------------- /templates/about.crotmp: -------------------------------------------------------------------------------- 1 | <&HTML-AND-JAVASCRIPT(.css)> 2 | 3 | 4 | ~SPARKY CI WEB~ | about 5 | 6 | 7 | <&HTML-AND-JAVASCRIPT(.navbar)> 8 | 9 | 19 | 20 | 21 | -------------------------------------------------------------------------------- /templates/build.crotmp: -------------------------------------------------------------------------------- 1 | <&HTML-AND-JAVASCRIPT(.css)> 2 | 3 | ~SPARKY CI WEB~ | Build project: <.project> 4 | 5 | 6 | 
<&HTML-AND-JAVASCRIPT(.navbar)> 7 |
[build.crotmp body: markup stripped in this export; surviving fragments: "Build: <.project>", loop directives <@group_vars: $g>, <@vars: $i>, <@$i.values: $s>]
86 | 87 | 188 | -------------------------------------------------------------------------------- /templates/builds.crotmp: -------------------------------------------------------------------------------- 1 | <&HTML-AND-JAVASCRIPT(.css)> 2 | 3 | 4 | ~SPARKY CI WEB~ 5 | 6 | 7 | <&HTML-AND-JAVASCRIPT(.navbar)> 8 | 9 | 59 | -------------------------------------------------------------------------------- /templates/default_login.crotmp: -------------------------------------------------------------------------------- 1 | <&HTML-AND-JAVASCRIPT(.css)> 2 | 3 | ~SPARKY CI WEB~ | Login 4 | 5 | <&HTML-AND-JAVASCRIPT(.navbar)> 6 |
[default_login.crotmp body: markup stripped in this export; surviving fragments: "Login" heading, two <.message> slots]
40 | -------------------------------------------------------------------------------- /templates/project.crotmp: -------------------------------------------------------------------------------- 1 | <&HTML-AND-JAVASCRIPT(.css)> 2 | 3 | 4 | ~SPARKY CI WEB~ | project: <.project> 5 | 6 | 7 | <&HTML-AND-JAVASCRIPT(.navbar)> 8 | 9 |
[project.crotmp body: markup stripped in this export; surviving fragments: "Project: <.project>", "Build now" action, "Configuration" section with "YAML Error" marker and <.project-conf-str>, "Job" section with <.scenario-code>]
44 | -------------------------------------------------------------------------------- /templates/projects.crotmp: -------------------------------------------------------------------------------- 1 | <&HTML-AND-JAVASCRIPT(.css)> 2 | 3 | ~SPARKY CI WEB~ 4 | 5 | <&HTML-AND-JAVASCRIPT(.navbar)> 6 | 65 | 66 | 106 | -------------------------------------------------------------------------------- /templates/queue.crotmp: -------------------------------------------------------------------------------- 1 | <&HTML-AND-JAVASCRIPT(.css)> 2 | 3 | 4 | ~SPARKY CI WEB~ 5 | 6 | 7 | <&HTML-AND-JAVASCRIPT(.navbar)> 8 | 9 | 37 | -------------------------------------------------------------------------------- /templates/report2.crotmp: -------------------------------------------------------------------------------- 1 | <&HTML-AND-JAVASCRIPT(.css)> 2 | 3 | 4 | ~SPARKY CI WEB~ | build: <.project>@<.build_id> 5 | 6 | 7 | 8 | 9 | 10 | <&HTML-AND-JAVASCRIPT(.navbar)> 11 | 12 |
[report2.crotmp body: markup stripped in this export; no text content survives]
58 | 59 | 153 | -------------------------------------------------------------------------------- /utils/generate-cert.sh: -------------------------------------------------------------------------------- 1 | set -e 2 | 3 | mkdir -p ~/.sparky/certs/ 4 | 5 | openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 \ 6 | -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.sparkyci.com" \ 7 | -keyout ~/.sparky/certs/www.sparkyci.com.key -out ~/.sparky/certs/www.sparkyci.com.cert 8 | -------------------------------------------------------------------------------- /utils/install-sparky-single-node.raku: -------------------------------------------------------------------------------- 1 | #!raku 2 | 3 | my $home = %*ENV; 4 | 5 | directory "$home/projects/Sparky"; 6 | 7 | bash "sudo dnf install -y git make"; 8 | 9 | #bash "zef install https://github.com/melezhik/Sparrow6.git --/test"; 10 | 11 | 12 | git-scm "https://github.com/melezhik/sparky.git", %( 13 | to => "$home/projects/Sparky"; 14 | ); 15 | 16 | bash "zef install .", %( 17 | :description, 18 | :cwd("$home/projects/Sparky"), 19 | ); 20 | -------------------------------------------------------------------------------- /utils/install-sparky-web-systemd.raku: -------------------------------------------------------------------------------- 1 | package-install "libtemplate-perl carton"; 2 | 3 | my $user = "sph"; 4 | 5 | systemd-service "sparky-web", %( 6 | user => $user, 7 | workdir => "/home/$user/projects/sparky", 8 | command => "/usr/bin/bash --login -c 'cro run 1>>~/.sparky/sparky.log 2>&1'" 9 | ); 10 | 11 | bash "systemctl daemon-reload"; 12 | 13 | # start service 14 | 15 | service-restart "sparky-web"; 16 | 17 | service-enable "sparky-web"; 18 | 19 | -------------------------------------------------------------------------------- /utils/install-sparky.raku: -------------------------------------------------------------------------------- 1 | use Sparky::JobApi; 2 | 3 | class Pipeline does Sparky::JobApi::Role { 4 | 5 | has Str $.comp = tags() || "main"; 6 | has Str $.ssh-user = tags() || "sparky"; 7 | has Str $.host = tags() || ""; 8 | has Str $.name = tags() || ""; 9 | has Str $.api-token = tags() || ""; 10 | has Str $.ssl = tags() || "True"; 11 | 12 | method !queue-libs() { 13 | 14 | my $j = self.new-job; 15 | 16 | say "host={$.host}"; 17 | say "ssh_user={$.ssh-user}"; 18 | 19 | $j.queue({ 20 | description => "sparky libs on {$.name}", 21 | tags => %( 22 | stage => "libs", 23 | host => $.host, 24 | name => $.name, 25 | ssh-user => $.ssh-user, 26 | ), 27 | sparrowdo => %( 28 | bootstrap => True, 29 | sudo => True, 30 | host => $.host, 31 | ssh_user => $.ssh-user, 32 | verbose => True, 33 | ); 34 | }); 35 | 36 | say "queue spawned job, ",$j.info.perl; 37 | 38 | my $s = self.wait-job($j); 39 | 40 | die if $s; 41 | 42 | } 43 | 44 | method !queue-raku-libs() { 45 | 46 | my $j = self.new-job; 47 | 48 | $j.queue({ 49 | description => "sparky raku libs on {$.name}", 50 | tags => %( 51 | stage => "raku-libs", 52 | host => $.host, 53 | name => $.name, 54 | ssh-user => $.ssh-user, 55 | api-token => $.api-token, 56 | ), 57 | sparrowdo => %( 58 | bootstrap => False, 59 | no_sudo => True, 60 | host => $.host, 61 | ssh_user => $.ssh-user, 62 | ); 63 | }); 64 | 65 | say "queue spawned job, ",$j.info.perl; 66 | 67 | my $s = self.wait-job($j); 68 | 69 | die if $s; 70 | 71 | } 72 | 73 | method !queue-services() { 74 | 75 | my $j = self.new-job; 76 | 77 | $j.queue({ 78 | description => "sparky services on {$.name}", 79 | tags => %( 80 | stage => "services", 81 
| host => $.host, 82 | name => $.name, 83 | ssh-user => $.ssh-user, 84 | ssl => $.ssl, 85 | ), 86 | sparrowdo => %( 87 | bootstrap => False, 88 | no_sudo => False, 89 | host => $.host, 90 | ssh_user => $.ssh-user, 91 | ); 92 | }); 93 | 94 | say "queue spawned job, ",$j.info.perl; 95 | 96 | my $s = self.wait-job($j); 97 | 98 | die if $s; 99 | 100 | } 101 | 102 | 103 | method stage-main() { 104 | 105 | my @q; 106 | 107 | for config()<> -> $w { 108 | 109 | my $j = self.new-job: :project("install-sparky-{$w}"); 110 | 111 | $j.queue({ 112 | description => "bootstrap sparky on {$w}", 113 | tags => %( 114 | stage => "worker", 115 | host => $w, 116 | name => $w, 117 | ssh-user => $w, 118 | ssl => $.ssl, 119 | comp => $.comp, 120 | ), 121 | }); 122 | 123 | @q.push: $j; 124 | 125 | say "queue spawned job:", $j.info.perl; 126 | 127 | } 128 | 129 | my $s = self.wait-jobs(@q); 130 | 131 | die if $s; 132 | 133 | 134 | } 135 | 136 | method stage-worker() { 137 | 138 | self!queue-libs() if $.comp eq "main" or $.comp eq "libs"; 139 | 140 | self!queue-raku-libs() if $.comp eq "main" or $.comp eq "raku-libs"; 141 | 142 | self!queue-services() if $.comp eq "main" or $.comp eq "services"; 143 | 144 | } 145 | 146 | method stage-libs { 147 | 148 | package-install "libssl-dev"; 149 | package-install "libtemplate-perl carton"; 150 | 151 | } 152 | 153 | method stage-raku-libs() { 154 | 155 | bash "zef --version || /opt/rakudo-pkg/bin/install-zef"; 156 | 157 | for 'https://github.com/melezhik/sparrowdo.git', 158 | 'https://github.com/melezhik/sparky.git', 159 | 'https://github.com/melezhik/sparky-job-api.git' -> $i { 160 | 161 | zef $i, %( notest => True ); 162 | 163 | } 164 | 165 | directory "/home/{$.ssh-user}/projects/"; 166 | 167 | directory "/home/{$.ssh-user}/projects/Sparky"; 168 | 169 | git-scm "https://github.com/melezhik/sparky.git", %( 170 | to => "/home/{$.ssh-user}/projects/Sparky"; 171 | ); 172 | 173 | bash "if test -f ~/.sparky/projects/db.sqlite3; then echo db.sqlite3 exists; else raku db-init.raku; fi", %( 174 | description => "create sparky database", 175 | cwd => "/home/{$.ssh-user}/projects/Sparky" 176 | ); 177 | 178 | } 179 | 180 | method stage-services() { 181 | 182 | my $sc; # sparky config 183 | 184 | if $.api-token { 185 | $sc~="SPARKY_API_TOKEN: {$.api-token}\n"; 186 | } 187 | 188 | if $.ssl eq 'True' { 189 | 190 | $sc~="SPARKY_USE_TLS: True\n"; 191 | 192 | my %state = task-run "create", "openssl-cert", %( 193 | CN => "www.{$.host}" 194 | ); 195 | 196 | file "/home/{$.ssh-user}/.sparky/key", %( 197 | owner => "{$.ssh-user}", 198 | content => %state, 199 | ); 200 | 201 | file "/home/{$.ssh-user}/.sparky/cert", %( 202 | owner => "{$.ssh-user}", 203 | content => %state, 204 | ); 205 | 206 | $sc~="tls:\n private-key-file: /home/{$.ssh-user}/.sparky/key\n"; 207 | $sc~="\n certificate-file: /home/{$.ssh-user}/.sparky/cert\n"; 208 | 209 | } 210 | 211 | "/home/{$.ssh-user}/sparky.yaml".IO.spurt($sc); 212 | 213 | systemd-service "sparky-web", %( 214 | user => $.ssh-user, 215 | workdir => "/home/{$.ssh-user}/projects/Sparky", 216 | command => "/usr/bin/bash --login -c 'export PATH=~/.raku/bin:\$PATH && cd /home/{$.ssh-user}/projects/Sparky && cro run'" 217 | ); 218 | 219 | systemd-service "sparkyd", %( 220 | user => $.ssh-user, 221 | workdir => "/home/{$.ssh-user}/projects/Sparky", 222 | command => "/usr/bin/bash --login -c 'export PATH=~/.raku/bin:\$PATH && sparkyd'" 223 | ); 224 | 225 | sleep(3); 226 | 227 | bash "systemctl daemon-reload"; 228 | 229 | #service-restart "sparky-web"; 230 | 231 | 
service-enable "sparky-web"; 232 | 233 | #service-restart "sparkyd"; 234 | 235 | service-enable "sparkyd"; 236 | 237 | } 238 | 239 | } 240 | 241 | 242 | Pipeline.new.run(); 243 | 244 | -------------------------------------------------------------------------------- /utils/install-sparky.sh: -------------------------------------------------------------------------------- 1 | sparrowdo \ 2 | --desc="install sparky" \ 3 | --localhost \ 4 | --sparrowfile=utils/install-sparky.raku \ 5 | --tags=api-token=secret \ 6 | --conf=conf/sparky-cluster.raku \ 7 | --no_sudo \ 8 | --with_sparky 9 | -------------------------------------------------------------------------------- /utils/install-sparkyd-systemd.raku: -------------------------------------------------------------------------------- 1 | my $user = "sph"; 2 | 3 | systemd-service "sparkyd", %( 4 | user => $user, 5 | workdir => "/home/$user/projects/sparky", 6 | command => "/usr/bin/bash --login -c 'sparkyd --timeout=20 2>&1 1>>~/.sparky/sparkyd.log'" 7 | ); 8 | 9 | # start service 10 | 11 | service-restart "sparkyd"; 12 | service-enable "sparkyd"; 13 | 14 | -------------------------------------------------------------------------------- /utils/update-sparky.raku: -------------------------------------------------------------------------------- 1 | use Sparky::JobApi; 2 | 3 | class Pipeline does Sparky::JobApi::Role { 4 | 5 | method stage-update { 6 | 7 | #bash "echo 'SPARKY_API_TOKEN: {tags()}' > ~/sparky.yaml", %( 8 | # description => "set token" 9 | #); 10 | 11 | for 'Sparky', 'sparky-job-api', 'sparrowdo' -> $app { 12 | 13 | say "update [$app] ..."; 14 | 15 | directory "{%*ENV}/projects/$app"; 16 | 17 | git-scm "https://github.com/melezhik/{$app}.git", %( 18 | to => "{%*ENV}/projects/$app" 19 | ); 20 | 21 | chdir "{%*ENV}/projects/$app"; 22 | 23 | zef '.', %( force => True ); 24 | 25 | } 26 | 27 | } 28 | 29 | 30 | method stage-main { 31 | 32 | my @q; 33 | 34 | for config()<> -> $w { 35 | 36 | my $j = self.new-job: :project("update-sparky"), :api($w); 37 | 38 | $j.queue({ 39 | description => "sparky update", 40 | tags => %( 41 | stage => "update", 42 | ), 43 | #sparrowdo => %( 44 | # no_sudo => True, 45 | # bootstrap => False 46 | #), 47 | }); 48 | 49 | say "queue spawned job, ",$j.info.perl; 50 | @q.push: $j; 51 | 52 | } 53 | 54 | my $s = self.wait-jobs(@q); 55 | 56 | die if $s; 57 | 58 | } 59 | 60 | } 61 | 62 | Pipeline.new.run; 63 | -------------------------------------------------------------------------------- /utils/update-sparky.sh: -------------------------------------------------------------------------------- 1 | sparrowdo --desc="update sparky" \ 2 | --localhost \ 3 | --sparrowfile=utils/update-sparky.raku \ 4 | --conf=conf/sparky-cluster.raku --no_sudo \ 5 | --with_sparky 6 | -------------------------------------------------------------------------------- /watcher.sh: -------------------------------------------------------------------------------- 1 | set -x 2 | set -e 3 | 4 | if ps uax|grep bin/sparky-web.raku|grep -v grep -q; then 5 | echo "sparky-web already running" 6 | else 7 | cd ~/projects/sparky 8 | export SPARKY_HTTP_ROOT="/sparky" 9 | export SPARKY_ROOT=/home/rakudist/projects/RakuDist/sparky 10 | export BAILADOR=host:0.0.0.0,port:5000 11 | nohup raku bin/sparky-web.raku > sparky-web.log & 12 | fi 13 | --------------------------------------------------------------------------------
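A possible way to run watcher.sh periodically via cron, shown as an assumption for illustration; the repository does not ship a crontab entry, and the schedule and log path here are illustrative (the script itself expects to live under ~/projects/sparky, per its cd line):

  # hypothetical crontab line for the user owning ~/projects/sparky
  * * * * * /bin/bash $HOME/projects/sparky/watcher.sh >> $HOME/watcher.log 2>&1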