├── .dockerignore
├── .gitignore
├── Dockerfile
├── LICENSE
├── README.md
├── SECURITY.md
├── build.zsh
├── codebase
├── anka_install_create_clone.zsh
├── autopkg_tools.py
├── helpers
│ ├── anka_bootstrap.zsh
│ └── slack_notify.zsh
└── main_orchestrator.zsh
├── config.json
├── example-recipes
├── AdobeAcrobatProDC.pkg.recipe
├── AndroidStudio.pkg.recipe
├── BraveBrowser.pkg.recipe
├── CacheRecipeMetadata
│ ├── CacheRecipeMetadata.py
│ └── io.kandji.cachedata.recipe
├── Docker.pkg.recipe
├── GitHubDesktop.pkg.recipe
├── GoogleChrome.pkg.recipe
├── MicrosoftExcel.pkg.recipe
├── MicrosoftPowerPoint.pkg.recipe
├── MicrosoftRemoteDesktop.pkg.recipe
├── MicrosoftWord.pkg.recipe
├── PyCharmCE.pkg.recipe
├── TableauDesktop.pkg.recipe
├── VLC.pkg.recipe
└── Zoom.pkg.recipe
└── recipe_list.json
/.dockerignore:
--------------------------------------------------------------------------------
1 | README.md
2 | LICENSE
3 | build.zsh
4 | codebase/anka_install_create_clone.zsh
5 | Dockerfile
6 | autopkg-runner-results
7 |
8 | # Git files
9 | .git*
10 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # Environments
7 | .env
8 | .venv
9 | env/
10 | venv/
11 | ENV/
12 | env.bak/
13 | venv.bak/
14 |
15 | # macOS stuff
16 | .DS_Store
17 |
18 | # Vim stuff
19 | *.swp
20 |
21 | # Runtime-specific results
22 | autopkg-runner-results
23 | autopkg_metadata.json
24 | running_vms.json
25 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
# Slim Debian base keeps the orchestration image small
FROM debian:bullseye-slim AS base
WORKDIR /app

# Runtime deps: zsh (orchestrator scripts), jq (JSON parsing), curl, ssh/sshpass (VM access)
# --no-install-recommends and clearing the apt lists keep this layer minimal
RUN apt-get update && apt-get install -y --no-install-recommends \
        openssh-server jq zsh curl sshpass \
    && rm -rf /var/lib/apt/lists/*

COPY . /app/

# Instantiate Anka runner after dependencies are installed
CMD [ "zsh", "/app/codebase/main_orchestrator.zsh"]
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2023 Kandji, Inc.
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this
4 | software and associated documentation files (the "Software"), to deal in the Software
5 | without restriction, including without limitation the rights to use, copy, modify, merge,
6 | publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
7 | to whom the Software is furnished to do so, subject to the following conditions:
8 |
9 | The above copyright notice and this permission notice shall be included in all copies or
10 | substantial portions of the Software.
11 |
12 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
13 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
14 | PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
15 | FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
16 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
17 | DEALINGS IN THE SOFTWARE.
18 |
19 | autopkg_tools.py and cookbooks/
20 | Unless otherwise noted, all software in this repo is released under the 3-Clause
21 | BSD License
22 |
23 | Copyright (c) ZenPayroll, Inc., dba Gusto. All rights reserved.
24 | Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
25 |
26 | Redistribution and use in source and binary forms, with or without modification,
27 | are permitted provided that the following conditions are met:
28 |
29 | * Redistributions of source code must retain the above copyright notice, this
30 | list of conditions and the following disclaimer.
31 |
32 | * Redistributions in binary form must reproduce the above copyright notice,
33 | this list of conditions and the following disclaimer in the documentation
34 | and/or other materials provided with the distribution.
35 |
36 | * Neither the name Facebook nor the names of its contributors may be used to
37 | endorse or promote products derived from this software without specific
38 | prior written permission.
39 |
40 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
41 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
42 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
43 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
44 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
45 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
46 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
47 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
48 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
49 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
50 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # `C`acheless `A`utoPkg `R`unner `L`ocalized (`CARL`)
2 | * #### [ABOUT](#about-1)
3 | * #### [SETUP](#setup-1)
4 | * #### [RUNTIME](#runtime-1)
5 | * #### [RUNTIME COMPONENTS](#runtime-components-1)
6 | * #### [CREDITS](#credits-1)
7 |
8 | ---
9 | ## ABOUT
10 | - ### CARL is a local workflow to:
11 | - #### Configure and bootstrap an [Anka](https://veertu.com/anka-develop/) macOS VM for [AutoPkg](https://github.com/autopkg/autopkg) execution
12 | - #### Run specified AutoPkg recipes
13 | - #### Record and preserve download metadata in a JSON blob
14 | - #### For subsequent recipe runs, recreate AutoPkg download cache from previous JSON metadata
15 | - #### JSON-populated cache writes very quickly to disk and uses a fraction of the disk space
16 | - #### Recreated cache is equally performant when used to compare downloads/identify new updates
17 |
18 | > **Note**
19 | > The default config works just fine for getting started
20 | >
21 | > To jump right in, clone this repo and run `./build.zsh` from the root dir
22 | >
23 | > Otherwise, read on for setup instructions and technical details
24 |
25 |
26 | ## SETUP
27 | ### CONFIG
28 | - There are three configurable settings available in the [config.json](config.json):
29 | - `host_runtime`
30 | - `local`: **Default** setting, uses the local Mac to execute the end-to-end Cacheless AutoPkg workflow
31 | - `docker`: Builds from [Dockerfile](Dockerfile), copies over all dependencies, and orchestrates the Anka VM runtime
32 | - `local_autopkg_recipes_dir`
33 | - Can specify a directory path elsewhere on-disk containing .recipe files for execution
34 | - **Defaults** to [./example-recipes](example-recipes) folder at root dir
35 | - If `basename` of path (i.e. folder name) does not exist at root dir, it will be copied over and the [CacheRecipeMetadata](example-recipes/CacheRecipeMetadata) processor placed within
36 | - **NOTE**: To scope recipes for runtime, they must be added to [recipe_list.json](recipe_list.json)
37 | - `slack_notify`
38 | - `bool` value to disable Slack notifications for entire runtime
39 | - **Default** is `false`; results in no messages posting to Slack for any execution, only stdout and logging
40 | - Setting to `true` expects an ENV variable key `SLACK_WEBHOOK_TOKEN` with the value of a valid `hooks.slack.com` URL (see below)
41 | - On your local Mac, you may define `SLACK_WEBHOOK_TOKEN` as an environment variable
42 | - This is used to send messages to a specified Slack channel with updates for
43 | - AutoPkg Runner start
44 | - Any updates downloaded
45 | - Any updates built
46 | - Any failures
47 | - AutoPkg Runner end
48 | - ENV should be in the form of `SLACK_WEBHOOK_TOKEN=https://hooks.slack.com/services/XXXXXXXXX/XXXXXXXXXXX/XXXXXXXXXXXXXXXXXXXXXXXX`
49 | - It can be added to `~/.zshrc`, `~/.zshenv`, or any other file that is sourced and makes ENV vars available to your Terminal session
50 | - To confirm `SLACK_WEBHOOK_TOKEN` is available to AutoPkg Runner, open a new Terminal window and run `export | grep SLACK_WEBHOOK_TOKEN`
51 | - **OPTIONAL INSTALL**: This workflow supports running from a containerized Docker image
52 | - You can download [Docker Desktop here](https://docs.docker.com/desktop/install/mac-install/) for your Mac's chipset
53 | - **IMPORTANT**: Due to a bug with Anka Develop, you must first start your cloned VM, _then_ launch Docker
54 | - See instructions above to specify `docker` as `host_runtime` in the [config.json](config.json)
55 |
56 | ### AUTOPKG RECIPES
57 | - This repo contains several example recipes (in the aptly named [example-recipes](example-recipes) folder), as well as [a JSON file](recipe_list.json) governing which recipes are run
58 | - You are free to add your own recipes, overrides, custom processors, etc.
59 | - See instructions above to specify `local_autopkg_recipes_dir` in the [config.json](config.json)
60 |
61 | ## RUNTIME
62 | ### BOOTSTRAPPING
63 |
64 | - Once your config is set, run [build.zsh](build.zsh) from the root directory.
65 | - If Anka isn't installed, you'll be prompted to enter your `sudo` password to execute [anka_install_create_clone.zsh](codebase/anka_install_create_clone.zsh), which:
66 | 1. Ensures you have appropriate rights (e.g. invoked with sudo)
67 | 2. Downloads, validates the security of, and installs the free Anka Develop client
68 | 3. Accepts the software license and offers to download/create a new VM running the latest macOS
69 | 4. Clones our newly downloaded VM and spins it up
70 | - If more than one VM is found, you will be prompted to select one to clone
71 | 5. Deletes the Anka installer
72 | 6. Once started, the cloned Anka VM has a default username:password of `anka`:`admin`
73 |
74 | ### END-TO-END FLOW
75 | #### Below runtime is for a [config.json](config.json) set as follows (not the default!):
76 |
77 | ```
78 | {
79 | "host_runtime": "docker",
80 | "local_autopkg_recipes_dir": "./example-recipes",
81 | "slack_notify": true
82 | }
83 | ```
84 |
85 | 1. Spins up a lightweight Docker container and connects to an active Anka VM runner
86 | 2. If previous AutoPkg run JSON results exist, they are copied over to the Anka VM alongside other dependencies
87 | 3. Remote VM is bootstrapped, AutoPkg installed and configured, and is ready to run some recipes
88 | 4. Runner executes `autopkg_tools.py`
89 | - Using the `--cache` flag, if existing, JSON metadata file is parsed and cache of all previously downloaded files + extended attributes (`xattrs`; e.g. `last-modified`) are recreated on-disk
90 | - Files are created using `mkfile -n`, where file size is noted upon object creation (e.g. `ls -la` shows reported size), but disk blocks aren't allocated (e.g. `du -sh` shows no actual disk usage)
91 | - Python's `os.path.getsize` (used by AutoPkg to read byte size) can also read in these no-block files and use them for comparison of both file size and `xattrs`
92 | - This allows us to very quickly recreate a directory of files that reads like multiple gigabytes with virtually no disk usage
93 | 5. `autopkg_tools.py` reads and locates all recipes specified in `recipe_list.json`
94 | 6. Target recipes will check cache (if present) and compare against available download
95 | 7. If download differs (according to `xattr` or alternate check of byte size only), update is downloaded and a subsequent PKG created
96 | 8. Results from new builds/downloads are concatenated into a combined `.plist`, metadata `.json` updated with any new downloads, and both files `scp`'d back to host endpoint
97 | 9. New and old metadata files are compared and reported on if shasum values differ
98 | 10. Status of new builds and `autopkg-runner` execution are reported back to Slack in the channel specified by `SLACK_WEBHOOK_TOKEN`
99 |
100 | ## RUNTIME COMPONENTS
101 | ### Primary
102 | - [main_orchestrator.zsh](codebase/main_orchestrator.zsh): Z shell
103 | - Runs prechecks to validate required dependencies are available/defined
104 | - Generates an SSH keypair and formats public key to be received by Anka VM
105 | - Clones a Mac VM and installs public SSH key
106 | - Copies over AutoPkg last run metadata (if present), as well as other req'd files
107 | - Remotely executes our bootstrapper `anka_bootstrap.zsh`
108 | - Remotely executes our AutoPkg runner `autopkg_tools.py`
109 | - Copies back metadata, recipe receipts, and reports on changes to metadata
110 | - [autopkg_tools.py](codebase/autopkg_tools.py): Python 3
111 | - Iterates over and builds packages based on a list of recipes; called with flag `-l` and file `recipe_list.json`
112 | - Loads and writes out cached metadata from the last AutoPkg run, caches any new metadata post-run; called with flag `-c`
113 |
114 | ### Helpers
115 | - [anka_bootstrap.zsh](codebase/helpers/anka_bootstrap.zsh): Z shell
116 | - Checks for the existence of, and if missing, installs AutoPkg, Rosetta 2, and custom AutoPkg settings
117 | - [slack_notify.zsh](codebase/helpers/slack_notify.zsh): Z shell
118 | - Can be passed named args of `-status`, `-title`, `-text`, and optional `-host_info` (Hostname, Serial, OS, internal IP)
119 | - Sourced by `main_orchestrator.zsh`, `anka_bootstrap.zsh`
120 |
121 | ### Processors
122 |
123 | - [CacheRecipeMetadata](example-recipes/CacheRecipeMetadata): A folder containing the caching processor and recipe stub; present in the `example-recipes` folder
124 | - [CacheRecipeMetadata.py](example-recipes/CacheRecipeMetadata/CacheRecipeMetadata.py): Python AutoPkg postprocessor; executes after every individual AutoPkg recipe run, collects download metadata, and writes to `/tmp/autopkg_metadata.json`
125 | - [io.kandji.cachedata.recipe](example-recipes/CacheRecipeMetadata/io.kandji.cachedata.recipe): A recipe stub in the same relative directory as our Python code so AutoPkg knows how to identify and run the above
126 |
127 | ### Configuration
128 |
129 | - [recipe_list.json](recipe_list.json): A JSON blob populated by recipe names for execution
130 | - These recipes must be available within the folder path defined in [config.json](config.json)
131 | - They are sequentially run by invoking `autopkg_tools.py -l recipe_list.json`
132 | - **Defaults** to all recipes (see below) contained within the `example-recipes` directory
133 | ```
134 | [
135 | "AdobeAcrobatProDC.pkg.recipe",
136 | "AndroidStudio.pkg.recipe",
137 | "BraveBrowser.pkg.recipe",
138 | "Docker.pkg.recipe",
139 | "GitHubDesktop.pkg.recipe",
140 | "GoogleChrome.pkg.recipe",
141 | "MicrosoftExcel.pkg.recipe",
142 | "MicrosoftPowerPoint.pkg.recipe",
143 | "MicrosoftRemoteDesktop.pkg.recipe",
144 | "MicrosoftWord.pkg.recipe",
145 | "PyCharmCE.pkg.recipe",
146 | "TableauDesktop.pkg.recipe",
147 | "VLC.pkg.recipe",
148 | "Zoom.pkg.recipe"
149 | ]
150 | ```
151 |
152 | ## CREDITS
153 | [autopkg_tools.py](https://github.com/facebook/IT-CPE/tree/master/legacy/autopkg_tools) from Facebook under a BSD 3-clause license with modifications from [tig](https://6fx.eu) and [Gusto](https://github.com/Gusto/it-cpe-opensource/blob/main/autopkg/autopkg_tools.py).
154 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | ## Reporting a Vulnerability
4 |
5 | If you believe you’ve found a security vulnerability in Kandji’s service, please notify us; we will work with you to resolve the issue promptly.
6 |
7 | Our full Responsible Disclosure Policy can be found here:
8 |
9 | https://www.kandji.io/security
10 |
--------------------------------------------------------------------------------
/build.zsh:
--------------------------------------------------------------------------------
1 | #!/bin/zsh
2 | # Created 06/06/23; NRJA
3 | # Updated 06/13/23; NRJA
4 | ################################################################################################
5 | # License Information
6 | ################################################################################################
7 | #
8 | # Copyright 2023 Kandji, Inc.
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this
11 | # software and associated documentation files (the "Software"), to deal in the Software
12 | # without restriction, including without limitation the rights to use, copy, modify, merge,
13 | # publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
14 | # to whom the Software is furnished to do so, subject to the following conditions:
15 | #
16 | # The above copyright notice and this permission notice shall be included in all copies or
17 | # substantial portions of the Software.
18 | #
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
20 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
21 | # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
22 | # FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
23 | # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
24 | # DEALINGS IN THE SOFTWARE.
25 | #
26 | ################################################################################################
27 |
##############################
########## VARIABLES #########
##############################

# Get script name
script_exec=$(basename $ZSH_ARGZERO)
# Get directory of script execution
dir=$(dirname $ZSH_ARGZERO)
# Absolute path of exec dir
abs_dir=$(realpath "${dir}")

# Abs path to config.json
config_abs_path=$(find "${abs_dir}" -name "config.json")
# Abs path to anka_install_create_clone.zsh
anka_abs_path=$(find "${abs_dir}" -name "anka_install_create_clone.zsh")
# Abs path to main_orchestrator.zsh
main_orch_abs_path=$(find "${abs_dir}" -name "main_orchestrator.zsh")
# Abs path to CacheRecipeMetadata folder with proc/stub
cache_proc_abs_path=$(find "${abs_dir}" -name "CacheRecipeMetadata")

###########################
########## CONFIG #########
###########################

# Read in values set in config.json
RUNTIME=$(plutil -extract host_runtime raw -o - "${config_abs_path}" 2>/dev/null)
SLACK_NOTIFY=$(plutil -extract slack_notify raw -o - "${config_abs_path}" 2>/dev/null)
RECIPES_DIR=$(plutil -extract local_autopkg_recipes_dir raw -o - "${config_abs_path}" 2>/dev/null)

# Blank the Slack webhook unless notify is explicitly true
# export alone both sets and propagates the value to downstream execs
if [[ ! ${SLACK_NOTIFY} == true ]]; then
    export SLACK_WEBHOOK_TOKEN=""
fi

# Get folder name of recipes (will be placed in /tmp on remote VM)
# export alone both sets and propagates the value to downstream execs
export RECIPES_DIR_NAME=$(basename "${RECIPES_DIR}")

# Check if folder by name already exists, and if not, if RECIPES_DIR is valid path
if [[ ! -d "${RECIPES_DIR_NAME}" ]] && [[ -d "${RECIPES_DIR}" ]]; then
    echo "${RECIPES_DIR_NAME} not found in local folder - copying over now..."
    cp -R "${RECIPES_DIR}" .
    # Ensure the CacheRecipeMetadata processor ships alongside the copied recipes
    if [[ -z $(find "${RECIPES_DIR}" -name "CacheRecipeMetadata") ]]; then
        if test -d "${cache_proc_abs_path}"; then
            # Copy over CacheMD proc, but no need to alert on it
            cp -R "${cache_proc_abs_path}" "./${RECIPES_DIR_NAME}"
        fi
    fi
fi
81 |
82 | ##############################
83 | ########## FUNCTIONS #########
84 | ##############################
85 |
##############################################
# Polls a caller-supplied health check command
# (for either Docker or Anka) until it emits
# stdout — the healthy signal — or the retry
# budget is exhausted
# Arguments:
#   "${1}", cmd whose stdout indicates health
# Returns:
#   Exit 1 if service never reports healthy
##############################################
function wait_for_healthy_service() {

    local health_cmd="${1}"
    local -i attempt=1
    local -i max_attempts=30 # 30 attempts x 3 sec sleeps = up to 90 seconds

    echo "Checking if specified service is active (waiting up to 90 seconds)"

    # Checks provided to this func are designed to return stdout only if target app/service is healthy
    while [[ -z $(eval "${health_cmd}") ]] && [[ "${attempt}" -le "${max_attempts}" ]]; do
        sleep 3

        echo "$(date +'%r') : Still awaiting activation of service... (${attempt}/${max_attempts})"

        (( attempt++ ))
    done

    # Re-run the check once more to distinguish timeout from late success
    if [[ "${attempt}" -gt "${max_attempts}" ]] && [[ -z $(eval "${health_cmd}") ]]; then
        echo "ERROR: Requested service never activated when confirming from stdout of below command!\n"
        echo "${health_cmd}"
        echo "\nPlease validate unhealthy service and re-run ${script_exec}"
        exit 1
    fi

    echo "Service activated! Proceeding..."

    # Sleep 5 to give the service a bit more time
    sleep 5
}
125 |
##############################################
# Confirms runtime is not with sudo/root
# Checks for valid Anka install, and prompts
# to install (requiring sudo) or clone if no
# running VM located. Outputs JSON file with
# name and IP address of running Anka VM
# Outputs:
#   Writes out ./running_vms.json with VM info
# Returns:
#   Exit 1 if any prechecks fail
##############################################
function prechecks() {

    # Check for sudo or root
    if [[ "${EUID}" -eq 0 ]]; then
        echo "CRITICAL: Build script should not be run with sudo or as root!"
        exit 1
    fi

    # Check for an Anka install at all
    if ! anka version >/dev/null 2>&1; then
        echo "ERROR: No Anka install found!"
        if read -q "?Download + install Anka (requires sudo!) and create a new macOS VM? (Y/N): "; then
            sudo "${anka_abs_path}"
            # Check for running Anka VM that reports IP address — grep for IP given expected pattern
            wait_for_healthy_service 'anka list -r -f ip 2>/dev/null | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b"'
        else
            echo "\n\nRun the below to install and create new macOS VM\n\nsudo ${anka_abs_path}\n"
            exit 1
        fi
    # If anka is installed, but no VM running, offer to clone and then continue
    elif [[ -z $(anka list -r 2>/dev/null) ]]; then
        echo "WARNING: No running Anka VMs found!"
        # Nonzero value means Docker Desktop is loaded in the console user's GUI session
        docker_alive=$(launchctl print gui/$(/usr/bin/stat -f%Du /dev/console) | grep -i application.com.docker | awk '{print $1}')
        if [[ ${docker_alive} -gt 0 ]]; then
            echo "WARNING: Due to a vendor bug, Docker must be fully closed before cloning/starting an Anka VM!"
            echo "Please fully close Docker Desktop and re-run ${script_exec}"
            exit 1
        fi
        echo "Cloning a new VM before continuing"
        # Fixed typo: "for fully start" -> "to fully start"
        echo "Will wait post-clone for Anka service to fully start"
        "${anka_abs_path}" --cloneonly
        # Check for running Anka VM that reports IP address — grep for IP given expected pattern
        wait_for_healthy_service 'anka list -r -f ip 2>/dev/null | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b"'
    fi

    # Write JSON file of running VM out to disk
    anka -j list -r -f name -f ip > ./running_vms.json || exit 1
}
177 |
##############################################
# Checks the assigned runtime — if local, runs
# main_orchestrator.zsh. If docker, validates
# Docker install, prompts for local run if
# Docker not present, and confirms healthy
# Docker prior to building and running image
# Outputs:
#   AutoPkg results to autopkg-runner-results
#   AutoPkg metadata to autopkg_metadata.json
# Returns:
#   If local runtime, main_orchestrator exec
#   Exit 1 on error
##############################################
function execute_runtime() {

    if [[ "${RUNTIME}" == "local" ]]; then

        # Invoke main orchestrator on local machine with exec to replace this script
        exec "${main_orch_abs_path}"

    elif [[ "${RUNTIME}" == "docker" ]]; then

        # Confirm Docker is installed (the docker CLI symlinks back into Docker.app)
        docker_installed=$(readlink $(which docker) | grep -o '.*Docker.app')

        if [[ -z ${docker_installed} ]]; then
            echo "WARNING: No Docker install found on-disk!"
            if read -q "?Proceed with Anka VM bootstrap using this Mac as host? (Y/N): "; then
                exec "${main_orch_abs_path}"
            else
                echo "\nExiting..."
                exit 0
            fi
        fi

        # Check if Docker proc is active in the console user's GUI session
        docker_alive=$(launchctl print gui/$(/usr/bin/stat -f%Du /dev/console) | grep -i application.com.docker | awk '{print $1}')

        if [[ ${docker_alive} -gt 0 ]]; then
            # Docker version returns error code if EULA not accepted
            if ! docker version >/dev/null 2>&1; then
                echo "WARNING: Docker is open, but appears unhealthy."
                echo "Ensure setup is complete/EULAs accepted and re-run ${script_exec}"
                exit 1
            fi
            # Stop and remove existing containers and images
            # NOTE: docker filter keys are lowercase ("name="); capitalized "Name=" is rejected as invalid
            docker stop $(docker ps -aq -f "name=AnkaVM") 2>/dev/null
            docker rm -f $(docker ps -aq -f "name=AnkaVM") 2>/dev/null
            # -q returns bare image IDs; without it, rmi would be fed the full table output
            docker rmi -f $(docker images -q --filter=reference='anka_vm') 2>/dev/null
            # Rebuild container, remove JSON file once built (JSON is copied over)
            docker build . -t 'anka_vm'; rm ./running_vms.json
            # Run Docker with ENV vars passed in to begin AutoPkg execution
            docker run -e "SLACK_WEBHOOK_TOKEN=${SLACK_WEBHOOK_TOKEN}" -e "RECIPES_DIR_NAME=${RECIPES_DIR_NAME}" -it --name 'AnkaVM' anka_vm

            # Post execution, create a dedicated folder for results
            mkdir -p ./autopkg-runner-results
            # Copy back any metadata for runtime/downloads
            docker cp $(docker ps -aq -f "name=AnkaVM"):/app/autopkg_metadata.json . 2>/dev/null
            # Copy back full recipe results with timestamp to folder
            docker cp $(docker ps -aq -f "name=AnkaVM"):/app/autopkg_full_results.plist ./autopkg-runner-results/autopkg_full_results_$(date +%Y-%m-%d_%H%M%S).plist
        else
            echo "WARNING: Docker installed, but not active."
            echo "Attempting to launch Docker from ${docker_installed}..."
            open -jga "${docker_installed}"
            # Check for running Docker Server that reports proper version
            # Server only shows version once Docker Engine healthy
            wait_for_healthy_service 'docker version 2>/dev/null | grep -i "Server.*Docker"'
            # Hit it again with Docker open
            execute_runtime
        fi
    else
        echo "WARNING: No runtime specified!"
        if read -q "?Proceed with Anka VM bootstrap using this Mac as host? (Y/N): "; then
            exec "${main_orch_abs_path}"
        fi
        echo "\nExiting..."
    fi

}
257 |
##############################################
# Main runtime: validate the environment, then
# hand off to the configured runtime executor
##############################################
function main() {

    # Ensure Anka is installed and a VM is running (writes running_vms.json)
    prechecks

    # Dispatch to local or docker execution per config.json
    execute_runtime

    exit 0
}

###############
##### MAIN ####
###############

main
275 |
--------------------------------------------------------------------------------
/codebase/anka_install_create_clone.zsh:
--------------------------------------------------------------------------------
1 | #!/bin/zsh
2 |
3 | ################################################################################################
4 | # Created by Daniel Chapa | support@kandji.io | Kandji, Inc. | Systems Engineering
5 | # Updated by Noah Anderson | support@kandji.io | Kandji, Inc. | Systems Engineering
6 | ################################################################################################
7 | # Created on 09/14/2022
8 | # Updated on 09/14/2022
9 | # Updated 06/09/23; NRJA
10 | ################################################################################################
11 | # Software Information
12 | ################################################################################################
13 | # This checks for an existing install, installs the latest version of Veertu Anka Develop,
14 | # and offers to spin up and clone a new VM image if none are found.
15 | ################################################################################################
16 | # License Information
17 | ################################################################################################
18 | #
19 | # Copyright 2023 Kandji, Inc.
20 | #
21 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this
22 | # software and associated documentation files (the "Software"), to deal in the Software
23 | # without restriction, including without limitation the rights to use, copy, modify, merge,
24 | # publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
25 | # to whom the Software is furnished to do so, subject to the following conditions:
26 | #
27 | # The above copyright notice and this permission notice shall be included in all copies or
28 | # substantial portions of the Software.
29 | #
30 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
31 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
32 | # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
33 | # FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
34 | # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
35 | # DEALINGS IN THE SOFTWARE.
36 | #
37 | ################################################################################################
38 |
#############################
######### ARGUMENTS #########
#############################

# Parse supported flags with zparseopts
zparseopts -D -E -a opts h -help c -cloneonly v -verbose

# shellcheck disable=SC2154
# Enable command tracing when verbose was requested
if (( ${opts[(I)(-v|--verbose)]} )); then
    set -x
fi

# Default to the full install flow; flip to clone-only when flagged
# (clone-only runs the VM clone in user context — no sudo — then exits)
clone_only=false
if (( ${opts[(I)(-c|--cloneonly)]} )); then
    clone_only=true
fi

# Print usage and bail when help was requested
if (( ${opts[(I)(-h|--help)]} )); then
    echo "Usage: sudo ./anka_install_create_clone.zsh [--help|--cloneonly|--verbose] [arguments...]"
    echo
    echo "Checks for, and if not found, downloads and installs Anka and activates Develop license"
    echo "Checks for any current VMs and offers to download latest macOS and immediately clone + start it"
    exit 0
fi
67 |
##############################
########## VARIABLES #########
##############################

#----- Download URL (Anka Develop, Apple silicon build)
ANKA_DOWNLOAD_URL="https://veertu.com/downloads/anka-virtualization-arm"

#----- Installer download target on-disk
ANKA_INSTALLER="/tmp/anka-installer.pkg"

#----- Developer Team ID for vendor (used to validate PKG signature)
VEERTU_ID="TT9FAWP6V4"

# Short name of the user who owns the console (GUI) session
console_user=$(/usr/bin/stat -f%Su /dev/console)
82 |
83 |
84 | ##############################
85 | ########## FUNCTIONS #########
86 | ##############################
87 |
88 | ##############################################
89 | # Validates user is running script with
90 | # appropriate permissions (sudo or root)
91 | ##############################################
function prechecks() {
    # Clone-only mode needs no elevated rights — do the clone/start work and stop here
    if [[ ${clone_only} == true ]]; then
        vm_clone_start
        exit 0
    fi

    # Everything past this point installs software, so insist on root
    [[ "${EUID}" -eq 0 ]] || { echo "Installation script must be run with sudo or as root"; exit 1; }
}
103 |
104 | ##############################################
105 | # Conditional check for Anka binary exec
106 | # Returns:
107 | # 0 if anka version succeeds, 1 on error.
108 | ##############################################
function is_anka_installed() {

    # Probe the Anka CLI; normalize any failure to return code 1
    anka version >/dev/null 2>&1 && return 0 || return 1
}
118 |
119 | ##############################################
120 | # Conditional check for Anka license
121 | # Returns:
122 | # 0 if license show succeeds, 1 on error.
123 | ##############################################
function check_anka_license() {

    # A successful `anka license show` means an active license is in place
    anka license show >/dev/null 2>&1 && return 0 || return 1
}
133 |
134 | ##############################################
# Checks if Anka is installed, downloading
136 | # installer if not. Validates code signature
137 | # on PKG and then installs once confirmed.
138 | # Arguments:
139 | # Arguments taken (e.g. "${1}")
140 | # Outputs:
141 | # Installs Anka on-disk
142 | # Returns:
143 | # Exit 1 on PKG signature verification error
144 | ##############################################
function download_validate_install() {

    if ! is_anka_installed; then

        # Download the vendor PKG to /tmp; bail out on network failure so we
        # don't fall through to a misleading signature-check error
        echo "\n\nAnka binary not found; downloading installer..."
        if ! /usr/bin/curl -s -S -L "${ANKA_DOWNLOAD_URL}" -o "${ANKA_INSTALLER}"; then
            echo "ERROR: Failed to download Anka installer from ${ANKA_DOWNLOAD_URL}"
            exit 1
        fi

        # Validate Package signature against the known Veertu Team ID
        # First sed filters to the line, second returns value between (...)
        pkg_id=$(/usr/sbin/pkgutil --check-signature "${ANKA_INSTALLER}" | sed -n -e 's/^.*Developer ID Installer: //p' | sed -e 's/.*(\(.*\)).*/\1/;s/,//g')

        if [[ "${pkg_id}" != "${VEERTU_ID}" ]]; then
            echo "ERROR: PKG signature check failure!"
            echo "Expected Team ID was ${VEERTU_ID}; got ${pkg_id}"
            exit 1
        fi

        # Install; surface a clear error if the installer reports failure
        if ! sudo /usr/sbin/installer -pkg "${ANKA_INSTALLER}" -target /; then
            echo "ERROR: Anka PKG install failed! Check /var/log/install.log for details"
            exit 1
        fi
    else
        echo "Anka binary already present"
    fi
}
169 |
# Detects current runtime of script and will run any user commands without invoking sudo
function run_as_user() {

    # When running as root, re-execute the command string as the console
    # user's login shell so Anka VM state lands in that user's environment
    if [[ "${EUID}" -eq 0 ]]; then
        # Eval is clearly not best practice, but fine for where we provide the cmds
        su - "${console_user}" -c "eval ${1}"
    else
        # Already in user context — evaluate the command string directly
        eval ${1}
    fi
}
180 |
181 |
182 | ##############################################
183 | # Confirms Anka installed as expected, then
184 | # checks for licensing and accepts if inactive
185 | # Outputs:
186 | # Activates Anka license
187 | # Returns:
188 | # Exit 1 on install or license failure
189 | ##############################################
function activate_anka() {

    # Bail out early if the install didn't land on disk
    if ! is_anka_installed; then
        echo "ERROR: Anka install failed! Check /var/log/install.log for details"
        exit 1
    fi
    echo "Anka install present"

    # Accept the EULA only when no active license is found
    if ! check_anka_license; then
        echo "Activating license..."
        sudo anka license accept-eula || true
    fi

    # Re-check licensing to confirm activation took effect
    if check_anka_license; then
        echo "Anka license active"
    else
        echo "ERROR: Anka license was not activated!"
        exit 1
    fi
}
213 |
214 | ##############################################
215 | # If no Anka VMs are found under the user env,
216 | # searches for the latest macOS and offers to
217 | # download and use it to create a new Anka VM
218 | # Outputs:
219 | # Creates new Anka VM running latest macOS
220 | # Returns:
221 | # Exit 0 if user elects not to create a VM
222 | ##############################################
function offer_create_new_vm() {

    # Number of VMs registered for the console user (length of JSON "body" array)
    vm_count=$(run_as_user "anka -j list | plutil -extract body raw -o - -")

    if [[ ${vm_count} -lt 1 ]]; then
        # shellcheck disable=SC2051
        # bash doesn't support variables in brace range expansions, but zsh does
        # Walk the `anka create --list` entries looking for the one flagged
        # "latest" and capture its macOS version string
        # NOTE(review): the upper bound is the entry count, so the loop runs one
        # index past the end — harmless, since the plutil extract check below
        # simply fails for a missing index, but worth tightening
        for i in {0..$(anka -j create --list | plutil -extract body raw -o - -)}; do
            if anka -j create --list | plutil -extract body.${i}.latest raw -o - - >/dev/null 2>&1; then
                latest_macos=$(anka -j create --list | plutil -extract body.${i}.version raw -o - -)
                break
            fi
        done

        # read -q succeeds only on a y/Y answer
        if read -q "?No Anka VM found for user ${console_user}! Create new VM running macOS ${latest_macos}? (Y/N): "; then
            anka_vm_name="anka_vm_${latest_macos}"
            # Create new VM downloading from latest macOS
            run_as_user "anka create ${anka_vm_name} latest"
        else
            echo "\nExiting..."
            exit 0
        fi
    fi
}
247 |
248 | ##############################################
249 | # Counts the number of Anka VMs under the user
250 | # env — if only one found, clones and starts
251 | # it. If existing VM found with _CLONE name,
252 | # offers to delete/recreate, else starts VM
253 | # Outputs:
254 | # Creates VM clone and starts it
255 | ##############################################
function vm_clone_start() {

    # Count of named VMs for the console user (length of JSON "body" array)
    vm_name_count=$(run_as_user "anka -j list -s -f name" | plutil -extract body raw -o - -)

    declare -a all_vms

    if [[ ${vm_name_count} -lt 1 ]]; then
        # No VMs at all — offer to create one, then re-enter this function
        offer_create_new_vm
        vm_clone_start
    elif [[ ${vm_name_count} -gt 1 ]]; then
        # Multiple VMs: strip table borders and header, then prompt for a choice
        all_vms=($(run_as_user "anka list -s -f name" | grep '[[:alnum:]]' | sed 's/|//g' | tail -n +2 ))
        ps3_text=$(echo "Found more than one VM to clone! Type number and hit return to select VM from above\n: ")
        PS3=${ps3_text}
        select VM_NAME in "${all_vms[@]}"; do
            # select leaves VM_NAME empty on an invalid entry — re-prompt
            [[ -n ${VM_NAME} ]] || { echo "\nImproper selection! Please type number (e.g. 2) and hit return\n" >&2; continue; }
            vm_to_clone=${VM_NAME}
            break
        done
    else
        # Exactly one VM — use it directly
        vm_to_clone=$(run_as_user "anka -j list -s -f name" | plutil -extract body.0.name raw -o - -)
    fi

    clone_name="${vm_to_clone}_CLONE"

    # If a clone by this name already exists, optionally delete/recreate it
    if run_as_user "anka list -f name | grep -o ${clone_name}" 2>/dev/null; then
        if read -q "?Found existing VM ${clone_name}! Delete and recreate before starting? (Y/N): "; then
            run_as_user "anka stop ${clone_name}" 2>/dev/null
            run_as_user "anka delete --yes ${clone_name}" 2>/dev/null
            run_as_user "anka clone ${vm_to_clone} ${clone_name}" || echo "ERROR: Unable to clone VM; see output for error"
            run_as_user "anka start -v ${clone_name}" 2>/dev/null
        else
            echo "Starting ${clone_name}..."
            run_as_user "anka start -v ${clone_name}" 2>/dev/null
        fi
    else
        echo "Cloning and starting new Anka VM ${clone_name}"
        run_as_user "anka clone ${vm_to_clone} ${clone_name}" || echo "ERROR: Unable to clone VM; see output for error"
        run_as_user "anka start -v ${clone_name}" 2>/dev/null
    fi
}
296 |
297 | ##############################################
298 | # Deletes Anka installer from /tmp
299 | ##############################################
function cleanup() {

    # Remove the downloaded installer PKG (-f: no-op if it was never downloaded)
    echo "Cleaning up..."
    /bin/rm -f -- "${ANKA_INSTALLER}"
}
306 |
307 | ##=============================================================
308 | ## Script Run
309 | ##=============================================================
function main() {

    # Validate runtime context (handles --cloneonly and the root requirement)
    prechecks
    # Install Anka if missing, verifying the PKG signature first
    download_validate_install
    # Confirm the install and ensure the license is active
    activate_anka
    # Offer to build a first VM when none exist
    offer_create_new_vm
    # Clone the selected VM and start it
    vm_clone_start
    # Remove the downloaded installer
    cleanup
}
324 |
###############
##### MAIN ####
###############

# Kick off the full install → license → VM clone/start workflow
main
330 |
--------------------------------------------------------------------------------
/codebase/autopkg_tools.py:
--------------------------------------------------------------------------------
1 | #!/usr/local/autopkg/python
2 | ################################################################################################
3 | # License Information
4 | ################################################################################################
5 | #
6 | # Copyright 2023 Kandji, Inc.
7 | #
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this
9 | # software and associated documentation files (the "Software"), to deal in the Software
10 | # without restriction, including without limitation the rights to use, copy, modify, merge,
11 | # publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
12 | # to whom the Software is furnished to do so, subject to the following conditions:
13 | #
14 | # The above copyright notice and this permission notice shall be included in all copies or
15 | # substantial portions of the Software.
16 | #
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
18 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
19 | # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
20 | # FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
21 | # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 | # DEALINGS IN THE SOFTWARE.
23 | #
24 | # BSD-3-Clause
25 | # Copyright (c) Facebook, Inc. and its affiliates.
26 | # Copyright (c) tig .
27 | # Copyright (c) Gusto, Inc.
28 | #
29 | # Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
30 | #
31 | # 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
32 | #
33 | # 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
34 | #
35 | # 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
36 | #
37 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
38 | ################################################################################################
39 |
40 | #######################
41 | ####### IMPORTS #######
42 | #######################
43 |
44 | import argparse
45 | import json
46 | import logging
47 | import os
48 | import platform
49 | import plistlib
50 | import re
51 | import sys
52 | from fnmatch import fnmatch
53 | from glob import glob
54 | from pathlib import Path
55 | from subprocess import PIPE, STDOUT, CalledProcessError, run
56 |
57 | import requests
58 |
59 | #############################
60 | ######### ARGUMENTS #########
61 | #############################
62 |
# Set parsers at the top so they're available to all funcs below
parser = argparse.ArgumentParser(
    prog="autopkg_tools.py",
    description="Wrapper for AutoPkg",
)
# Path to a .json/.plist recipe list; default False means "not provided"
parser.add_argument(
    "-l",
    "--list",
    action="store",
    required=False,
    default=False,
    help="Path to JSON blob or PLIST of recipe names.",
)
# When set: restore cached download metadata before the run, and trigger the
# CacheRecipeMetadata postprocessor during each recipe run
parser.add_argument(
    "-c",
    "--cache",
    action="store_true",
    required=False,
    default=False,
    help="Load and write previously cached metadata/xattrs for comparison; save out new metadata post-run.",
)
# Debug mode: DEBUG-level logging and no Slack posts
parser.add_argument(
    "-d",
    "--debug",
    action="store_true",
    required=False,
    default=False,
    help="Disables Slack alerts and increases verbosity for logging/stdout.",
)

args = parser.parse_args()
94 |
95 | ###########################
96 | ######### LOGGING #########
97 | ###########################
98 |
# Get hostname for log record
hostname = platform.node()
# Local logging location
path_to_log = "/var/tmp/autopkg_runner.log"

# --debug raises verbosity from INFO to DEBUG
logging_level = logging.DEBUG if args.debug else logging.INFO

# Log simultaneously to the file above and to the console
logging.basicConfig(
    level=logging_level,
    format="{asctime} " + f"[{hostname}]" + ": {levelname}: {message}",
    handlers=[logging.FileHandler(path_to_log), logging.StreamHandler()],
    style="{",
    datefmt="%Y-%m-%d %I:%M:%S %p",
)

log = logging.getLogger(__name__)
115 |
116 | #############################
117 | ######### VARIABLES #########
118 | #############################
119 |
120 |
121 | # Define PLIST func here to populate variable below
122 | def _plist_pal(path):
123 | """Function accepts argument of path to .plist file as `path`
124 | Returns plist formatted as dict"""
125 | with open(path, "rb") as f:
126 | loaded_plist = plistlib.load(f)
127 | return loaded_plist
128 |
129 |
# Absolute path to this script, and the directory containing it
RELATIVE_DIR = Path(__file__).resolve()
EXECUTION_DIR = Path(RELATIVE_DIR).parents[0]
# Where cached download metadata persists between runs
METADATA_CACHE_PATH = os.environ.get("METADATA_CACHE_PATH", "/tmp/autopkg_metadata.json")
# AutoPkg writes its run report here (pre-created in Recipe.run if missing)
REPORT_PLIST_PATH = "/tmp/autopkg.plist"
# Optional Slack webhook URL; alerts are skipped when unset
SLACK_WEBHOOK = os.environ.get("SLACK_WEBHOOK_TOKEN", None)
ENV_RECIPES_DIR = os.environ.get("RECIPES_DIR_NAME", None)
# Glob for AutoPkg PLIST that may have RECIPE_SEARCH_DIRS for us to read
AUTOPKG_PLIST = glob("/Users/**/Library/Preferences/com.github.autopkg.plist")
# If no ENV defined, set to value defined in AutoPkg plist
# NOTE(review): when RECIPES_DIR_NAME is unset AND no com.github.autopkg.plist
# matches, AUTOPKG_PLIST[0] raises IndexError at import time — confirm intended
RECIPES_DIR = (
    f"/tmp/{ENV_RECIPES_DIR}" if ENV_RECIPES_DIR is not None else _plist_pal(AUTOPKG_PLIST[0]).get("RECIPE_SEARCH_DIRS")
)
# Optional recipe name(s) from ENV; main() splits this on ", "
RECIPE_TO_RUN = os.environ.get("RECIPE", None)
143 |
144 | #############################
145 | ######### FUNCTIONS #########
146 | #############################
147 |
148 |
149 | def _run_command(shell_cmd):
150 | """Function accepts argument of shell command as `shell_cmd`
151 | Returns shell stderr + stdout and shell cmd exit code"""
152 | raw_out = run(shell_cmd, stdout=PIPE, stderr=STDOUT, shell=True, check=True)
153 | decoded_out = raw_out.stdout.decode().strip()
154 | exit_code = raw_out.returncode
155 | return exit_code, decoded_out
156 |
157 |
def handle_recipe(recipe, args):
    """Run the given recipe, log any downloads/builds, and return the recipe."""
    recipe.run(args)
    results = recipe.results
    if results.get("downloaded"):
        log.info(f"SUCCESS: Downloaded {recipe.name}")
    if results.get("built"):
        log.info(f"SUCCESS: Built {recipe.name}")
    return recipe
166 |
167 |
def parse_recipes(recipes):
    """Define recipe paths and map onto Recipe obj for execution."""
    # ENV-specified recipe names win over a recipe list file
    if RECIPE_TO_RUN:
        # Ensure every name carries a .recipe extension
        recipe_list = [r if os.path.splitext(r)[1] == ".recipe" else r + ".recipe" for r in recipes]
    else:
        # If we can't locate our recipe list, look in this script's relative dir
        if not os.path.exists(recipes):
            recipes = os.path.join(EXECUTION_DIR, recipes)
        # Pick a loader by the list's file extension (renamed from `parser`
        # to avoid shadowing the module-level argparse parser)
        ext = os.path.splitext(recipes)[1]
        if ext == ".json":
            loader = json.load
        elif ext == ".plist":
            loader = plistlib.load
        else:
            log.critical(f"Found invalid recipe list extension! Expected .plist/.json; got {ext}")
            sys.exit(1)

        with open(recipes, "rb") as f:
            recipe_list = loader(f)

    return map(Recipe, recipe_list)
196 |
197 |
def load_cached_attributes():
    """JSON load previous metadata from METADATA_CACHE_PATH and return as dict.

    Returns an empty dict (treated as a fresh build) when the cache file is
    missing or cannot be parsed as JSON.
    """
    try:
        with open(METADATA_CACHE_PATH) as cache_file:
            return json.load(cache_file)
    # Treat a missing cache as a new build
    except FileNotFoundError:
        return {}
    # A corrupt/truncated cache shouldn't kill the run — warn and start fresh
    except json.JSONDecodeError as err:
        log.warning(f"Could not parse metadata cache at {METADATA_CACHE_PATH} ({err}); treating as new build")
        return {}
209 |
210 |
def create_file_and_attributes(attributes_dict):
    """Read metadata cache from previous run and write out all items to disk
    If short name in cache path differs from logged in user, update path for current user
    Creates files via mkfile -n, consuming no disk space but reporting defined byte size when
    queried by AutoPkg via os module for filesize comparison; writes any associated xattrs to files"""
    # Python has no native support for extended attributes on macOS, so shell out to write attributes
    for i in attributes_dict:
        for dl_md in attributes_dict.get(i).get("download_metadata"):
            # Per-download metadata captured on a previous run; any key may be absent
            pathname = dl_md.get("pathname")
            etag = dl_md.get("etag")
            last_modified = dl_md.get("last_modified")
            dl_size_in_bytes = dl_md.get("dl_size_in_bytes")

            try:
                cache_path, cache_filename = os.path.split(pathname)
                log.debug(f"Found previous cache path {cache_path}")
                # Grab home dir shortname between Users and Library
                target_home_dir = cache_path[cache_path.find("/Users/") + len("/Users/") : cache_path.rfind("/Library")]
                exitc, console_user = _run_command("/usr/bin/stat -f%Su /dev/console")
                # If home directory and logged in username don't match, replace the former with the latter
                if None not in (target_home_dir, console_user) and target_home_dir != console_user:
                    log.debug(
                        f"Recorded home dir {target_home_dir} and logged in user {console_user} do not match; updating cache path"
                    )
                    cache_path = cache_path.replace(target_home_dir, console_user)
                    pathname = pathname.replace(target_home_dir, console_user)

                # Replicate the previous download file with the metadata we know about it
                if not os.path.exists(cache_path):
                    path_to_create = Path(cache_path)
                    path_to_create.mkdir(parents=True, exist_ok=True)
                # Shell command to write file with specified size to path
                # If dl_size_in_bytes isn't valid, our _run_command will check and throw an exception
                _run_command(f"mkfile -n '{dl_size_in_bytes}' '{pathname}'")
                # Add metadata attributes or skip/report if None
                _run_command(f"xattr -w com.github.autopkg.etag '{etag}' '{pathname}'") if etag else log.info(
                    f"Skipping write of attribute 'etag' for {i}; key is missing"
                )
                _run_command(
                    f"xattr -w com.github.autopkg.last-modified '{last_modified}' '{pathname}'"
                ) if last_modified else log.info(f"Skipping write of attribute 'last_modified' for {i}; key is missing")
                log.info(f"Wrote file with xattrs and byte size {dl_size_in_bytes} to {pathname}")
            # Will hit this exception if "pathname" is NoneType when we try to split it
            except TypeError as e:
                log.critical(
                    f"Issue when populating recipe '{i}' metadata!\nError is '{e}' for provided dict '{dl_md}'"
                )
                pass
259 |
260 |
261 | def _eval_recipe_results(recipe):
262 | """Check recipe obj status and define vars for Slack messaging"""
263 | task_title, task_description = None, None
264 | if recipe.error:
265 | try:
266 | task_title = f"Failed to run {recipe.name}"
267 | if not recipe.results["failed"]:
268 | task_description = "Unknown error"
269 | else:
270 | task_description = (
271 | f"ERROR: {recipe.results['failed'][0]['message']}\n"
272 | f"Traceback: {recipe.results['failed'][0]['traceback']}\n"
273 | )
274 | if "No releases found for repo" in task_description:
275 | # Just no updates
276 | return
277 | except AttributeError:
278 | task_title = "ERROR: Unable to locate specified recipe!"
279 | task_description = f"Skipping run of {recipe.recipe_name}; recipe doesn't exist or name is malformed."
280 |
281 | elif recipe.success:
282 | last_build = recipe.results["built"][-1].get("pkg_path")
283 | last_vers = recipe.results["built"][-1].get("version")
284 | if not last_vers:
285 | # Find a version number from our new PKG build
286 | out = re.search(r"([0-9](.*)[0-9])", last_build)
287 | try:
288 | version = out.group(0)
289 | except AttributeError:
290 | version = "Unknown"
291 | else:
292 | # If defined, set version as value from receipt plist
293 | version = last_vers
294 | all_builds = "\n".join([x.get("pkg_path") for x in recipe.results["built"] if x.get("pkg_path")])
295 | task_title = f"SUCCESS: Recipe {recipe.name} packaged new version {version}"
296 | task_description = f"*Build Path(s):*\n {all_builds}\n"
297 | return task_title, task_description
298 |
299 |
def slack_alert(recipe, args):
    """Message to Slack channel specified in SLACK_WEBHOOK with recipe run results."""
    # Debug runs never post to Slack
    if args.debug:
        log.debug("Skipping Slack notification - debug is enabled!")
        return

    # Nothing to post to without a webhook
    if SLACK_WEBHOOK is None:
        log.warning("Skipping Slack notification - webhook is missing!")
        return

    # Populate title and description from recipe results
    task_title, task_description = _eval_recipe_results(recipe)

    # Only post when both message pieces (and the webhook) are populated
    if task_title and task_description and SLACK_WEBHOOK:
        payload = {
            "attachments": [
                {
                    "username": "Autopkg",
                    "as_user": True,
                    "title": task_title,
                    "color": "good" if not recipe.error else "danger",
                    "text": task_description,
                    "mrkdwn_in": ["text"],
                }
            ]
        }
        response = requests.post(
            SLACK_WEBHOOK,
            data=json.dumps(payload),
            headers={"Content-Type": "application/json"},
        )
        if response.status_code != 200:
            msg = f"Request to Slack returned an error {response.status_code} with response {response.text}"
            raise ValueError(msg)
338 |
339 |
class Recipe:
    """Object to interact with AutoPkg recipe"""

    def __init__(self, path):
        # `path` is the recipe filename; resolve it to a full path under RECIPES_DIR
        try:
            self.recipe_name = path
            # First recursive glob match wins; StopIteration means not found
            self.path = next(iter(glob(f"{RECIPES_DIR}/**/{self.recipe_name}", recursive=True)))
            self.success = False
            self.error = False
            self.results = {}
            self._keys = None
            self._has_run = False

        except StopIteration:
            # Recipe not found: leave the object in an error state with most
            # attributes unset — callers catch the resulting AttributeError
            log.error(f"Could not locate {self.recipe_name}")
            self.error = True
            return None

    @property
    def plist(self):
        """Lazily load and cache the recipe plist on first access."""
        if self._keys is None:
            with open(self.path, "rb") as f:
                self._keys = plistlib.load(f)

        return self._keys

    @property
    def name(self):
        """Recipe display name from the plist's Input dict."""
        return self.plist.get("Input").get("NAME")

    def _get_pkg_version_from_receipt(self, new_dl):
        """Some processors don't return summary results with version/pkg_path
        This func will attempt to locate a receipt newer than the located DL
        and extract both version and pkg_path details for Slack notification"""
        # Set receipt pkg + version to None to return if we can't derive our version below
        receipt_pkg = None
        receipt_version = None
        # Get modification time of new DMG download
        dl_mod_time = os.path.getmtime(new_dl)
        # Get cache dir for build
        parent_path = Path(new_dl).parents[1]

        log.debug(f"Trying to get receipt data from provided DL {new_dl}")

        # Check if receipts dir exists
        if os.path.exists(os.path.join(parent_path, "receipts")):
            for receipt in os.scandir(os.path.join(parent_path, "receipts")):
                # If we find a receipt with a newer mod time than our download, likely the receipt for our new build
                if os.path.getmtime(receipt) > dl_mod_time:
                    log.debug(f"Found new receipt at {receipt}")
                    receipt_plist = _plist_pal(receipt)
                    log.debug(f"Read in plist with contents {receipt_plist}")
                    try:
                        # Get "version" value from receipts plist and assign
                        receipt_version = [
                            values.get("version")
                            for plist in receipt_plist
                            for values in plist.values()
                            if isinstance(values, dict) and "version" in values.keys()
                        ][-1]
                        log.debug(f"Found {receipt_version}")
                    except IndexError:
                        continue
                    try:
                        # Get "pkg_path" value from receipts plist and assign
                        receipt_pkg = [
                            values.get("pkg_path")
                            for plist in receipt_plist
                            for values in plist.values()
                            if isinstance(values, dict) and "pkg_path" in values.keys()
                        ][-1]
                    except IndexError:
                        continue
        return receipt_pkg, receipt_version

    def _parse_report(self, report):
        """Deeply parse AutoPkg summary to find all downloads/builds/failures"""
        # Read plist
        report_data = _plist_pal(report)
        failed_items = report_data.get("failures", [])
        downloaded_items = []
        built_items = []
        # If True, this means something happened
        if report_data.get("summary_results"):
            # Wildcard search for "pkg" in results to get key name since there are multiple possibilities
            pkg_summary_key = "".join([x for x in report_data["summary_results"].keys() if fnmatch(x, "*pkg*")])
            pkg_results = report_data.get("summary_results").get(pkg_summary_key, {})
            built_items.extend(pkg_results.get("data_rows", []))
            dl_results = report_data.get("summary_results").get("url_downloader_summary_result", {})
            downloaded_items.extend(dl_results.get("data_rows", []))
            # There are some cases where a new package was built, but processors like FlatPkgPacker don't show in results
            if dl_results and not pkg_results:
                # If so, look at the download path and identify if the DL'd file was a pkg and report it like a build
                if fringe_build := "".join(
                    [
                        next(iter(x.values()))
                        for x in dl_results.get("data_rows")
                        if fnmatch(next(iter(x.values())), "*pkg*")
                    ]
                ):
                    receipt_pkg, receipt_version = self._get_pkg_version_from_receipt(fringe_build)

                    # Append pkg_path and version if values are not None
                    # Elif append download as pkg_path and version if populated
                    # Else append download as pkg_path and version will be Unknown
                    if receipt_pkg and receipt_version:
                        built_items.append({"pkg_path": receipt_pkg, "version": receipt_version})
                    elif receipt_version:
                        log.debug("Appending built items with version")
                        built_items.append({"pkg_path": fringe_build, "version": receipt_version})
                    else:
                        built_items.append({"pkg_path": fringe_build})

        return {"built": built_items, "downloaded": downloaded_items, "failed": failed_items}

    def run(self, args):
        """Run this recipe via the autopkg CLI, then parse the resulting report."""
        if not os.path.isfile(REPORT_PLIST_PATH):
            # Create this ourselves so it's ready for AutoPkg to write to
            Path(REPORT_PLIST_PATH).touch()
        try:
            cmd = [
                "/usr/local/bin/autopkg",
                "run",
                "-vvv",
                f'"{self.path}"',
                "--report-plist",
                REPORT_PLIST_PATH,
            ]
            # Optionally record download metadata via the cache postprocessor
            if args.cache:
                cmd.extend(["--post", "io.kandji.cachedata/CacheRecipeMetadata"])
            # Concatenate our commands and run with subprocess
            cmd = " ".join(cmd)
            log.debug(f"Running {cmd}")
            _run_command(cmd)

        except CalledProcessError as e:
            # Non-zero AutoPkg exit — mark errored, but still parse the report below
            log.error(e)
            self.error = True

        self._has_run = True
        self.results = self._parse_report(REPORT_PLIST_PATH)

        # Any built items mean a successful new package
        if self.results.get("built"):
            self.success = True

        return self.results
486 |
487 |
def main():
    """Define recipes to run, write out previous cache (if exists),
    re-export ENV vars for downstream usage, parse/iterate over recipes,
    reporting successes/failures to Slack as they occur"""
    # NOTE(review): `failures` is never appended to anywhere, so the
    # failure-summary block inside the loop is currently dead code — confirm
    # whether failed recipes were meant to be collected here
    failures = []

    # RECIPE env var (", "-separated) takes precedence over --list
    recipes = RECIPE_TO_RUN.split(", ") if RECIPE_TO_RUN else args.list if args.list else None
    if recipes is None:
        log.critical("Recipe --list or RECIPE_TO_RUN not provided!")
        sys.exit(1)
    # Restore cached download metadata before running, if requested
    if args.cache:
        attributes_dict = load_cached_attributes()
        create_file_and_attributes(attributes_dict)
    recipes = parse_recipes(recipes)
    for recipe in recipes:
        log.info(f"Running {recipe.recipe_name}...")
        try:
            handle_recipe(recipe, args)
            slack_alert(recipe, args)
            if failures:
                title = " ".join([f"{recipe.name}" for recipe in failures])
                lines = [f"{recipe.results['message']}\n" for recipe in failures]
                log.error(f"Some failures occurred!\n{title}:\n{lines}")
        # Raised when Recipe.__init__ failed to locate the recipe file
        except AttributeError as e:
            log.error(f"{e}: Skipping run of {recipe.recipe_name}! Recipe was not found.")
            slack_alert(recipe, args)
            pass
515 |
516 |
##############
#### MAIN ####
##############

# Only execute when run as a script (not on import)
if __name__ == "__main__":
    main()
523 |
--------------------------------------------------------------------------------
/codebase/helpers/anka_bootstrap.zsh:
--------------------------------------------------------------------------------
1 | #!/bin/zsh
2 | # Created 06/01/22; NRJA
3 | # Updated 08/15/22; NRJA
4 | # Updated 06/02/23; NRJA
5 | # Updated 06/13/23; NRJA
6 | ################################################################################################
7 | # License Information
8 | ################################################################################################
9 | #
10 | # Copyright 2023 Kandji, Inc.
11 | #
12 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this
13 | # software and associated documentation files (the "Software"), to deal in the Software
14 | # without restriction, including without limitation the rights to use, copy, modify, merge,
15 | # publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
16 | # to whom the Software is furnished to do so, subject to the following conditions:
17 | #
18 | # The above copyright notice and this permission notice shall be included in all copies or
19 | # substantial portions of the Software.
20 | #
21 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
22 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
23 | # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
24 | # FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
25 | # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
26 | # DEALINGS IN THE SOFTWARE.
27 | #
28 | ################################################################################################
29 |
30 | ##############################
31 | ########## VARIABLES #########
32 | ##############################
33 |
##############################################
# PATH TO AUTOPKG RECIPES — SET BY ENV
##############################################
autopkg_recipes_dir="/tmp/${RECIPES_DIR_NAME}"

# Get directory of script execution
dir=$(dirname $ZSH_ARGZERO)

# AutoPkg download/shasum variables:
# latest-release API endpoint, plus a pinned PKG + SHA-256 used as fallback
# when the latest download's checksum doesn't match the known-good value
autopkg_latest_url="https://api.github.com/repos/autopkg/autopkg/releases/latest"
autopkg_pinned_pkg="https://github.com/autopkg/autopkg/releases/download/v2.7.2/autopkg-2.7.2.pkg"
autopkg_pinned_shasum="2ff34daf02256ad81e2c74c83a9f4c312fa2f9dd212aba59e0cef0e6ba1be5c9"
autopkg_temp_dl="/tmp/autopkg.pkg"

# Machine info for stdout header (serial, computer name, non-loopback IPs)
serial_no=$(/usr/sbin/ioreg -c IOPlatformExpertDevice -d 2 | awk -F\" '/IOPlatformSerialNumber/{print $(NF-1)}')
comp_name=$(/usr/sbin/scutil --get ComputerName)
ip_addy=$(/sbin/ifconfig | sed -En 's/127.0.0.1//;s/.*inet (addr:)?(([0-9]*\.){3}[0-9]*).*/\2/p' | xargs)
joint_identifier="${serial_no}/${comp_name}/${ip_addy}"

# Populate logged in username
console_user=$(/usr/bin/stat -f%Su /dev/console)

# Source our Slack notification function (slack_notify) from the helpers dir
source "${dir}/slack_notify.zsh"
59 |
60 | ##############################
61 | ########## FUNCTIONS #########
62 | ##############################
63 |
##############################################
# Identifies and DLs latest release of AutoPkg
# Validates shasum from known good value
# If shasums differ, DLs pinned version
# Will Slack notify about newer version
# Globals:
#   slack_notify
# Outputs:
#   Installs AutoPkg to disk
# Returns:
#   Success, else exit 1 and notify on error
##############################################
function autopkg_dl_install() {
    local dl_shasum

    # Resolve the .pkg asset URL for the newest AutoPkg GitHub release
    autopkg_pkg_dl=$(/usr/bin/curl -s -L "${autopkg_latest_url}" | /usr/bin/sed -n -e 's/^.*"browser_download_url": //p' | /usr/bin/tr -d \")

    # Fetch it quietly, retrying up to 3 additional times on failure
    /usr/bin/curl -s -L --retry 3 "${autopkg_pkg_dl}" -o "${autopkg_temp_dl}"

    # Compare download against our known-good checksum
    # We deliberately don't hardcode the latest URL so a brand-new release
    # surfaces here as a mismatch notification
    dl_shasum=$(/usr/bin/shasum -a 256 "${autopkg_temp_dl}" 2>/dev/null | /usr/bin/awk '{print $1}')
    if [[ "${dl_shasum}" != "${autopkg_pinned_shasum}" ]]; then
        slack_notify --status "NOTICE" --title "Shasum mismatch for AutoPkg download" --text "Attempted download from ${autopkg_pkg_dl}; may be a newer version?\nDownloading AutoPkg from pinned URL ${autopkg_pinned_pkg}"

        # Fall back to the pinned, known-good package
        autopkg_pkg_dl=${autopkg_pinned_pkg}
        /bin/rm "${autopkg_temp_dl}"
        /usr/bin/curl -L "${autopkg_pkg_dl}" -o "${autopkg_temp_dl}"

        # Re-validate the pinned download; a second mismatch is fatal
        dl_shasum=$(/usr/bin/shasum -a 256 "${autopkg_temp_dl}" 2>/dev/null | /usr/bin/awk '{print $1}')
        if [[ "${dl_shasum}" != "${autopkg_pinned_shasum}" ]]; then
            echo "$(date +'%r') : ${joint_identifier}: CRITICAL: Shasum mismatch for AutoPkg download\nAttempted download from ${autopkg_pinned_pkg}, but shasum check failed!"
            slack_notify --status "CRITICAL" --title "Shasum mismatch for AutoPkg download" --text "Attempted download from ${autopkg_pinned_pkg}, but shasum check failed!"
            exit 1
        fi
    fi

    echo "$(date +'%r') : ${joint_identifier}: AutoPkg download complete — beginning install..."

    # Install core AutoPkg and record the installer's exit status
    /usr/sbin/installer -pkg "${autopkg_temp_dl}" -target / 2>/dev/null
    exit_code=$?

    if [[ "${exit_code}" == 0 ]]; then
        echo "$(date +'%r') : ${joint_identifier}: Successfully installed AutoPkg from core project"
    else
        slack_notify --status "ERROR" --title "AutoPkg Runner Failure" --text "AutoPkg install failed with error code ${exit_code}" --host_info "yes"
        exit 1
    fi

    # Clean up the temporary download
    /bin/rm "${autopkg_temp_dl}"
}
119 |
120 |
##############################################
# DL + install Rosetta 2 if needed for system
# Globals:
#   slack_notify
# Outputs:
#   Installs Rosetta 2 on disk
# Returns:
#   Success, else exit 1 and notify on error
##############################################
function rosetta_install() {
    # Install Rosetta 2 non-interactively on Apple silicon; suppress stdout chatter
    /usr/sbin/softwareupdate --install-rosetta --agree-to-license 1>/dev/null
    exit_code=$?

    # Report the outcome; a failed Rosetta install is fatal for the runner
    if [[ "${exit_code}" -eq 0 ]]; then
        echo "$(date +'%r') : ${joint_identifier}: Successfully installed Rosetta 2 on Apple silicon hardware"
    else
        slack_notify --status "ERROR" --title "AutoPkg Runner Failure" --text "Rosetta 2 install failed on Apple silicon HW with error code ${exit_code}" --host_info "yes"
        exit 1
    fi
}
144 |
##############################################
# Configures AutoPkg config changes ownership
# to logged-in user; pip installs requests
# Globals:
#   slack_notify
# Outputs:
#   Installs Python requests
# Returns:
#   Success, else exit 1 and notify on error
##############################################
function custom_autopkg_config() {

    echo "$(date +'%r') : ${joint_identifier}: Customizing AutoPkg config..."

    # Point AutoPkg's recipe search at our Git-synced recipe directory
    /usr/bin/defaults write "/Users/${console_user}/Library/Preferences/com.github.autopkg.plist" RECIPE_SEARCH_DIRS "${autopkg_recipes_dir}"

    # Hand ownership of the AutoPkg prefs and recipe dir to the console user
    /usr/sbin/chown "${console_user}:staff" "/Users/${console_user}/Library/Preferences/com.github.autopkg.plist"
    /usr/sbin/chown -R "${console_user}:staff" "${autopkg_recipes_dir}"

    # Install requests with AutoPkg's bundled Python interpreter
    # pip run as root emits expected stderr noise, so silence all output
    # and rely on the return code alone
    /usr/local/autopkg/python -m pip install requests >/dev/null 2>&1
    exit_code=$?

    if [[ "${exit_code}" -eq 0 ]]; then
        echo "$(date +'%r') : ${joint_identifier}: Successfully installed AutoPkg Python dependencies"
    else
        slack_notify --status "ERROR" --title "AutoPkg Runner Failure" --text "AutoPkg Python dependencies failed to install with error code ${exit_code}" --host_info "yes"
        exit 1
    fi
}
178 |
##############################################
# Main run with logic checks for function exec
##############################################
function main() {

    # Install AutoPkg only when no working copy already responds on disk
    if ! /usr/local/bin/autopkg version >/dev/null 2>&1; then
        echo "$(date +'%r') : ${joint_identifier}: No AutoPkg found — beginning download..."
        autopkg_dl_install
    fi

    # Non-Intel CPU with no oahd process (Rosetta daemon) running → install Rosetta 2
    if [[ -z $(/usr/sbin/sysctl -n machdep.cpu.brand_string | /usr/bin/grep -oi "Intel") ]] && [[ -z $(/usr/bin/pgrep oahd) ]]; then
        echo "$(date +'%r') : ${joint_identifier}: Hardware type is not Intel and Rosetta 2 was not detected... installing."
        rosetta_install
    fi

    custom_autopkg_config
}
196 |
###############
##### MAIN ####
###############

# Script entry point: install AutoPkg/Rosetta as needed, then configure AutoPkg
main
202 |
--------------------------------------------------------------------------------
/codebase/helpers/slack_notify.zsh:
--------------------------------------------------------------------------------
1 | #!/bin/zsh
2 | # Created 08/15/22; NRJA
3 | # Updated 06/01/23; NRJA
4 | ################################################################################################
5 | # License Information
6 | ################################################################################################
7 | #
8 | # Copyright 2023 Kandji, Inc.
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this
11 | # software and associated documentation files (the "Software"), to deal in the Software
12 | # without restriction, including without limitation the rights to use, copy, modify, merge,
13 | # publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
14 | # to whom the Software is furnished to do so, subject to the following conditions:
15 | #
16 | # The above copyright notice and this permission notice shall be included in all copies or
17 | # substantial portions of the Software.
18 | #
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
20 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
21 | # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
22 | # FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
23 | # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
24 | # DEALINGS IN THE SOFTWARE.
25 | #
26 | ################################################################################################
27 |
##############################
########## VARIABLES #########
##############################

# Slack webhook URL is injected via ENV (forwarded by the orchestrator)
slack_webhook_url=${SLACK_WEBHOOK_TOKEN}
slack_footer_icon="https://avatars.githubusercontent.com/u/5170557?s=200&v=4"

# Check if webhook is defined/expected format — if not, skip Slack notifications
# Dots are escaped so the pattern matches the literal Slack hostname; the
# previous unescaped form let "." match any character
if [[ ! "${slack_webhook_url}" =~ hooks\.slack\.com/services ]]; then
    NO_SLACK=true
fi
39 |
40 | ##############################
41 | ########## FUNCTIONS #########
42 | ##############################
43 |
44 | ##############################################
45 | # Sourceable function to send Slack messages
46 | # Globals:
47 | # slack_webhook_url
48 | # Arguments:
49 | # --status (str): SUCCESS/NOTICE/WARNING/ERROR
50 | # --title (str): Header of Slack notification
51 | # --text (str): Body of Slack notification
52 | # --host_info (bool): Send details of macOS host
53 | # Returns:
54 | # POST message to slack_webhook_url channel
55 | ##############################################
56 | function slack_notify() {
57 |
58 | zparseopts -D -E -A opts -status: -title: -link: -text: -host_info:
59 | # shellcheck disable=SC2154
60 | if [[ -n "${NO_SLACK}" ]]; then
61 | echo "Skipping $opts[--status] Slack notification with text $opts[--text]"
62 | return 0
63 | fi
64 |
65 | case $opts[--status] in
66 | SUCCESS)
67 | # Set alert color to green
68 | color="00FF00"
69 | icon="https://emoji.slack-edge.com/T9C5BNZ0D/visible_happiness/58eb35fc3dddedbd.png"
70 | ;;
71 | NOTICE)
72 | # Set alert color to magenta
73 | color="FF00C8"
74 | icon="https://emoji.slack-edge.com/T9C5BNZ0D/autoapps_intensify/364cc72c5f04f5f5.gif"
75 | ;;
76 | WARNING)
77 | # Set alert color to orange
78 | color="E8793B"
79 | icon="https://emoji.slack-edge.com/T9C5BNZ0D/yellow_alert/94fbc21b9646e931.gif"
80 | ;;
81 | ERROR)
82 | # Set alert color to red
83 | color="FF0000"
84 | icon="https://emoji.slack-edge.com/T9C5BNZ0D/red_alert/54c511cbd0ef70e5.gif"
85 | ;;
86 | *)
87 | # Else, set alert to black
88 | color="000000"
89 | icon="https://emoji.slack-edge.com/T9C5BNZ0D/spinning_beachball_of_death/e398593cdbd8557c.gif"
90 | ;;
91 | esac
92 |
93 | read -r -d '' payload_builder </dev/null
123 | }
124 |
--------------------------------------------------------------------------------
/codebase/main_orchestrator.zsh:
--------------------------------------------------------------------------------
1 | #!/bin/zsh
2 | # Created 06/21/22; NRJA
3 | # Updated 10/05/22; NRJA
4 | # Updated 06/02/23; NRJA
5 | # Updated 06/13/23; NRJA
6 | ################################################################################################
7 | # License Information
8 | ################################################################################################
9 | #
10 | # Copyright 2023 Kandji, Inc.
11 | #
12 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this
13 | # software and associated documentation files (the "Software"), to deal in the Software
14 | # without restriction, including without limitation the rights to use, copy, modify, merge,
15 | # publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
16 | # to whom the Software is furnished to do so, subject to the following conditions:
17 | #
18 | # The above copyright notice and this permission notice shall be included in all copies or
19 | # substantial portions of the Software.
20 | #
21 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
22 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
23 | # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
24 | # FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
25 | # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
26 | # DEALINGS IN THE SOFTWARE.
27 | #
28 | ################################################################################################
29 |
#############################
######### ARGUMENTS #########
#############################

# Set arguments with zparseopts
# -D removes matched options from $@, -E tolerates unrecognized ones,
# -a collects the matches into the $opts array
zparseopts -D -E -a opts h -help p -prechecks v -verbose

# shellcheck disable=SC2154
# Set args for verbosity
# The (I) subscript flag returns the index of a matching element, 0 if none
if (( ${opts[(I)(-v|--verbose)]} )); then
    set -x
fi

# Set args for help — print usage and exit before doing any work
if (( ${opts[(I)(-h|--help)]} )); then
    echo "Usage: ./main_orchestrator.zsh [--help|--prechecks|--verbose] [arguments...]"
    echo
    echo "Spins up a Docker container, instantiates, and connects to an Anka VM runner"
    echo "Clones, installs, and executes AutoPkg runners for selected recipes"
    exit 0
fi
51 |
#############################
######### VARIABLES #########
#############################

# Get script name
script_exec=$(basename $ZSH_ARGZERO)
# Get directory of script execution
dir=$(dirname $ZSH_ARGZERO)
# Absolute path of script exec dir
abs_dir=$(realpath "${dir}")
# Absolute path of script exec dir parent
parent_abs_dir=$(dirname "${abs_dir}")

# Path definition for SSH keypair
key_dir="/tmp/anka_vm"

# Remote username for VM
remote_user="anka"
# Remote password for VM
# PW available on vendor website, so not privileged
remote_pass="admin"

# Define SSH key name (both private + public)
ssh_key="${remote_user}_vm"
public_key="${ssh_key}.pub"

# File written to disk for installing public SSH key
public_key_exec="/tmp/public_key_exec.zsh"

# JSON filename with recipe names
recipes_to_run="recipe_list.json"

# Remote files/dirs written to be scp'd back to host
remote_metadata_json="/tmp/autopkg_metadata.json"
remote_cache_dir="/Users/${remote_user}/Library/AutoPkg/Cache"
remote_report_plist="/tmp/combined_autopkg_results.plist"

# If running locally on macOS, prepare the results and attach timestamp
if [[ $(uname) == "Darwin" ]]; then
    mkdir -p "${parent_abs_dir}/autopkg-runner-results"
    local_upload_report="${parent_abs_dir}/autopkg-runner-results/autopkg_full_results_$(date +%Y-%m-%d_%H%M%S).plist"
else
    # Docker takes care of this for us at the end
    local_upload_report="${parent_abs_dir}/autopkg_full_results.plist"
fi

# Local files copied over to or back from remote VM
local_upload_metadata="${parent_abs_dir}/autopkg_metadata.json"
local_save_metadata="${parent_abs_dir}/last_autopkg_metadata.json"

# Set as array because could also auth through token or SSH key in ENV
declare -a env_vars
# Populate ENV vars used on remote Mac (forwarded over SSH via SendEnv)
env_vars=(
    SLACK_WEBHOOK_TOKEN
    RECIPES_DIR_NAME
)

# Set vars with absolute paths of files/folders
helpers_abs_path=$(find "${abs_dir}" -name "helpers")
apkg_tools_abs_path=$(find "${abs_dir}" -name "autopkg_tools.py")
recipe_list_abs_path=$(find "${parent_abs_dir}" -name "recipe_list.json")
running_vms_abs_path=$(find "${parent_abs_dir}" -name "running_vms.json")
# If the below isn't populated, we'll catch that error during prechecks
recipes_abs_path=$(find "${parent_abs_dir}" -name "${RECIPES_DIR_NAME}")

# Concatenate env vars with SendEnv SSH options
# NOTE(review): the embedded double quotes survive into the ssh arguments
# (ssh_exec word-splits this string but zsh does not strip the quotes);
# ssh's own option parser appears to handle them — confirm against sshd
ssh_env_flags=$(printf -- '-o SendEnv="%s" ' "${env_vars[@]}" | sed -e "s/ *$//")

# Import our Slack notification code/function
source "${dir}/helpers/slack_notify.zsh"
123 |
124 | ##############################
125 | ########## FUNCTIONS #########
126 | ##############################
127 |
128 |
##############################################
# Checks under the user context if an Anka VM
# is currently active and running
# Returns:
#   1 if no running Anka VM found
##############################################
function is_vm_running() {

    local running_vms
    # `anka list -r` prints only running VMs; empty output means none
    running_vms=$(anka list -r 2>/dev/null)

    if [[ -z "${running_vms}" ]]; then
        echo "$(date +'%r') : ${script_exec}: WARNING: No running Anka VMs found!"
        echo "$(date +'%r') : ${script_exec}: Run sudo ./anka_install_create_clone.zsh to clone a new VM"
        return 1
    fi
}
143 |
##############################################
# Runs prechecks to validate dependencies are
# present and required infra reachable
# Globals:
#   slack_notify
# Returns:
#   Success, else exit 1 and notify on error
##############################################
function prechecks() {

    declare -a undefined_vars
    # Iterate over expected env var names
    # Add any undefined ones to our array
    # ${(P)ev} is zsh indirect expansion: the value of the variable named by $ev
    for ev in "${env_vars[@]}"; do
        if [[ -z "${(P)ev}" ]]; then
            echo "$(date +'%r') : ${script_exec}: ERROR: No definition for ENV ${ev}"
            undefined_vars+=${ev}
        else
            echo "$(date +'%r') : ${script_exec}: ENV ${ev} defined"
        fi
    done

    # If any env vars were undefined, report as error
    if [[ -n "${undefined_vars[*]}" ]]; then
        echo "$(date +'%r') : ${script_exec}: CRITICAL: ENV variable(s) never defined for ${undefined_vars[*]}"
        # A missing Slack webhook alone is non-fatal — notifications are just skipped
        if [[ "${undefined_vars[*]}" == "SLACK_WEBHOOK_TOKEN" ]]; then
            echo "$(date +'%r') : ${script_exec}: WARNING: Runtime will continue, but no Slack notifications will be sent"
        else
            # If any undefined env vars outside of Slack webhook, exit 1
            slack_notify --status "ERROR" --title "Environment Variable Error" --text "Values never defined for ENV(s) ${undefined_vars[*]}" 2>/dev/null
            exit 1
        fi
    fi

    # Check that anka in path and version returns 0
    if anka version >/dev/null 2>&1; then
        # Validate VM is running — func messages above
        if ! is_vm_running; then
            exit 1
        fi
        # Source name and IP directly from Anka if on macOS
        # -j emits JSON; plutil reads it from stdin and extracts raw values
        running_anka_vm=$(anka -j list -r -f name -f ip)
        anka_name=$(plutil -extract body.0.name raw -o - - <<< ${running_anka_vm})
        anka_ip=$(plutil -extract body.0.ip raw -o - - <<< ${running_anka_vm})
    # If running anka version returns false, should be running in Docker with JSON file available
    elif test -f "${running_vms_abs_path}"; then
        # Validate jq is installed
        if ! which jq >/dev/null 2>&1; then
            slack_notify --status "ERROR" --title "Docker Config Error" --text "jq not located after running apt-get install jq"
            echo "$(date +'%r') : ${script_exec}: ERROR: jq not installed! Please run apt-get install jq and try again"
            exit 1
        fi
        # Pull the VM name/IP out of the JSON written by the VM host
        anka_name=$(jq -r '.body[].name' "${running_vms_abs_path}")
        anka_ip=$(jq -r '.body[].ip' "${running_vms_abs_path}")
    else
        echo "$(date +'%r') : ${script_exec}: ERROR: Couldn't locate name/IP of running VM"
        echo "$(date +'%r') : ${script_exec}: Validate cloned VM is running and try again"
        exit 1
    fi

    if [[ $(uname) == "Darwin" ]]; then
        echo "$(date +'%r') : ${script_exec}: \n\nINFO: Running main_orchestrator.zsh from macOS host\n"
    fi
}
208 |
# Set args for prechecks (after defining our precheck func)
# --prechecks runs validation only, then exits without orchestrating a run
if (( ${opts[(I)(-p|--prechecks)]} )); then
    echo "$(date +'%r') : ${script_exec}: Running prechecks..."
    prechecks
    echo "$(date +'%r') : ${script_exec}: Exiting..."
    exit 0
fi
216 |
217 |
##############################################
# Runs SSH command with flags on remote host
# Arguments:
#   "${1}", IP/hostname of remote host
#   "${2}", command for remote execution
##############################################
function ssh_exec() {

    # -q quiet mode; key-based auth with our generated keypair. The unquoted
    # $(printf ...) relies on zsh word-splitting of command substitution to
    # expand ssh_env_flags into separate "-o SendEnv=..." arguments
    ssh -q -i "${key_dir}/${ssh_key}" $(printf '%s' $ssh_env_flags) -o StrictHostKeyChecking=no "${remote_user}@${1}" "${2}"
}
228 |
229 | ##############################################
230 | # Generates SSH keypair and assign to heredoc
231 | # written to disk to deploy public key for VM
232 | # Outputs:
233 | # Writes VM SSH setup to $public_key_exec
234 | # Remove $public_key_exec once run on VM
235 | ##############################################
236 | function generate_ssh_key() {
237 |
238 | if [[ -e ${key_dir} ]]; then
239 | /bin/rm -r ${key_dir}
240 | fi
241 |
242 | mkdir -p "${key_dir}"
243 |
244 | echo "$(date +'%r') : ${script_exec}: Generating SSH keypair..."
245 | /usr/bin/ssh-keygen -b 2048 -t rsa -f ${key_dir}/${ssh_key} -q -N ""
246 |
247 | # Validate anka binary
248 | if which anka >/dev/null 2>&1; then
249 | public_key_contents=$(/bin/cat "${key_dir}/${public_key}")
250 |
251 | # Assign our runtime command for VMs to a heredoc variable
252 | /bin/cat > "${public_key_exec}" <> .ssh/authorized_keys && /bin/chmod 640 .ssh/authorized_keys && chown -R ${remote_user} .ssh
256 | # Self-destruct this script
257 | /bin/rm "\${0}"
258 | exit 0
259 | EOF
260 | anka cp ${public_key_exec} ${anka_name}:/tmp
261 | anka run ${anka_name} sudo zsh ${public_key_exec}
262 | /bin/rm "${public_key_exec}"
263 | else
264 | sshpass -p "${remote_pass}" ssh-copy-id -o StrictHostKeyChecking=no -i "${key_dir}/${ssh_key}" "${remote_user}@${anka_ip}"
265 | fi
266 | }
267 |
##############################################
# Validates if AutoPkg recipe metadata exists
# Copies that + bootstrap over, adds ENV vars
# Arguments:
#   Anka VM IP: "${1}"
##############################################
function stage_runner() {

    ##########################################
    # Copy over bootstrap, helpers, metadata
    ##########################################

    if [[ -f "${local_upload_metadata}" ]] && [[ ! -f "${local_save_metadata}" ]]; then
        # If expected metadata from last run isn't present but local_upload_metadata is, use for comparison
        cp "${local_upload_metadata}" "${local_save_metadata}"
    fi

    if [[ -f "${local_save_metadata}" ]]; then
        # Rename last_autopkg_metadata.json to autopkg_metadata.json remotely with our scp below
        scp -q -o LogLevel=QUIET -i "${key_dir}/${ssh_key}" "${local_save_metadata}" "${remote_user}@${1}":"${remote_metadata_json}"
    fi

    # Copy over all helpers for AutoPkg runtime and bootstrapping
    scp -q -o LogLevel=QUIET -r -i "${key_dir}/${ssh_key}" -o StrictHostKeyChecking=no "${recipes_abs_path}" "${recipe_list_abs_path}" "${apkg_tools_abs_path}" "${helpers_abs_path}/"* "${remote_user}@${1}":"/tmp"

    ##########################################
    # Populate ENV in remote sshd_config
    ##########################################

    # Temporarily make sshd_config world-writable so the appends below succeed:
    # the >> redirection runs as the unprivileged remote shell, not under sudo
    ssh_exec "${1}" "sudo chmod 666 /etc/ssh/sshd_config"
    # Iterate over and write out our ENV vars to the remote Mac
    for ev in "${env_vars[@]}"; do
        ssh_exec "${1}" "sudo echo AcceptEnv ${ev} >> /etc/ssh/sshd_config"
    done
    # Restore standard permissions once the ENV entries are written
    ssh_exec "${1}" "sudo chmod 644 /etc/ssh/sshd_config"
}
304 |
305 |
##############################################
# Executes bootstrap on VM with sudo, runs
# AutoPkg recipe builds from ${recipes_to_run}
# Reports on metadata diffs if JSON updated
# Arguments:
#   Anka VM IP: "${1}"
# Outputs:
#   Copies MD + report back to container/host
##############################################
function execute_runner() {

    ##########################################
    # Bootstrap Anka VM
    ##########################################

    # sudo -E preserves the SSH-forwarded ENV vars for the bootstrap script
    ssh_exec "${1}" "sudo -E zsh /tmp/anka_bootstrap.zsh"

    boot_exit_code=$?

    # Check exit code of bootstrap
    if [[ "${boot_exit_code}" -ne 0 ]]; then
        echo "$(date +'%r') : ${script_exec}: ERROR: Bootstrap exited with fatal error ${boot_exit_code}; aborting AutoPkg run... "
        slack_notify --status "ERROR" --title "Bootstrap Failure" --text "Bootstrap exited with fatal error ${boot_exit_code}\nAborting AutoPkg run..."
        exit ${boot_exit_code}
    fi

    ##########################################
    # Run AutoPkg recipes
    ##########################################

    # Run Python unbuffered (-u) so stdout is immediately returned
    ssh_exec "${1}" "/usr/local/autopkg/python -u /tmp/autopkg_tools.py --list ${recipes_to_run} --cache"

    apkgr_exit_code=$?

    # Check exit code of autopkg-runner
    if [[ "${apkgr_exit_code}" -ne 0 ]]; then
        echo "$(date +'%r') : ${script_exec}: ERROR: AutoPkg runner exited with fatal error ${apkgr_exit_code}; aborting AutoPkg run... "
        slack_notify --status "ERROR" --title "AutoPkg runner failure" --text "Runner exited with fatal error ${apkgr_exit_code}\nAborting AutoPkg run..."
        exit ${apkgr_exit_code}
    else
        echo "$(date +'%r') : ${script_exec}: SUCCESS: AutoPkg runner finished with exit code ${apkgr_exit_code}\n"
    fi

    ##########################################
    # Compile and scp back reports/metadata
    ##########################################

    # Create new plist; swap dict values for array
    # NOTE(review): relies on PlistBuddy 'Save' creating the file at this path — confirm
    ssh_exec "${1}" \
        "/usr/libexec/PlistBuddy -c 'Save' \"${remote_report_plist}\"; /usr/bin/sed -i '' 's/dict/array/g' \"${remote_report_plist}\""

    echo "$(date +'%r') : ${script_exec}: Combining below AutoPkg receipts into single file..."

    # Run a find on the remote Mac, looking for recipe plists that ran successfully, and then merge them into the unified AutoPkg results plist created above
    # grep -L keeps only receipts NOT containing 'stop_processing_recipe' (i.e. completed runs)
    ssh_exec "${1}" \
        "/usr/bin/find \"${remote_cache_dir}\" -type f -iname \"*receipt*plist\" -exec grep -L 'stop_processing_recipe' {} + -exec /usr/libexec/PlistBuddy -x -c 'Merge \"{}\"' \"${remote_report_plist}\" \;"

    # If all looks good, bring back the metadata about our build and upload below if hashes differ
    scp -q -o LogLevel=QUIET -i "${key_dir}/${ssh_key}" "${remote_user}@${1}":"${remote_metadata_json}" "${local_upload_metadata}"
    # Copy full report plist from Cache dir
    scp -q -o LogLevel=QUIET -i "${key_dir}/${ssh_key}" "${remote_user}@${1}":"${remote_report_plist}" "${local_upload_report}"

    # ##########################################
    # Check for diffs, and if MD matches, rm old
    # ##########################################

    # # If on macOS vs Linux, need different commands to get the sha256 value
    if [[ $(uname) == "Darwin" ]]; then
        new_sha256=$(shasum -a 256 "${local_upload_metadata}" 2>/dev/null | awk '{print $1}')
        old_sha256=$(shasum -a 256 "${local_save_metadata}" 2>/dev/null | awk '{print $1}')
    # Linux uses a standalone command for SHA256
    elif [[ $(uname) == "Linux" ]]; then
        new_sha256=$(sha256sum "${local_upload_metadata}" 2>/dev/null | awk '{print $1}')
        old_sha256=$(sha256sum "${local_save_metadata}" 2>/dev/null | awk '{print $1}')
    fi

    # Differing hashes mean at least one recipe produced a new download
    if [[ "${new_sha256}" != "${old_sha256}" ]]; then
        echo "$(date +'%r') : ${script_exec}: SHA256 metadata updated for new recipe downloads"
    fi
    # Discard last_autopkg_metadata.json
    /bin/rm -f "${local_save_metadata}"
}
389 |
390 |
##############################################
# Main run
# Orchestrates the full AutoPkg runtime:
# prechecks, SSH key generation, staging, and
# remote execution on the Anka VM
# Globals:
#   slack_notify, prechecks, generate_ssh_key,
#   stage_runner, execute_runner, anka_ip
# Outputs:
#   Logs start/end timestamps and total runtime
# Returns:
#   exit 0 on success; exits earlier on failure
##############################################
function main() {

    # Timestamp of start
    start_epoch=$(date +%s)

    echo "$(date +'%r') : ${script_exec}: Executing AutoPkg runtime at $(date +"%r %Z")"
    slack_notify --status "NOTICE" --title "Executing AutoPkg" --text "Beginning runtime at $(date +"%r %Z")"

    # Run prechecks — exit if any fail
    prechecks || exit 1

    # Generate SSH keypair
    generate_ssh_key

    # Stage remote runtime
    stage_runner "${anka_ip}"

    # Execute remote runtime
    execute_runner "${anka_ip}"

    # Timestamp of finish
    end_epoch=$(date +%s)
    # Get time elapsed in seconds, convert to hours + minutes where applicable
    # Shell arithmetic expansion replaces the archaic external `expr` call
    exec_time=$(awk '{printf "%d hours, %02d minutes, %02d seconds", $1/3600, ($1/60)%60, $1%60}' <<< $(( end_epoch - start_epoch )))

    echo "$(date +'%r') : ${script_exec}: Terminating AutoPkg runtime at $(date +"%r %Z")\nExecution took ${exec_time} to complete\n"
    slack_notify --status "NOTICE" --title "Terminating AutoPkg" --text "Ending runtime at $(date +"%r %Z")\nExecution took ${exec_time} to complete"

    exit 0
}
424 |
425 |
##############
#### MAIN ####
##############

# Script entry point: run the full orchestration flow
main
431 |
--------------------------------------------------------------------------------
/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "host_runtime": "local",
3 | "local_autopkg_recipes_dir": "./example-recipes",
4 | "slack_notify": false
5 | }
6 |
--------------------------------------------------------------------------------
/example-recipes/AdobeAcrobatProDC.pkg.recipe:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Description
6 | Downloads the current release version of AdobeAcrobatPro and extracts the installer package.
7 | Identifier
8 | io.kandji.pkg.adobeacrobatpro
9 | Input
10 |
11 | NAME
12 | AdobeAcrobatPro
13 | APP_NAME
14 | Adobe Acrobat.app
15 | DOWNLOAD_URL
16 | https://trials.adobe.com/AdobeProducts/APRO/Acrobat_HelpX/osx10/Acrobat_DC_Web_WWMUI.dmg
17 | SUB_PKG_WITH_APP
18 | application.pkg/Payload
19 | PAYLOAD_INFO_PLIST_PATH
20 | payload/%APP_NAME%/Contents/Info.plist
21 | PKG_SIGNATURE
22 |
23 | Developer ID Installer: Adobe Inc. (JQ525L2MZD)
24 | Developer ID Certification Authority
25 | Apple Root CA
26 |
27 |
28 | MinimumVersion
29 | 1.0.0
30 | Process
31 |
32 |
33 | Processor
34 | URLDownloader
35 | Arguments
36 |
37 | url
38 | %DOWNLOAD_URL%
39 | filename
40 | %NAME%.dmg
41 |
42 |
43 |
44 | Processor
45 | StopProcessingIf
46 | Arguments
47 |
48 | predicate
49 | download_changed == False
50 |
51 |
52 |
53 | Processor
54 | EndOfCheckPhase
55 |
56 |
57 | Processor
58 | FileFinder
59 | Arguments
60 |
61 | pattern
62 | %pathname%/**/*.pkg
63 |
64 |
65 |
66 | Processor
67 | CodeSignatureVerifier
68 | Arguments
69 |
70 | input_path
71 | %pathname%/%dmg_found_filename%
72 | expected_authority_names
73 | %PKG_SIGNATURE%
74 |
75 |
76 |
77 | Processor
78 | FlatPkgUnpacker
79 | Arguments
80 |
81 | flat_pkg_path
82 | %pathname%/%dmg_found_filename%
83 | destination_path
84 | %RECIPE_CACHE_DIR%/unpack
85 |
86 |
87 |
88 | Processor
89 | PkgPayloadUnpacker
90 | Arguments
91 |
92 | pkg_payload_path
93 | %RECIPE_CACHE_DIR%/unpack/%SUB_PKG_WITH_APP%
94 | destination_path
95 | %RECIPE_CACHE_DIR%/payload
96 |
97 |
98 |
99 | Processor
100 | PlistReader
101 | Arguments
102 |
103 | info_path
104 | %RECIPE_CACHE_DIR%/%PAYLOAD_INFO_PLIST_PATH%
105 | plist_keys
106 |
107 | CFBundleShortVersionString
108 | version
109 |
110 |
111 |
112 |
113 | Processor
114 | PkgCopier
115 | Arguments
116 |
117 | source_pkg
118 | %pathname%/%dmg_found_filename%
119 | pkg_path
120 | %RECIPE_CACHE_DIR%/%NAME%-%version%.pkg
121 |
122 |
123 |
124 | Processor
125 | PathDeleter
126 | Arguments
127 |
128 | path_list
129 |
130 | %RECIPE_CACHE_DIR%/unpack
131 | %RECIPE_CACHE_DIR%/payload
132 |
133 |
134 |
135 |
136 |
137 |
138 |
--------------------------------------------------------------------------------
/example-recipes/AndroidStudio.pkg.recipe:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Description
6 | Downloads the latest Apple silicon version of Android Studio, extracts it from a DMG, and creates a PKG.
7 | Identifier
8 | io.kandji.pkg.androidstudio
9 | Input
10 |
11 | NAME
12 | Android Studio
13 | APP_NAME
14 | %NAME%.app
15 | SEARCH_URL
16 | https://developer.android.com/studio
17 | RE_PATTERN
18 | href=\"(https\://redirector.*android/studio/install/.+/android-studio.+mac_arm\.dmg)\"
19 | CODE_SIGNATURE
20 | identifier "com.google.android.studio" and anchor apple generic and certificate 1[field.1.2.840.113635.100.6.2.6] /* exists */ and certificate leaf[field.1.2.840.113635.100.6.1.13] /* exists */ and certificate leaf[subject.OU] = EQHXZ8M8AV
21 |
22 | MinimumVersion
23 | 1.0.0
24 | Process
25 |
26 |
27 | Processor
28 | URLTextSearcher
29 | Arguments
30 |
31 | re_pattern
32 | %RE_PATTERN%
33 | url
34 | %SEARCH_URL%
35 |
36 |
37 |
38 | Processor
39 | URLDownloader
40 | Arguments
41 |
42 | filename
43 | %NAME%.dmg
44 | url
45 | %match%
46 | CHECK_FILESIZE_ONLY
47 |
48 |
49 |
50 |
51 | Processor
52 | StopProcessingIf
53 | Arguments
54 |
55 | predicate
56 | download_changed == False
57 |
58 |
59 |
60 | Processor
61 | EndOfCheckPhase
62 |
63 |
64 | Processor
65 | CodeSignatureVerifier
66 | Arguments
67 |
68 | input_path
69 | %pathname%/%APP_NAME%
70 | requirement
71 | %CODE_SIGNATURE%
72 | strict_verification
73 |
74 |
75 |
76 |
77 | Processor
78 | AppDmgVersioner
79 | Arguments
80 |
81 | dmg_path
82 | %pathname%
83 |
84 |
85 |
86 | Processor
87 | AppPkgCreator
88 | Arguments
89 |
90 | app_path
91 | %pathname%/%APP_NAME%
92 | bundleid
93 | %bundleid%
94 | version
95 | %version%
96 | pkg_path
97 | %RECIPE_CACHE_DIR%/%NAME%-%version%.pkg
98 |
99 |
100 |
101 |
102 |
103 |
--------------------------------------------------------------------------------
/example-recipes/BraveBrowser.pkg.recipe:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Description
6 | Downloads the latest version of Brave Browser, extracts it from a DMG, and creates a PKG.
7 | Identifier
8 | io.kandji.pkg.bravebrowser
9 | Input
10 |
11 | NAME
12 | Brave Browser
13 | APP_NAME
14 | %NAME%.app
15 | DOWNLOAD_URL
16 | https://referrals.brave.com/latest/Brave-Browser.dmg
17 | CODE_SIGNATURE
18 | identifier "com.brave.Browser" and anchor apple generic and certificate 1[field.1.2.840.113635.100.6.2.6] /* exists */ and certificate leaf[field.1.2.840.113635.100.6.1.13] /* exists */ and certificate leaf[subject.OU] = KL8N8XSYF4
19 |
20 | MinimumVersion
21 | 1.0.0
22 | Process
23 |
24 |
25 | Processor
26 | URLDownloader
27 | Arguments
28 |
29 | url
30 | %DOWNLOAD_URL%
31 | filename
32 | %NAME%.dmg
33 |
34 |
35 |
36 | Processor
37 | StopProcessingIf
38 | Arguments
39 |
40 | predicate
41 | download_changed == False
42 |
43 |
44 |
45 | Processor
46 | EndOfCheckPhase
47 |
48 |
49 | Processor
50 | CodeSignatureVerifier
51 | Arguments
52 |
53 | input_path
54 | %pathname%/%APP_NAME%
55 | requirement
56 | %CODE_SIGNATURE%
57 | deep_verification
58 |
59 |
60 |
61 |
62 | Processor
63 | AppDmgVersioner
64 | Arguments
65 |
66 | dmg_path
67 | %pathname%
68 |
69 |
70 |
71 | Processor
72 | AppPkgCreator
73 | Arguments
74 |
75 | app_path
76 | %pathname%/%APP_NAME%
77 | bundleid
78 | %bundleid%
79 | version
80 | %version%
81 | pkg_path
82 | %RECIPE_CACHE_DIR%/%NAME%-%version%.pkg
83 |
84 |
85 |
86 |
87 |
88 |
--------------------------------------------------------------------------------
/example-recipes/CacheRecipeMetadata/CacheRecipeMetadata.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Updated 09/01/22; NRJA
3 | # Updated 06/07/23; NRJA
4 | ################################################################################################
5 | # License Information
6 | ################################################################################################
7 | #
8 | # Copyright 2023 Kandji, Inc.
9 | #
10 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this
11 | # software and associated documentation files (the "Software"), to deal in the Software
12 | # without restriction, including without limitation the rights to use, copy, modify, merge,
13 | # publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
14 | # to whom the Software is furnished to do so, subject to the following conditions:
15 | #
16 | # The above copyright notice and this permission notice shall be included in all copies or
17 | # substantial portions of the Software.
18 | #
19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
20 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
21 | # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
22 | # FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
23 | # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
24 | # DEALINGS IN THE SOFTWARE.
25 | #
26 | ################################################################################################
27 | # Based originally on LastRecipeRunResult
28 | # Copyright 2019-Present Graham R Pugh
29 | # Copyright 2022 Gusto, Inc. (https://www.gusto.com/)
30 | #
31 | # Licensed under the Apache License, Version 2.0 (the "License");
32 | # you may not use this file except in compliance with the License.
33 | # You may obtain a copy of the License at
34 | #
35 | # https://www.apache.org/licenses/LICENSE-2.0
36 | #
37 | # Unless required by applicable law or agreed to in writing, software
38 | # distributed under the License is distributed on an "AS IS" BASIS,
39 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
40 | # See the License for the specific language governing permissions and
41 | # limitations under the License.
42 | """See docstring for CacheRecipeMetadata class"""
43 |
44 | #######################
45 | ####### IMPORTS #######
46 | #######################
47 |
48 | import json
49 | import os
50 | from datetime import datetime
51 | from pathlib import Path
52 | from subprocess import PIPE, STDOUT, run
53 |
54 | from autopkglib import Processor # pylint: disable=import-error
55 |
56 | #############################
57 | ######### VARIABLES #########
58 | #############################
59 |
# Public API of this module: autopkg discovers processors via __all__.
__all__ = ["CacheRecipeMetadata"]
61 |
62 | #############################
63 | ######### FUNCTIONS #########
64 | #############################
65 |
66 |
67 | def _run_command(shell_cmd):
68 | """Function accepts argument of shell command as `shell_cmd`
69 | Returns shell stderr + stdout and shell cmd exit code"""
70 | raw_out = run(shell_cmd, stdout=PIPE, stderr=STDOUT, shell=True, check=False)
71 | decoded_out = raw_out.stdout.decode().strip()
72 | exit_code = raw_out.returncode
73 | return exit_code, decoded_out
74 |
75 |
class CacheRecipeMetadata(Processor):
    """An AutoPkg processor for writing recipe metadata to a JSON file.

    After a recipe run, records the download URL, local path, etag,
    last-modified value, and byte size of the primary download — plus any
    additional valid-looking downloads in the same downloads directory —
    under the recipe's filename in a JSON cache file.
    """

    input_variables = {
        "output_file_path": {"description": ("Path to output file."), "required": False},
        "output_file_name": {
            "description": ("Name of output file."),
            "required": False,
            "default": "autopkg_metadata.json",
        },
        "url": {"description": ("the download URL."), "required": False},
        "RECIPE_PATH": {"description": ("The name of the package."), "required": False},
        "download_changed": {"description": ("If any downloads changed."), "required": False},
        "pathname": {
            "description": ("The path to the downloaded installer."),
            "required": False,
        },
        "last_modified": {
            "description": ("last_modified output from URLDownloader."),
            "required": False,
        },
        "etag": {
            "description": ("etag output from URLDownloader."),
            "required": False,
            "default": None,
        },
    }

    output_variables = {
        "url": {"description": ("the download URL.")},
        "last_modified": {"description": ("The current package last_modified.")},
        "etag": {"description": ("The outputted value for etag.")},
        "RECIPE_PATH": {"description": ("the package name.")},
    }

    description = __doc__

    def get_latest_recipe_run_info(self, output_file):
        """Load CacheRecipeMetadata output from disk.

        Returns an empty dict when the file is missing or not valid JSON,
        so callers can treat a first run and a corrupt cache the same way.
        """
        try:
            with open(output_file) as fp:
                data = json.load(fp)
        except (OSError, ValueError):
            data = {}
        return data

    def find_downloads_dir(self, directory):
        """Drill down from provided path until we reach downloads dir.

        Walks up parent directories until a path ending in "downloads" is
        found. Stops at the filesystem root (where Path(p).parent == p) to
        avoid an infinite loop when no "downloads" component exists; in
        that case the root path is returned.
        """
        directory = str(directory)
        while not directory.endswith("downloads"):
            parent = str(Path(directory).parent)
            # At the root, .parent returns the same path; bail out rather
            # than spinning forever on a pathname with no downloads dir.
            if parent == directory:
                break
            directory = parent
        return directory

    def populate_multiple_dls(self, dls_dir, known_dl):
        """Returns a list of dicts containing metadata for DL files appearing valid.

        Scans `dls_dir` recursively for files other than `known_dl` that are
        larger than 500KB, reads their xattr metadata (etag, last-modified,
        download URL) and file type, and returns one metadata dict per file
        that looks like a real download.
        """

        return_list = []
        # Identify files
        for root, _dirs, files in os.walk(dls_dir):
            for name in files:
                additional_dl_path = os.path.join(root, name)

                placeholder_dict = {}
                additional_dl_size = os.path.getsize(additional_dl_path)

                # Populate any file names that don't match our reported recipe DL
                # Set our minimum DL size to 500KB to weed out tmp files
                if additional_dl_size > 500000 and additional_dl_path != known_dl:
                    print(f"{additional_dl_path} appears to be valid with byte size {additional_dl_size}")
                    # Grab previous curl etag
                    exitc, add_etag = _run_command(
                        f'xattr -p com.github.autopkg.etag "{additional_dl_path}" 2>/dev/null'
                    )
                    # Grab previous last modified
                    exitc, add_last_mod = _run_command(
                        f'xattr -p com.github.autopkg.last-modified "{additional_dl_path}" 2>/dev/null'
                    )
                    # Grab URL DL metadata (if exists)
                    # Useful info to capture if so, but not required
                    exitc, additional_dl = _run_command(
                        f'xattr -p com.apple.metadata:kMDItemWhereFroms "{additional_dl_path}" 2>/dev/null'
                    )
                    # Grab file type data to determine if .zip/.dmg/.pkg
                    exitc, additional_dl_type = _run_command(f'file -b "{additional_dl_path}" 2>/dev/null')
                    # A valid DL will typically have metadata for last modification
                    # If not, check if the file type has "archive" or "compressed" in the type
                    if add_last_mod or "archive" in additional_dl_type or "compressed" in additional_dl_type:
                        # If downloading multiple files, the URL value is overwritten by subsequent ones
                        # It's stored somewhere in the autopkg cache run data
                        # But there's not a clean way to get it, so pull from file metadata (if present)
                        if additional_dl:
                            self.output(f"Bonus URL: {additional_dl}")
                            placeholder_dict["url"] = additional_dl
                        if additional_dl_path:
                            self.output(f"Bonus path: {additional_dl_path}")
                            placeholder_dict["pathname"] = additional_dl_path
                        if add_etag:
                            self.output(f"Bonus etag: {add_etag}")
                            placeholder_dict["etag"] = add_etag
                        if add_last_mod:
                            self.output(f"Bonus last_modified: {add_last_mod}")
                            placeholder_dict["last_modified"] = add_last_mod
                        if additional_dl_size:
                            self.output(f"Bonus DL size: {additional_dl_size}")
                            placeholder_dict["dl_size_in_bytes"] = str(additional_dl_size)

                        return_list.append(placeholder_dict)
        return return_list

    def main(self):
        """output the values to a file in the location provided"""

        output_file_path = self.env.get("output_file_path")
        output_file_name = self.env.get("output_file_name")
        # NOTE(review): assumes "pathname" is always populated by a prior
        # URLDownloader run; os.path.getsize below raises if it is None.
        pathname = self.env.get("pathname")
        recipe_name = self.env.get("RECIPE_PATH")
        url = self.env.get("url")
        last_modified = self.env.get("last_modified")
        etag = self.env.get("etag")
        dl_size_in_bytes = os.path.getsize(pathname)

        recipe_path, recipe_filename = os.path.split(recipe_name)

        # If we have multiple valid downloads, we want to record those
        # Data will be stored as a list of dicts under the recipe_filename key
        first_download_dict = {}
        recipe_metadata_list = []

        if not output_file_path:
            output_file_path = "/tmp"
        output_file = os.path.join(output_file_path, output_file_name)

        # Load stored JSON file
        data = self.get_latest_recipe_run_info(output_file)

        cache_modified = False

        # Replace modified values
        # Create new key if recipe metadata not previously cached
        if recipe_filename not in data.keys():
            data[recipe_filename] = {}

        if url:
            self.output(f"URL: {url}")
            first_download_dict["url"] = url

        if pathname:
            self.output(f"Path: {pathname}")
            first_download_dict["pathname"] = pathname

        if etag:
            self.output(f"etag: {etag}")
            cache_modified = True
            first_download_dict["etag"] = etag
        if last_modified:
            self.output(f"last_modified: {last_modified}")
            cache_modified = True
            first_download_dict["last_modified"] = last_modified
        if dl_size_in_bytes:
            self.output(f"DL size: {dl_size_in_bytes}")
            cache_modified = True
            first_download_dict["dl_size_in_bytes"] = str(dl_size_in_bytes)

        recipe_metadata_list.append(first_download_dict)

        downloads_dir = self.find_downloads_dir(pathname)
        dl_dir_contents = os.listdir(downloads_dir)

        # We want to capture multiple downloads if they exist
        if len(dl_dir_contents) > 1:
            bonus_dls_list = self.populate_multiple_dls(downloads_dir, pathname)

            # If our list doesn't return empty, the cache was likely modified
            if bonus_dls_list:
                cache_modified = True
                # Update our primary DL results with ancillary
                recipe_metadata_list.extend(bonus_dls_list)

        if cache_modified:
            data[recipe_filename]["cache_timestamp"] = str(datetime.now())
        if recipe_metadata_list:
            data[recipe_filename]["download_metadata"] = recipe_metadata_list

        # Write changes back to stored JSON file
        with open(output_file, "w") as outfile:
            json.dump(data, outfile, indent=4)

        self.output(f"Metadata cache written to: {output_file}")
264 |
265 |
if __name__ == "__main__":
    # Allow invoking this processor directly (autopkg's standalone
    # processor convention): execute_shell() reads env from stdin.
    PROCESSOR = CacheRecipeMetadata()
    PROCESSOR.execute_shell()
269 |
--------------------------------------------------------------------------------
/example-recipes/CacheRecipeMetadata/io.kandji.cachedata.recipe:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Description
6 | Stub Recipe for CacheRecipeMetadata processor
7 | Identifier
8 | io.kandji.cachedata
9 |
10 |
11 |
--------------------------------------------------------------------------------
/example-recipes/Docker.pkg.recipe:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Description
6 | Downloads the latest Intel version of Docker, extracts it from a DMG, and creates a PKG.
7 | Identifier
8 | io.kandji.pkg.docker
9 | Input
10 |
11 | NAME
12 | Docker
13 | APP_NAME
14 | %NAME%.app
15 | DOWNLOAD_URL
16 | https://desktop.docker.com/mac/stable/amd64/Docker.dmg
17 | CODE_SIGNATURE
18 | anchor apple generic and identifier "com.docker.docker" and (certificate leaf[field.1.2.840.113635.100.6.1.9] /* exists */ or certificate 1[field.1.2.840.113635.100.6.2.6] /* exists */ and certificate leaf[field.1.2.840.113635.100.6.1.13] /* exists */ and certificate leaf[subject.OU] = "9BNSXJN65R")
19 |
20 | MinimumVersion
21 | 1.0.0
22 | Process
23 |
24 |
25 | Processor
26 | URLDownloader
27 | Arguments
28 |
29 | url
30 | %DOWNLOAD_URL%
31 | filename
32 | %NAME%.dmg
33 |
34 |
35 |
36 | Processor
37 | StopProcessingIf
38 | Arguments
39 |
40 | predicate
41 | download_changed == False
42 |
43 |
44 |
45 | Processor
46 | EndOfCheckPhase
47 |
48 |
49 | Processor
50 | CodeSignatureVerifier
51 | Arguments
52 |
53 | input_path
54 | %pathname%/%APP_NAME%
55 | requirement
56 | %CODE_SIGNATURE%
57 | strict_verification
58 |
59 |
60 |
61 |
62 | Processor
63 | AppDmgVersioner
64 | Arguments
65 |
66 | dmg_path
67 | %pathname%
68 |
69 |
70 |
71 | Processor
72 | AppPkgCreator
73 | Arguments
74 |
75 | app_path
76 | %pathname%/%APP_NAME%
77 | bundleid
78 | %bundleid%
79 | version
80 | %version%
81 | pkg_path
82 | %RECIPE_CACHE_DIR%/%NAME%-%version%.pkg
83 |
84 |
85 |
86 |
87 |
88 |
--------------------------------------------------------------------------------
/example-recipes/GitHubDesktop.pkg.recipe:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Description
6 | Downloads the latest Intel version of GitHub Desktop, extracts it from a ZIP, and creates a PKG.
7 | Identifier
8 | io.kandji.pkg.githubdesktop
9 | Input
10 |
11 | NAME
12 | GitHub Desktop
13 | APP_NAME
14 | %NAME%.app
15 | DOWNLOAD_URL
16 | https://central.github.com/deployments/desktop/desktop/latest/darwin
17 | CODE_SIGNATURE
18 | identifier "com.github.GitHubClient" and anchor apple generic and certificate 1[field.1.2.840.113635.100.6.2.6] /* exists */ and certificate leaf[field.1.2.840.113635.100.6.1.13] /* exists */ and certificate leaf[subject.OU] = VEKTX9H2N7
19 |
20 | MinimumVersion
21 | 1.0.0
22 | Process
23 |
24 |
25 | Processor
26 | URLDownloader
27 | Arguments
28 |
29 | url
30 | %DOWNLOAD_URL%
31 | filename
32 | %NAME%.zip
33 |
34 |
35 |
36 | Processor
37 | StopProcessingIf
38 | Arguments
39 |
40 | predicate
41 | download_changed == False
42 |
43 |
44 |
45 | Processor
46 | EndOfCheckPhase
47 |
48 |
49 | Processor
50 | Unarchiver
51 | Arguments
52 |
53 | archive_path
54 | %pathname%
55 | destination_path
56 | %RECIPE_CACHE_DIR%/unzip
57 | purge_destination
58 |
59 |
60 |
61 |
62 | Processor
63 | CodeSignatureVerifier
64 | Arguments
65 |
66 | input_path
67 | %RECIPE_CACHE_DIR%/unzip/%APP_NAME%
68 | requirement
69 | %CODE_SIGNATURE%
70 | strict_verification
71 |
72 |
73 |
74 |
75 | Processor
76 | PlistReader
77 | Arguments
78 |
79 | info_path
80 | %RECIPE_CACHE_DIR%/unzip/%APP_NAME%/Contents/Info.plist
81 | plist_keys
82 |
83 | CFBundleIdentifier
84 | bundleid
85 | CFBundleShortVersionString
86 | version
87 |
88 |
89 |
90 |
91 | Processor
92 | AppPkgCreator
93 | Arguments
94 |
95 | app_path
96 | %RECIPE_CACHE_DIR%/unzip/%APP_NAME%
97 | bundleid
98 | %bundleid%
99 | version
100 | %version%
101 | pkg_path
102 | %RECIPE_CACHE_DIR%/%NAME%-%version%.pkg
103 |
104 |
105 |
106 | Processor
107 | PathDeleter
108 | Arguments
109 |
110 | path_list
111 |
112 | %RECIPE_CACHE_DIR%/unzip
113 |
114 |
115 |
116 |
117 |
118 |
119 |
--------------------------------------------------------------------------------
/example-recipes/GoogleChrome.pkg.recipe:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Description
6 | Downloads the latest version of Google Chrome, extracts it from a DMG, and creates a PKG.
7 | Identifier
8 | io.kandji.pkg.googlechrome
9 | Input
10 |
11 | NAME
12 | Google Chrome
13 | APP_NAME
14 | %NAME%.app
15 | DOWNLOAD_URL
16 | https://dl.google.com/chrome/mac/universal/stable/GGRO/googlechrome.dmg
17 | CODE_SIGNATURE
18 | (identifier "com.google.Chrome" or identifier "com.google.Chrome.beta" or identifier "com.google.Chrome.dev" or identifier "com.google.Chrome.canary") and anchor apple generic and certificate 1[field.1.2.840.113635.100.6.2.6] /* exists */ and certificate leaf[field.1.2.840.113635.100.6.1.13] /* exists */ and certificate leaf[subject.OU] = EQHXZ8M8AV
19 |
20 | MinimumVersion
21 | 1.0.0
22 | Process
23 |
24 |
25 | Processor
26 | URLDownloader
27 | Arguments
28 |
29 | url
30 | %DOWNLOAD_URL%
31 | filename
32 | %NAME%.dmg
33 |
34 |
35 |
36 | Processor
37 | StopProcessingIf
38 | Arguments
39 |
40 | predicate
41 | download_changed == False
42 |
43 |
44 |
45 | Processor
46 | EndOfCheckPhase
47 |
48 |
49 | Processor
50 | CodeSignatureVerifier
51 | Arguments
52 |
53 | input_path
54 | %pathname%/%APP_NAME%
55 | requirement
56 | %CODE_SIGNATURE%
57 | deep_verification
58 |
59 |
60 |
61 |
62 | Processor
63 | AppDmgVersioner
64 | Arguments
65 |
66 | dmg_path
67 | %pathname%
68 |
69 |
70 |
71 | Processor
72 | AppPkgCreator
73 | Arguments
74 |
75 | app_path
76 | %pathname%/%APP_NAME%
77 | bundleid
78 | %bundleid%
79 | version
80 | %version%
81 | pkg_path
82 | %RECIPE_CACHE_DIR%/%NAME%-%version%.pkg
83 |
84 |
85 |
86 |
87 |
88 |
--------------------------------------------------------------------------------
/example-recipes/MicrosoftExcel.pkg.recipe:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Description
6 | Downloads the latest signed package for Microsoft Excel.
7 | Identifier
8 | io.kandji.pkg.microsoftexcel
9 | Input
10 |
11 | NAME
12 | Microsoft Excel
13 | APP_NAME
14 | %NAME%.app
15 | PRODUCT_ID
16 | 525135
17 | DOWNLOAD_URL
18 | https://go.microsoft.com/fwlink/?linkid=%PRODUCT_ID%
19 | PKG_SIGNATURE
20 |
21 | Developer ID Installer: Microsoft Corporation (UBF8T346G9)
22 | Developer ID Certification Authority
23 | Apple Root CA
24 |
25 | SUB_PKG_WITH_APP
26 | Microsoft_Excel.pkg/Payload
27 | PAYLOAD_INFO_PLIST_PATH
28 | payload/%APP_NAME%/Contents/Info.plist
29 |
30 | MinimumVersion
31 | 1.0.0
32 | Process
33 |
34 |
35 | Processor
36 | URLDownloader
37 | Arguments
38 |
39 | url
40 | %DOWNLOAD_URL%
41 | filename
42 | %NAME%.pkg
43 |
44 |
45 |
46 | Processor
47 | StopProcessingIf
48 | Arguments
49 |
50 | predicate
51 | download_changed == False
52 |
53 |
54 |
55 | Processor
56 | EndOfCheckPhase
57 |
58 |
59 | Processor
60 | CodeSignatureVerifier
61 | Arguments
62 |
63 | input_path
64 | %pathname%
65 | expected_authority_names
66 | %PKG_SIGNATURE%
67 |
68 |
69 |
70 | Processor
71 | FlatPkgUnpacker
72 | Arguments
73 |
74 | flat_pkg_path
75 | %pathname%
76 | destination_path
77 | %RECIPE_CACHE_DIR%/unpack
78 |
79 |
80 |
81 | Processor
82 | PkgPayloadUnpacker
83 | Arguments
84 |
85 | pkg_payload_path
86 | %RECIPE_CACHE_DIR%/unpack/%SUB_PKG_WITH_APP%
87 | destination_path
88 | %RECIPE_CACHE_DIR%/payload
89 |
90 |
91 |
92 | Processor
93 | PlistReader
94 | Arguments
95 |
96 | info_path
97 | %RECIPE_CACHE_DIR%/%PAYLOAD_INFO_PLIST_PATH%
98 | plist_keys
99 |
100 | CFBundleShortVersionString
101 | version
102 |
103 |
104 |
105 |
106 | Processor
107 | PkgCopier
108 | Arguments
109 |
110 | source_pkg
111 | %pathname%
112 | pkg_path
113 | %RECIPE_CACHE_DIR%/%NAME%-%version%.pkg
114 |
115 |
116 |
117 | Processor
118 | PathDeleter
119 | Arguments
120 |
121 | path_list
122 |
123 | %RECIPE_CACHE_DIR%/unpack
124 | %RECIPE_CACHE_DIR%/payload
125 |
126 |
127 |
128 |
129 |
130 |
131 |
--------------------------------------------------------------------------------
/example-recipes/MicrosoftPowerPoint.pkg.recipe:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Description
6 | Downloads the latest signed package for Microsoft PowerPoint.
7 | Identifier
8 | io.kandji.pkg.microsoftpowerpoint
9 | Input
10 |
11 | NAME
12 | Microsoft PowerPoint
13 | APP_NAME
14 | %NAME%.app
15 | PRODUCT_ID
16 | 525136
17 | DOWNLOAD_URL
18 | https://go.microsoft.com/fwlink/?linkid=%PRODUCT_ID%
19 | PKG_SIGNATURE
20 |
21 | Developer ID Installer: Microsoft Corporation (UBF8T346G9)
22 | Developer ID Certification Authority
23 | Apple Root CA
24 |
25 | SUB_PKG_WITH_APP
26 | Microsoft_PowerPoint.pkg/Payload
27 | PAYLOAD_INFO_PLIST_PATH
28 | payload/%APP_NAME%/Contents/Info.plist
29 |
30 | MinimumVersion
31 | 1.0.0
32 | Process
33 |
34 |
35 | Processor
36 | URLDownloader
37 | Arguments
38 |
39 | url
40 | %DOWNLOAD_URL%
41 | filename
42 | %NAME%.pkg
43 |
44 |
45 |
46 | Processor
47 | StopProcessingIf
48 | Arguments
49 |
50 | predicate
51 | download_changed == False
52 |
53 |
54 |
55 | Processor
56 | EndOfCheckPhase
57 |
58 |
59 | Processor
60 | CodeSignatureVerifier
61 | Arguments
62 |
63 | input_path
64 | %pathname%
65 | expected_authority_names
66 | %PKG_SIGNATURE%
67 |
68 |
69 |
70 | Processor
71 | FlatPkgUnpacker
72 | Arguments
73 |
74 | flat_pkg_path
75 | %pathname%
76 | destination_path
77 | %RECIPE_CACHE_DIR%/unpack
78 |
79 |
80 |
81 | Processor
82 | PkgPayloadUnpacker
83 | Arguments
84 |
85 | pkg_payload_path
86 | %RECIPE_CACHE_DIR%/unpack/%SUB_PKG_WITH_APP%
87 | destination_path
88 | %RECIPE_CACHE_DIR%/payload
89 |
90 |
91 |
92 | Processor
93 | PlistReader
94 | Arguments
95 |
96 | info_path
97 | %RECIPE_CACHE_DIR%/%PAYLOAD_INFO_PLIST_PATH%
98 | plist_keys
99 |
100 | CFBundleShortVersionString
101 | version
102 |
103 |
104 |
105 |
106 | Processor
107 | PkgCopier
108 | Arguments
109 |
110 | source_pkg
111 | %pathname%
112 | pkg_path
113 | %RECIPE_CACHE_DIR%/%NAME%-%version%.pkg
114 |
115 |
116 |
117 | Processor
118 | PathDeleter
119 | Arguments
120 |
121 | path_list
122 |
123 | %RECIPE_CACHE_DIR%/unpack
124 | %RECIPE_CACHE_DIR%/payload
125 |
126 |
127 |
128 |
129 |
130 |
131 |
--------------------------------------------------------------------------------
/example-recipes/MicrosoftRemoteDesktop.pkg.recipe:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Description
6 | Downloads the latest signed package for Microsoft Remote Desktop.
7 | Identifier
8 | io.kandji.pkg.microsoftrdc
9 | Input
10 |
11 | NAME
12 | Microsoft Remote Desktop
13 | APP_NAME
14 | %NAME%.app
15 | PRODUCT_ID
16 | 868963
17 | DOWNLOAD_URL
18 | https://go.microsoft.com/fwlink/?linkid=%PRODUCT_ID%
19 | PKG_SIGNATURE
20 |
21 | Developer ID Installer: Microsoft Corporation (UBF8T346G9)
22 | Developer ID Certification Authority
23 | Apple Root CA
24 |
25 | SUB_PKG_WITH_APP
26 | com.microsoft.rdc.macos.pkg/Payload
27 | PAYLOAD_INFO_PLIST_PATH
28 | payload/%APP_NAME%/Contents/Info.plist
29 |
30 | MinimumVersion
31 | 1.0.0
32 | Process
33 |
34 |
35 | Processor
36 | URLDownloader
37 | Arguments
38 |
39 | url
40 | %DOWNLOAD_URL%
41 | filename
42 | %NAME%.pkg
43 |
44 |
45 |
46 | Processor
47 | StopProcessingIf
48 | Arguments
49 |
50 | predicate
51 | download_changed == False
52 |
53 |
54 |
55 | Processor
56 | EndOfCheckPhase
57 |
58 |
59 | Processor
60 | CodeSignatureVerifier
61 | Arguments
62 |
63 | input_path
64 | %pathname%
65 | expected_authority_names
66 | %PKG_SIGNATURE%
67 |
68 |
69 |
70 | Processor
71 | FlatPkgUnpacker
72 | Arguments
73 |
74 | flat_pkg_path
75 | %pathname%
76 | destination_path
77 | %RECIPE_CACHE_DIR%/unpack
78 |
79 |
80 |
81 | Processor
82 | PkgPayloadUnpacker
83 | Arguments
84 |
85 | pkg_payload_path
86 | %RECIPE_CACHE_DIR%/unpack/%SUB_PKG_WITH_APP%
87 | destination_path
88 | %RECIPE_CACHE_DIR%/payload
89 |
90 |
91 |
92 | Processor
93 | PlistReader
94 | Arguments
95 |
96 | info_path
97 | %RECIPE_CACHE_DIR%/%PAYLOAD_INFO_PLIST_PATH%
98 | plist_keys
99 |
100 | CFBundleShortVersionString
101 | version
102 |
103 |
104 |
105 |
106 | Processor
107 | PkgCopier
108 | Arguments
109 |
110 | source_pkg
111 | %pathname%
112 | pkg_path
113 | %RECIPE_CACHE_DIR%/%NAME%-%version%.pkg
114 |
115 |
116 |
117 | Processor
118 | PathDeleter
119 | Arguments
120 |
121 | path_list
122 |
123 | %RECIPE_CACHE_DIR%/unpack
124 | %RECIPE_CACHE_DIR%/payload
125 |
126 |
127 |
128 |
129 |
130 |
131 |
--------------------------------------------------------------------------------
/example-recipes/MicrosoftWord.pkg.recipe:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Description
6 | Downloads the latest signed package for Microsoft Word.
7 | Identifier
8 | io.kandji.pkg.microsoftword
9 | Input
10 |
11 | NAME
12 | Microsoft Word
13 | APP_NAME
14 | %NAME%.app
15 | PRODUCT_ID
16 | 525134
17 | DOWNLOAD_URL
18 | https://go.microsoft.com/fwlink/?linkid=%PRODUCT_ID%
19 | PKG_SIGNATURE
20 |
21 | Developer ID Installer: Microsoft Corporation (UBF8T346G9)
22 | Developer ID Certification Authority
23 | Apple Root CA
24 |
25 | SUB_PKG_WITH_APP
26 | Microsoft_Word.pkg/Payload
27 | PAYLOAD_INFO_PLIST_PATH
28 | payload/%APP_NAME%/Contents/Info.plist
29 |
30 | MinimumVersion
31 | 1.0.0
32 | Process
33 |
34 |
35 | Processor
36 | URLDownloader
37 | Arguments
38 |
39 | url
40 | %DOWNLOAD_URL%
41 | filename
42 | %NAME%.pkg
43 |
44 |
45 |
46 | Processor
47 | StopProcessingIf
48 | Arguments
49 |
50 | predicate
51 | download_changed == False
52 |
53 |
54 |
55 | Processor
56 | EndOfCheckPhase
57 |
58 |
59 | Processor
60 | CodeSignatureVerifier
61 | Arguments
62 |
63 | input_path
64 | %pathname%
65 | expected_authority_names
66 | %PKG_SIGNATURE%
67 |
68 |
69 |
70 | Processor
71 | FlatPkgUnpacker
72 | Arguments
73 |
74 | flat_pkg_path
75 | %pathname%
76 | destination_path
77 | %RECIPE_CACHE_DIR%/unpack
78 |
79 |
80 |
81 | Processor
82 | PkgPayloadUnpacker
83 | Arguments
84 |
85 | pkg_payload_path
86 | %RECIPE_CACHE_DIR%/unpack/%SUB_PKG_WITH_APP%
87 | destination_path
88 | %RECIPE_CACHE_DIR%/payload
89 |
90 |
91 |
92 | Processor
93 | PlistReader
94 | Arguments
95 |
96 | info_path
97 | %RECIPE_CACHE_DIR%/%PAYLOAD_INFO_PLIST_PATH%
98 | plist_keys
99 |
100 | CFBundleShortVersionString
101 | version
102 |
103 |
104 |
105 |
106 | Processor
107 | PkgCopier
108 | Arguments
109 |
110 | source_pkg
111 | %pathname%
112 | pkg_path
113 | %RECIPE_CACHE_DIR%/%NAME%-%version%.pkg
114 |
115 |
116 |
117 | Processor
118 | PathDeleter
119 | Arguments
120 |
121 | path_list
122 |
123 | %RECIPE_CACHE_DIR%/unpack
124 | %RECIPE_CACHE_DIR%/payload
125 |
126 |
127 |
128 |
129 |
130 |
131 |
--------------------------------------------------------------------------------
/example-recipes/PyCharmCE.pkg.recipe:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Description
6 | Downloads the latest Apple silicon version of PyCharm CE, extracts it from a DMG, and creates a PKG.
7 | Identifier
8 | io.kandji.pkg.pycharmce
9 | Input
10 |
11 | NAME
12 | PyCharm CE
13 | APP_NAME
14 | %NAME%.app
15 | SEARCH_URL
16 | https://data.services.jetbrains.com/products/releases?code=PCC
17 | RE_PATTERN
18 | macM1":{"link":"(.+?dmg)"
19 | CODE_SIGNATURE
20 | identifier "com.jetbrains.pycharm.ce" and anchor apple generic and certificate 1[field.1.2.840.113635.100.6.2.6] /* exists */ and certificate leaf[field.1.2.840.113635.100.6.1.13] /* exists */ and certificate leaf[subject.OU] = "2ZEFAR8TH3"
21 |
22 | MinimumVersion
23 | 1.0.0
24 | Process
25 |
26 |
27 | Processor
28 | URLTextSearcher
29 | Arguments
30 |
31 | re_pattern
32 | %RE_PATTERN%
33 | url
34 | %SEARCH_URL%
35 |
36 |
37 |
38 | Processor
39 | URLDownloader
40 | Arguments
41 |
42 | filename
43 | %NAME%.dmg
44 | url
45 | %match%
46 | CHECK_FILESIZE_ONLY
47 |
48 |
49 |
50 |
51 | Processor
52 | StopProcessingIf
53 | Arguments
54 |
55 | predicate
56 | download_changed == False
57 |
58 |
59 |
60 | Processor
61 | EndOfCheckPhase
62 |
63 |
64 | Processor
65 | CodeSignatureVerifier
66 | Arguments
67 |
68 | input_path
69 | %pathname%/%APP_NAME%
70 | requirement
71 | %CODE_SIGNATURE%
72 | strict_verification
73 |
74 |
75 |
76 |
77 | Processor
78 | AppDmgVersioner
79 | Arguments
80 |
81 | dmg_path
82 | %pathname%
83 |
84 |
85 |
86 | Processor
87 | AppPkgCreator
88 | Arguments
89 |
90 | app_path
91 | %pathname%/%APP_NAME%
92 | bundleid
93 | %bundleid%
94 | version
95 | %version%
96 | pkg_path
97 | %RECIPE_CACHE_DIR%/%NAME%-%version%.pkg
98 |
99 |
100 |
101 |
102 |
103 |
--------------------------------------------------------------------------------
/example-recipes/TableauDesktop.pkg.recipe:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Description
6 | Downloads the current release version of Tableau Desktop and extracts the installer package.
7 | Identifier
8 | io.kandji.pkg.tableaudesktop
9 | Input
10 |
11 | NAME
12 | TableauDesktop
13 | DOWNLOAD_URL
14 | https://www.tableau.com/downloads/desktop/mac
15 | PKG_SIGNATURE
16 |
17 | Developer ID Installer: Tableau Software, LLC (QJ4XPRK37C)
18 | Developer ID Certification Authority
19 | Apple Root CA
20 |
21 |
22 | MinimumVersion
23 | 1.0.0
24 | Process
25 |
26 |
27 | Processor
28 | URLDownloader
29 | Arguments
30 |
31 | url
32 | %DOWNLOAD_URL%
33 | filename
34 | %NAME%.dmg
35 |
36 |
37 |
38 | Processor
39 | StopProcessingIf
40 | Arguments
41 |
42 | predicate
43 | download_changed == False
44 |
45 |
46 |
47 | Processor
48 | EndOfCheckPhase
49 |
50 |
51 | Processor
52 | CodeSignatureVerifier
53 | Arguments
54 |
55 | input_path
56 | %pathname%/*.pkg
57 | expected_authority_names
58 | %PKG_SIGNATURE%
59 |
60 |
61 |
62 | Processor
63 | FlatPkgUnpacker
64 | Arguments
65 |
66 | flat_pkg_path
67 | %pathname%/*.pkg
68 | destination_path
69 | %RECIPE_CACHE_DIR%/unpack
70 |
71 |
72 |
73 | Processor
74 | PkgPayloadUnpacker
75 | Arguments
76 |
77 | pkg_payload_path
78 | %RECIPE_CACHE_DIR%/unpack/Tableau App.pkg/Payload
79 | destination_path
80 | %RECIPE_CACHE_DIR%/payload
81 |
82 |
83 |
84 | Processor
85 | FileFinder
86 | Arguments
87 |
88 | pattern
89 | %RECIPE_CACHE_DIR%/payload/Tableau Desktop*.app
90 |
91 | Comment
92 | Glob here because the app bundle name changes with the versioning
93 |
94 |
95 | Processor
96 | PlistReader
97 | Arguments
98 |
99 | info_path
100 | %found_filename%/Contents/Info.plist
101 | plist_keys
102 |
103 | CFBundleIdentifier
104 | bundleid
105 | CFBundleShortVersionString
106 | version
107 |
108 |
109 |
110 |
111 | Processor
112 | PkgCopier
113 | Arguments
114 |
115 | source_pkg
116 | %pathname%/*.pkg
117 | pkg_path
118 | %RECIPE_CACHE_DIR%/%NAME%-%version%.pkg
119 |
120 |
121 |
122 | Processor
123 | PathDeleter
124 | Arguments
125 |
126 | path_list
127 |
128 | %RECIPE_CACHE_DIR%/unpack
129 | %RECIPE_CACHE_DIR%/payload
130 |
131 |
132 |
133 |
134 |
135 |
136 |
--------------------------------------------------------------------------------
/example-recipes/VLC.pkg.recipe:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Description
6 | Downloads the latest version of VLC, extracts it from a DMG, and creates a PKG.
7 | Identifier
8 | io.kandji.pkg.vlc
9 | Input
10 |
11 | NAME
12 | VLC
13 | APP_NAME
14 | %NAME%.app
15 | SEARCH_URL
16 | https://www.videolan.org/vlc/download-macosx.html
17 | RE_PATTERN
18 | ([\d.]+\/macosx\/vlc-[\d.]+-universal.dmg)
19 | CODE_SIGNATURE
20 | identifier "org.videolan.vlc" and anchor apple generic and certificate 1[field.1.2.840.113635.100.6.2.6] /* exists */ and certificate leaf[field.1.2.840.113635.100.6.1.13] /* exists */ and certificate leaf[subject.OU] = "75GAHG3SZQ"
21 |
22 | MinimumVersion
23 | 1.0.0
24 | Process
25 |
26 |
27 | Processor
28 | URLTextSearcher
29 | Arguments
30 |
31 | re_pattern
32 | %RE_PATTERN%
33 | url
34 | %SEARCH_URL%
35 | result_output_var_name
36 | DOWNLOAD_SUFFIX
37 |
38 |
39 |
40 | Processor
41 | URLDownloader
42 | Arguments
43 |
44 | url
45 | https://get.videolan.org/vlc/%DOWNLOAD_SUFFIX%
46 | filename
47 | %NAME%.dmg
48 | CHECK_FILESIZE_ONLY
49 |
50 |
51 |
52 |
53 | Processor
54 | StopProcessingIf
55 | Arguments
56 |
57 | predicate
58 | download_changed == False
59 |
60 |
61 |
62 | Processor
63 | EndOfCheckPhase
64 |
65 |
66 | Processor
67 | CodeSignatureVerifier
68 | Arguments
69 |
70 | input_path
71 | %pathname%/%APP_NAME%
72 | requirement
73 | %CODE_SIGNATURE%
74 | strict_verification
75 |
76 |
77 |
78 |
79 | Processor
80 | AppDmgVersioner
81 | Arguments
82 |
83 | dmg_path
84 | %pathname%
85 |
86 |
87 |
88 | Processor
89 | AppPkgCreator
90 | Arguments
91 |
92 | app_path
93 | %pathname%/%APP_NAME%
94 | bundleid
95 | %bundleid%
96 | version
97 | %version%
98 | pkg_path
99 | %RECIPE_CACHE_DIR%/%NAME%-%version%.pkg
100 |
101 |
102 |
103 |
104 |
105 |
--------------------------------------------------------------------------------
/example-recipes/Zoom.pkg.recipe:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Description
6 | Downloads the latest signed package for Zoom.
7 | Identifier
8 | io.kandji.pkg.zoom
9 | Input
10 |
11 | NAME
12 | Zoom
13 | APP_NAME
14 | zoom.us.app
15 | DOWNLOAD_URL
16 | https://zoom.us/client/latest/ZoomInstallerIT.pkg
17 | PKG_SIGNATURE
18 |
19 | Developer ID Installer: Zoom Video Communications, Inc. (BJ4HAAB9B3)
20 | Developer ID Certification Authority
21 | Apple Root CA
22 |
23 | SUB_PKG_WITH_APP
24 | zoomus.pkg/Payload
25 | PAYLOAD_INFO_PLIST_PATH
26 | payload/%APP_NAME%/Contents/Info.plist
27 |
28 | MinimumVersion
29 | 1.0.0
30 | Process
31 |
32 |
33 | Processor
34 | URLDownloader
35 | Arguments
36 |
37 | url
38 | %DOWNLOAD_URL%
39 | filename
40 | %NAME%.pkg
41 |
42 |
43 |
44 | Processor
45 | StopProcessingIf
46 | Arguments
47 |
48 | predicate
49 | download_changed == False
50 |
51 |
52 |
53 | Processor
54 | EndOfCheckPhase
55 |
56 |
57 | Processor
58 | CodeSignatureVerifier
59 | Arguments
60 |
61 | input_path
62 | %pathname%
63 | expected_authority_names
64 | %PKG_SIGNATURE%
65 |
66 |
67 |
68 | Processor
69 | FlatPkgUnpacker
70 | Arguments
71 |
72 | flat_pkg_path
73 | %pathname%
74 | destination_path
75 | %RECIPE_CACHE_DIR%/unpack
76 |
77 |
78 |
79 | Processor
80 | PkgPayloadUnpacker
81 | Arguments
82 |
83 | pkg_payload_path
84 | %RECIPE_CACHE_DIR%/unpack/%SUB_PKG_WITH_APP%
85 | destination_path
86 | %RECIPE_CACHE_DIR%/payload
87 |
88 |
89 |
90 | Processor
91 | PlistReader
92 | Arguments
93 |
94 | info_path
95 | %RECIPE_CACHE_DIR%/%PAYLOAD_INFO_PLIST_PATH%
96 | plist_keys
97 |
98 | CFBundleShortVersionString
99 | version
100 |
101 |
102 |
103 |
104 | Processor
105 | PkgCopier
106 | Arguments
107 |
108 | source_pkg
109 | %pathname%
110 | pkg_path
111 | %RECIPE_CACHE_DIR%/%NAME%-%version%.pkg
112 |
113 |
114 |
115 | Processor
116 | PathDeleter
117 | Arguments
118 |
119 | path_list
120 |
121 | %RECIPE_CACHE_DIR%/unpack
122 | %RECIPE_CACHE_DIR%/payload
123 |
124 |
125 |
126 |
127 |
128 |
129 |
--------------------------------------------------------------------------------
/recipe_list.json:
--------------------------------------------------------------------------------
1 | [
2 | "AdobeAcrobatProDC.pkg.recipe",
3 | "AndroidStudio.pkg.recipe",
4 | "BraveBrowser.pkg.recipe",
5 | "Docker.pkg.recipe",
6 | "GitHubDesktop.pkg.recipe",
7 | "GoogleChrome.pkg.recipe",
8 | "MicrosoftExcel.pkg.recipe",
9 | "MicrosoftPowerPoint.pkg.recipe",
10 | "MicrosoftRemoteDesktop.pkg.recipe",
11 | "MicrosoftWord.pkg.recipe",
12 | "PyCharmCE.pkg.recipe",
13 | "TableauDesktop.pkg.recipe",
14 | "VLC.pkg.recipe",
15 | "Zoom.pkg.recipe"
16 | ]
17 |
--------------------------------------------------------------------------------