├── .github ├── dependabot.yml └── workflows │ ├── rspec_and_release.yml │ └── validate-pr.yml ├── .gitignore ├── .rubocop.yml ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── Gemfile ├── Guardfile ├── LICENSE ├── README.md ├── bin └── datadog_backup ├── datadog_backup.gemspec ├── example ├── .github │ └── workflows │ │ └── backup.yml ├── .gitignore ├── Gemfile └── README.md ├── images ├── demo.gif └── demo.yml ├── lib ├── datadog_backup.rb └── datadog_backup │ ├── cli.rb │ ├── dashboards.rb │ ├── deprecations.rb │ ├── local_filesystem.rb │ ├── monitors.rb │ ├── options.rb │ ├── resources.rb │ ├── slos.rb │ ├── synthetics.rb │ ├── thread_pool.rb │ └── version.rb ├── release.config.js └── spec ├── datadog_backup ├── cli_spec.rb ├── core_spec.rb ├── dashboards_spec.rb ├── deprecations_spec.rb ├── local_filesystem_spec.rb ├── monitors_spec.rb ├── slos_spec.rb └── synthetics_spec.rb ├── datadog_backup_bin_spec.rb └── spec_helper.rb /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "bundler" 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "daily" 12 | commit-message: 13 | prefix: "fix" 14 | prefix-development: "chore" 15 | include: "scope" 16 | - package-ecosystem: "github-actions" 17 | directory: "/" # Location of package manifests 18 | schedule: 19 | interval: "daily" 20 | -------------------------------------------------------------------------------- /.github/workflows/rspec_and_release.yml: -------------------------------------------------------------------------------- 1 | name: Rspec and Release 2 | 3 | on: 4 | push: 5 | pull_request: 6 | workflow_dispatch: 7 | 8 | jobs: 9 | rspec: 10 | strategy: 11 | fail-fast: false 12 | matrix: 13 | os: [ubuntu-latest, macos-latest] 14 | # Due to https://github.com/actions/runner/issues/849, we have to use quotes for '3.0' 15 | ruby: ['3.0', '3.1', '3.2'] 16 | runs-on: ${{ matrix.os }} 17 | steps: 18 | - uses: actions/checkout@v4 19 | - name: Set up Ruby 20 | uses: ruby/setup-ruby@v1 21 | with: 22 | ruby-version: ${{ matrix.ruby }} 23 | bundler-cache: true 24 | - name: Test with Rspec 25 | run: | 26 | bundle exec rspec --format documentation --require spec_helper 27 | release: 28 | needs: rspec 29 | runs-on: ubuntu-latest 30 | env: 31 | BUNDLE_DEPLOYMENT: true 32 | steps: 33 | - uses: actions/checkout@v4 34 | - name: Set up Ruby 35 | uses: ruby/setup-ruby@v1 36 | with: 37 | ruby-version: 2.7 38 | - name: Zip 39 | run : | 40 | zip -r datadog_backup.zip ./* 41 | - name: Semantic Release 42 | id: semantic 43 | uses: cycjimmy/semantic-release-action@v4 44 | env: 45 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 46 | GEM_HOST_API_KEY: ${{ secrets.RUBYGEMS_API_TOKEN }} 47 | with: 48 | semantic_version: 17 49 | extra_plugins: | 50 | @semantic-release/changelog@5 51 | @semantic-release/git@9 52 | semantic-release-rubygem@1 53 | -------------------------------------------------------------------------------- /.github/workflows/validate-pr.yml: -------------------------------------------------------------------------------- 1 | name: Validate PR Title 2 | on: 3 | pull_request_target: 4 | types: 
5 | - opened 6 | - edited 7 | - synchronize 8 | jobs: 9 | main: 10 | runs-on: ubuntu-22.04 11 | steps: 12 | - name: Validate PR Title 13 | uses: amannn/action-semantic-pull-request@v5.4.0 14 | env: 15 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 16 | with: 17 | validateSingleCommit: true -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.gitignore.io 2 | 3 | ### OSX ### 4 | .DS_Store 5 | .AppleDouble 6 | .LSOverride 7 | 8 | # Icon must end with two \r 9 | Icon 10 | 11 | 12 | # Thumbnails 13 | ._* 14 | 15 | # Files that might appear in the root of a volume 16 | .DocumentRevisions-V100 17 | .fseventsd 18 | .Spotlight-V100 19 | .TemporaryItems 20 | .Trashes 21 | .VolumeIcon.icns 22 | 23 | # Directories potentially created on remote AFP share 24 | .AppleDB 25 | .AppleDesktop 26 | Network Trash Folder 27 | Temporary Items 28 | .apdisk 29 | 30 | 31 | ### RubyMine ### 32 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm 33 | 34 | *.iml 35 | 36 | ## Directory-based project format: 37 | .idea/ 38 | # if you remove the above rule, at least ignore the following: 39 | 40 | # User-specific stuff: 41 | # .idea/workspace.xml 42 | # .idea/tasks.xml 43 | # .idea/dictionaries 44 | 45 | # Sensitive or high-churn files: 46 | # .idea/dataSources.ids 47 | # .idea/dataSources.xml 48 | # .idea/sqlDataSources.xml 49 | # .idea/dynamic.xml 50 | # .idea/uiDesigner.xml 51 | 52 | # Gradle: 53 | # .idea/gradle.xml 54 | # .idea/libraries 55 | 56 | # Mongo Explorer plugin: 57 | # .idea/mongoSettings.xml 58 | 59 | ## File-based project format: 60 | *.ipr 61 | *.iws 62 | 63 | ## Plugin-specific files: 64 | 65 | # IntelliJ 66 | /out/ 67 | 68 | # mpeltonen/sbt-idea plugin 69 | .idea_modules/ 70 | 71 | # JIRA plugin 72 | atlassian-ide-plugin.xml 73 | 74 | # Crashlytics plugin (for Android Studio and IntelliJ) 75 | com_crashlytics_export_strings.xml 76 | crashlytics.properties 77 | crashlytics-build.properties 78 | 79 | 80 | ### Ruby ### 81 | *.gem 82 | *.rbc 83 | /.config 84 | /coverage/ 85 | /InstalledFiles 86 | /pkg/ 87 | /spec/reports/ 88 | /test/tmp/ 89 | /test/version_tmp/ 90 | /tmp/ 91 | .rspec 92 | 93 | ## Specific to RubyMotion: 94 | .dat* 95 | .repl_history 96 | build/ 97 | 98 | ## Documentation cache and generated files: 99 | /.yardoc/ 100 | /_yardoc/ 101 | /doc/ 102 | /rdoc/ 103 | 104 | ## Environment normalisation: 105 | /.bundle/ 106 | /vendor/bundle 107 | /lib/bundler/man/ 108 | 109 | # for a library or gem, you might want to ignore these files since the code is 110 | # intended to run in multiple environments; otherwise, check them in: 111 | Gemfile.lock 112 | .ruby-version* 113 | .ruby-gemset* 114 | 115 | # unless supporting rvm < 1.11.0 or doing something fancy, ignore this: 116 | .rvmrc 117 | 118 | spec/helpers/failures.txt 119 | backup/ 120 | 121 | .envrc 122 | 123 | .vscode 124 | spec/examples.txt 125 | 126 | -------------------------------------------------------------------------------- /.rubocop.yml: -------------------------------------------------------------------------------- 1 | # The behavior of RuboCop can be controlled via the .rubocop.yml 2 | # configuration file. It makes it possible to enable/disable 3 | # certain cops (checks) and to alter their behavior if they accept 4 | # any parameters. The file can be placed either in your home 5 | # directory or in some project directory. 
6 | # 7 | # RuboCop will start looking for the configuration file in the directory 8 | # where the inspected file is and continue its way up to the root directory. 9 | # 10 | # See https://docs.rubocop.org/rubocop/configuration 11 | require: 12 | - rubocop-rspec 13 | 14 | AllCops: 15 | TargetRubyVersion: 2.7 16 | NewCops: enable 17 | 18 | Layout/LineLength: 19 | Enabled: false 20 | 21 | Metrics/BlockLength: 22 | Enabled: false 23 | 24 | Metrics/ClassLength: 25 | Enabled: false 26 | 27 | Metrics/MethodLength: 28 | Enabled: false 29 | 30 | Naming/AccessorMethodName: 31 | Enabled: false 32 | 33 | RSpec/MultipleMemoizedHelpers: 34 | Enabled: false 35 | 36 | RSpec/ExampleLength: 37 | Enabled: false -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## [4.0.2](https://github.com/scribd/datadog_backup/compare/v4.0.1...v4.0.2) (2025-03-25) 2 | 3 | 4 | ### Bug Fixes 5 | 6 | * DEVPLAT-3294 Update github actions due to ubuntu-20.04 deprecation ([#167](https://github.com/scribd/datadog_backup/issues/167)) ([2930525](https://github.com/scribd/datadog_backup/commit/2930525eb72d0bfbd978c7dce2c1738f52c847e0)) 7 | 8 | ## [4.0.1](https://github.com/scribd/datadog_backup/compare/v4.0.0...v4.0.1) (2024-04-30) 9 | 10 | 11 | ### Bug Fixes 12 | 13 | * YAML.dump quotes y when dumping ([#164](https://github.com/scribd/datadog_backup/issues/164)) ([f1d6e0d](https://github.com/scribd/datadog_backup/commit/f1d6e0d68bea1da4e37dd971cb710dfccffa0c56)) 14 | 15 | # [4.0.0](https://github.com/scribd/datadog_backup/compare/v3.3.0...v4.0.0) (2024-04-30) 16 | 17 | 18 | ### Bug Fixes 19 | 20 | * Deprecate Ruby 3.0 and Drop support for Ruby 2.7 ([#163](https://github.com/scribd/datadog_backup/issues/163)) ([3d81d65](https://github.com/scribd/datadog_backup/commit/3d81d652bfb35f06b61dfb679c4d9e0d3567efcb)) 21 | 22 | 23 | ### BREAKING CHANGES 24 | 25 | * ruby 2.7 is no longer supported. Please upgrade to ruby 3.0 or higher. 
26 | 27 | # [3.3.0](https://github.com/scribd/datadog_backup/compare/v3.2.1...v3.3.0) (2023-08-17) 28 | 29 | 30 | ### Features 31 | 32 | * backup SLOs ([#155](https://github.com/scribd/datadog_backup/issues/155)) ([6cca6e7](https://github.com/scribd/datadog_backup/commit/6cca6e7567895673e94c7de80022c821553698ee)), closes [#1](https://github.com/scribd/datadog_backup/issues/1) 33 | 34 | ## [3.2.1](https://github.com/scribd/datadog_backup/compare/v3.2.0...v3.2.1) (2023-02-11) 35 | 36 | 37 | ### Bug Fixes 38 | 39 | * update error handling for restore ([233b1b2](https://github.com/scribd/datadog_backup/commit/233b1b27d485e7502dd47ca01670cea0576e920d)) 40 | 41 | # [3.2.0](https://github.com/scribd/datadog_backup/compare/v3.1.1...v3.2.0) (2023-02-10) 42 | 43 | 44 | ### Features 45 | 46 | * Deepsort skip sorting arrays ([d9cba97](https://github.com/scribd/datadog_backup/commit/d9cba97015d1af636ca9c03605ef14fa8dcb6d21)) 47 | 48 | ## [3.1.1](https://github.com/scribd/datadog_backup/compare/v3.1.0...v3.1.1) (2022-09-01) 49 | 50 | 51 | ### Bug Fixes 52 | 53 | * catch SystemExit so that rspec can complete ([9eee0e5](https://github.com/scribd/datadog_backup/commit/9eee0e5c5ea857baeafcf3fbc0f1c8f3748b0ca8)) 54 | * specify minimum ruby version in Gemfile ([c7f48f9](https://github.com/scribd/datadog_backup/commit/c7f48f95269a23549e28576434292d4ba1332d59)) 55 | 56 | 57 | ### Reverts 58 | 59 | * Revert "fix: specify minimum ruby version in Gemfile" ([e934ac2](https://github.com/scribd/datadog_backup/commit/e934ac2611c9bb557a181dd2f6f6b2678287773d)) 60 | 61 | # [3.1.0](https://github.com/scribd/datadog_backup/compare/v3.0.0...v3.1.0) (2022-08-30) 62 | 63 | 64 | ### Features 65 | 66 | * backup and restore synthetics ([#139](https://github.com/scribd/datadog_backup/issues/139)) ([a46cadc](https://github.com/scribd/datadog_backup/commit/a46cadc7d196dcb0f20bf31d06cde6a13a390835)) 67 | 68 | # [3.0.0](https://github.com/scribd/datadog_backup/compare/v2.0.2...v3.0.0) (2022-08-25) 69 | 70 | 71 | * feat!: release 3.0 (#136) ([3d23b03](https://github.com/scribd/datadog_backup/commit/3d23b03668e888886f394de2fa4884aa1e3ca287)), closes [#136](https://github.com/scribd/datadog_backup/issues/136) 72 | 73 | 74 | ### BREAKING CHANGES 75 | 76 | * DATADOG_API_KEY and DATADOG_APP_KEY are no longer the environment variables used to authenticate to Datadog. Instead, set the environment variables DD_API_KEY and DD_APP_KEY. 77 | * ruby 2.6 is no longer supported. Please upgrade to ruby 2.7 or higher. 78 | * The options `--ssh` and `--ssshh` are no longer supported. Instead, please use `--quiet` to supress logging. `--debug` remains supported. 79 | * The environment variable `DATADOG_HOST` is no longer supported. Instead, please use `DD_SITE_URL`. 80 | 81 | refactor: The legacy [dogapi-rb ](https://github.com/DataDog/dogapi-rb) gem is replaced with [faraday](https://lostisland.github.io/faraday/). The [official client library](https://github.com/DataDog/datadog-api-client-ruby) was considered, but was not adopted as I had a hard time grok-ing it. 
82 | 83 | * chore: permit logging from tests, but only error+ 84 | 85 | Co-authored-by: semantic-release-bot 86 | 87 | # [3.0.0-alpha.2](https://github.com/scribd/datadog_backup/compare/v3.0.0-alpha.1...v3.0.0-alpha.2) (2022-08-25) 88 | 89 | 90 | ### Bug Fixes 91 | 92 | * remove development pry ([611d0a6](https://github.com/scribd/datadog_backup/commit/611d0a6dd5899b0046fc00233cf679834b275089)) 93 | 94 | # [3.0.0-alpha.1](https://github.com/scribd/datadog_backup/compare/v2.0.2...v3.0.0-alpha.1) (2022-08-24) 95 | 96 | 97 | * feat!: release 3.0 ([d09d9e6](https://github.com/scribd/datadog_backup/commit/d09d9e6c845edb35c49cbb19ec6b35878304a078)) 98 | 99 | 100 | ### BREAKING CHANGES 101 | 102 | * DATADOG_API_KEY and DATADOG_APP_KEY are no longer the environment variables used to authenticate to Datadog. Instead, set the environment variables DD_API_KEY and DD_APP_KEY. 103 | * ruby 2.6 is no longer supported. Please upgrade to ruby 2.7 or higher. 104 | * The options `--ssh` and `--ssshh` are no longer supported. Instead, please use `--quiet` to supress logging. `--debug` remains supported. 105 | * The environment variable `DATADOG_HOST` is no longer supported. Instead, please use `DD_SITE_URL`. 106 | 107 | refactor: The legacy [dogapi-rb ](https://github.com/DataDog/dogapi-rb) gem is replaced with [faraday](https://lostisland.github.io/faraday/). The [official client library](https://github.com/DataDog/datadog-api-client-ruby) was considered, but was not adopted as I had a hard time grok-ing it. 108 | 109 | ## [2.0.2](https://github.com/scribd/datadog_backup/compare/v2.0.1...v2.0.2) (2022-08-11) 110 | 111 | 112 | ### Bug Fixes 113 | 114 | * Deprecate Ruby 2.6 and Drop support for Ruby 2.5 ([#132](https://github.com/scribd/datadog_backup/issues/132)) ([432cb2c](https://github.com/scribd/datadog_backup/commit/432cb2c0d8b12d89aef81cf35597aa90f77407eb)) 115 | 116 | ## [2.0.1](https://github.com/scribd/datadog_backup/compare/v2.0.0...v2.0.1) (2022-08-11) 117 | 118 | 119 | ### Bug Fixes 120 | 121 | * include version.rb in release commit ([#130](https://github.com/scribd/datadog_backup/issues/130)) ([f8df6cc](https://github.com/scribd/datadog_backup/commit/f8df6cc48ac9a3521c3c98dfa2c325f96801d001)) 122 | 123 | # [2.0.0](https://github.com/scribd/datadog_backup/compare/v1.1.4...v2.0.0) (2022-08-09) 124 | 125 | 126 | ### Bug Fixes 127 | 128 | * **deps:** bundle update 20220809 ([#129](https://github.com/scribd/datadog_backup/issues/129)) ([9050752](https://github.com/scribd/datadog_backup/commit/9050752070cfb66cdc9320f51e082d3ddee226c5)) 129 | 130 | 131 | * chore!: drop support for ruby 2.5 and 2.6 (EOL) ([29332c3](https://github.com/scribd/datadog_backup/commit/29332c39f6bb829191e840bc24309651a0ff7f16)) 132 | 133 | 134 | ### BREAKING CHANGES 135 | 136 | * ruby 2.5 and 2.6 are no longer supported 137 | 138 | ## [1.1.4](https://github.com/scribd/datadog_backup/compare/v1.1.3...v1.1.4) (2022-06-25) 139 | 140 | 141 | ### Bug Fixes 142 | 143 | * **deps:** update diffy requirement from = 3.4.0 to = 3.4.2 ([d241631](https://github.com/scribd/datadog_backup/commit/d2416319c6285d5b499b8c00d7c430be8f05091d)) 144 | 145 | ## [1.1.3](https://github.com/scribd/datadog_backup/compare/v1.1.2...v1.1.3) (2022-03-23) 146 | 147 | 148 | ### Bug Fixes 149 | 150 | * **deps:** update concurrent-ruby requirement from = 1.1.9 to = 1.1.10 ([e2eebe6](https://github.com/scribd/datadog_backup/commit/e2eebe6a418b5cd7a8e53e48587f40f4c0b8c90f)) 151 | 152 | ## [1.1.2](https://github.com/scribd/datadog_backup/compare/v1.1.1...v1.1.2) 
(2022-02-01) 153 | 154 | 155 | ### Bug Fixes 156 | 157 | * pin semantic release plugins ([5e92303](https://github.com/scribd/datadog_backup/commit/5e9230362f0b112de190fb1458fc9a3f32423c63)) 158 | 159 | ## [1.1.1](https://github.com/scribd/datadog_backup/compare/v1.1.0...v1.1.1) (2021-10-26) 160 | 161 | 162 | ### Bug Fixes 163 | 164 | * **deps:** update amazing_print requirement from = 1.3.0 to = 1.4.0 ([b7e0ca6](https://github.com/scribd/datadog_backup/commit/b7e0ca61f0fb5acbeb541d3b173aa526a0edcf3d)) 165 | 166 | # [1.1.0](https://github.com/scribd/datadog_backup/compare/v1.0.5...v1.1.0) (2021-07-14) 167 | 168 | 169 | ### Features 170 | 171 | * Add support for ruby 2.5 and 3.0 ([#89](https://github.com/scribd/datadog_backup/issues/89)) ([a181dbc](https://github.com/scribd/datadog_backup/commit/a181dbcfd55220e2fd7ce92d384738f71c50baa8)) 172 | 173 | ## [1.0.5](https://github.com/scribd/datadog_backup/compare/v1.0.4...v1.0.5) (2021-07-12) 174 | 175 | 176 | ### Bug Fixes 177 | 178 | * Add documentation for DATADOG_HOST usage ([69acc25](https://github.com/scribd/datadog_backup/commit/69acc2574d17310ee090486ec46cb06ab0f450db)) 179 | 180 | ## [1.0.4](https://github.com/scribd/datadog_backup/compare/v1.0.3...v1.0.4) (2021-07-08) 181 | 182 | 183 | ### Bug Fixes 184 | 185 | * remove max queue size limit ([b5ee79c](https://github.com/scribd/datadog_backup/commit/b5ee79cc587ef95cebf89bbd8efe9d829af63c8a)) 186 | 187 | ## [1.0.3](https://github.com/scribd/datadog_backup/compare/v1.0.2...v1.0.3) (2021-06-10) 188 | 189 | 190 | ### Bug Fixes 191 | 192 | * **deps:** update concurrent-ruby requirement from = 1.1.8 to = 1.1.9 ([31ccccb](https://github.com/scribd/datadog_backup/commit/31ccccbc890792670946923f51e5b883f4cf3e87)) 193 | 194 | ## [1.0.2](https://github.com/scribd/datadog_backup/compare/v1.0.1...v1.0.2) (2021-05-06) 195 | 196 | 197 | ### Bug Fixes 198 | 199 | * **deps:** bump rexml from 3.2.4 to 3.2.5 ([15efa8c](https://github.com/scribd/datadog_backup/commit/15efa8c58953d450311fc8e5f125bf7e12401af4)) 200 | 201 | ## [1.0.1](https://github.com/scribd/datadog_backup/compare/v1.0.0...v1.0.1) (2021-03-26) 202 | 203 | 204 | ### Bug Fixes 205 | 206 | * dependabot syntax for github ([4214001](https://github.com/scribd/datadog_backup/commit/42140015976ec2d0f4d2fce6e4c3214bb590c967)) 207 | 208 | # [1.0.0](https://github.com/scribd/datadog_backup/compare/v0.11.0...v1.0.0) (2021-03-02) 209 | 210 | 211 | ### Bug Fixes 212 | 213 | * handle gets with no result ([8d016a1](https://github.com/scribd/datadog_backup/commit/8d016a1858b44d374a0dff121c71340bf18062e0)) 214 | 215 | 216 | ### Features 217 | 218 | * If resource doesn't exist in Datadog, the resource is recreated. ([18ba241](https://github.com/scribd/datadog_backup/commit/18ba24183e136f9d899351bbb0999aba2c22308f)) 219 | 220 | 221 | ### BREAKING CHANGES 222 | 223 | * `datadog-backup` used to exit with an error if a resource 224 | wasn't found in Datadog. 
225 | 226 | # [0.11.0](https://github.com/scribd/datadog_backup/compare/v0.10.3...v0.11.0) (2021-01-12) 227 | 228 | 229 | ### Features 230 | 231 | * Add force-restore flag to allow running in automation ([#46](https://github.com/scribd/datadog_backup/issues/46)) ([e067386](https://github.com/scribd/datadog_backup/commit/e0673862b6f6d86297e1352faaee872f2c4884c8)) 232 | 233 | ## [0.10.3](https://github.com/scribd/datadog_backup/compare/v0.10.2...v0.10.3) (2020-12-11) 234 | 235 | 236 | ### Performance Improvements 237 | 238 | * coerce patch release ([bc86649](https://github.com/scribd/datadog_backup/commit/bc86649b874cd5be1da2f6bc0d1b1ecd0728676c)) 239 | 240 | ## [0.10.2](https://github.com/scribd/datadog_backup/compare/v0.10.1...v0.10.2) (2020-11-03) 241 | 242 | 243 | ### Bug Fixes 244 | 245 | * virtual environment updates ruby 2.7.1 -> 2.7.2 ([f950dd6](https://github.com/scribd/datadog_backup/commit/f950dd67ce989bb12de5f2dbf69c6449b91f2542)) 246 | 247 | ## [0.10.1](https://github.com/scribd/datadog_backup/compare/v0.10.0...v0.10.1) (2020-09-08) 248 | 249 | 250 | ### Bug Fixes 251 | 252 | * update dependencies ([939ddc7](https://github.com/scribd/datadog_backup/commit/939ddc766eaccc2428eae6486979b919f3bd1c1e)) 253 | 254 | # [0.10.0](https://github.com/scribd/datadog_backup/compare/v0.9.0...v0.10.0) (2020-08-14) 255 | 256 | 257 | ### Features 258 | 259 | * select log levels ([0272d27](https://github.com/scribd/datadog_backup/commit/0272d27530188b36c2b56da6dc075e7507635ecd)) 260 | 261 | # [0.9.0](https://github.com/scribd/datadog_backup/compare/v0.8.0...v0.9.0) (2020-08-11) 262 | 263 | 264 | ### Features 265 | 266 | * public release of datadog_backup ([50d3582](https://github.com/scribd/datadog_backup/commit/50d358284fa3f2c561b1025a3b4f5ce4b4433116)) 267 | 268 | # [0.8.0](https://github.com/scribd/datadog_backup/compare/v0.7.0...v0.8.0) (2020-08-07) 269 | 270 | 271 | ### Features 272 | 273 | * sort keys and ignore banlist for consistency ([ca683a6](https://github.com/scribd/datadog_backup/commit/ca683a63d58eeefee98b5909f830baa0e0bfa426)) 274 | 275 | # [0.7.0](https://github.com/scribd/datadog_backup/compare/v0.6.0...v0.7.0) (2020-08-07) 276 | 277 | 278 | ### Features 279 | 280 | * Purge before backup, so that deletions can be detected. 
([bdcb2b0](https://github.com/scribd/datadog_backup/commit/bdcb2b08a2e2e908f8b85359d0a43c392c5253ab)) 281 | 282 | # [0.6.0](https://github.com/scribd/datadog_backup/compare/v0.5.0...v0.6.0) (2020-08-05) 283 | 284 | 285 | ### Bug Fixes 286 | 287 | * enable datadog_backup executable to run restore ([5094813](https://github.com/scribd/datadog_backup/commit/50948132b154c30956b87b5ec1e9070d34a48a02)) 288 | * order is not guaranteed ([d65b9a8](https://github.com/scribd/datadog_backup/commit/d65b9a872c268bcd91384f9f0215b88bdc5e9544)) 289 | * restore actually restores ([0e80999](https://github.com/scribd/datadog_backup/commit/0e80999b9d90cecb7c04c639d593987d89c35616)) 290 | * rspec properly handles SystemExit ([cf26bfb](https://github.com/scribd/datadog_backup/commit/cf26bfb7dd28d4b14c2126487848cab4d9af2bf9)) 291 | 292 | 293 | ### Features 294 | 295 | * add restore flow ([8175a03](https://github.com/scribd/datadog_backup/commit/8175a033b34b268f4a9850d1629dfdb21e86d2fa)) 296 | * Change defaults to YAML(backups) and color(diffs) ([d9ed708](https://github.com/scribd/datadog_backup/commit/d9ed7084f4cb8a5357ce0ab2927df770f0b841ed)) 297 | * ignore key ordering changes ([47e6e9f](https://github.com/scribd/datadog_backup/commit/47e6e9f7f5ac7824a57f8cee45bfe68867bba760)) 298 | * use Diffy diffs rather than HashDiff:x ([e5529b8](https://github.com/scribd/datadog_backup/commit/e5529b8f8501c2534f26bd0d541afa20c076b5c0)) 299 | 300 | # [0.5.0](https://github.com/scribd/datadog_backup/compare/v0.4.0...v0.5.0) (2020-07-30) 301 | 302 | 303 | ### Features 304 | 305 | * Handle Ctrl-c by closing down the worker pool ([96791ba](https://github.com/scribd/datadog_backup/commit/96791ba23997114c356a097eb7a096ef2f7bd31c)) 306 | * use thread pool to globally limit thread count ([915f5c2](https://github.com/scribd/datadog_backup/commit/915f5c27be2fdf1f3bde40c34d8999ee1248de43)) 307 | 308 | # [0.4.0](https://github.com/scribd/datadog_backup/compare/v0.3.0...v0.4.0) (2020-07-27) 309 | 310 | 311 | ### Bug Fixes 312 | 313 | * use maintained 'amazing_print' gem, rather than suffer a bajillion error messages ([a988416](https://github.com/scribd/datadog_backup/commit/a988416de11fd83ddd2d5bcf5b78bed59de65694)) 314 | 315 | 316 | ### Features 317 | 318 | * `diffs` provides a diff between what's on disk and in datadog ([8599b47](https://github.com/scribd/datadog_backup/commit/8599b47cd8761292247331982d9332f6b4da07b4)) 319 | * add `diffs` function ([3f5cd41](https://github.com/scribd/datadog_backup/commit/3f5cd41ae6bae99abb44a1495ea76c527ddc0428)) 320 | 321 | # [0.3.0](https://github.com/scribd/datadog_backup/compare/v0.2.0...v0.3.0) (2020-07-24) 322 | 323 | 324 | ### Features 325 | 326 | * add yaml option ([5645e71](https://github.com/scribd/datadog_backup/commit/5645e71826ee474201d54f51ed061ab1d3f9e872)) 327 | 328 | # [0.2.0](https://github.com/scribd/datadog_backup/compare/v0.1.0...v0.2.0) (2020-07-18) 329 | 330 | 331 | ### Features 332 | 333 | * send debug array to DEBUG ([0ef8bd7](https://github.com/scribd/datadog_backup/commit/0ef8bd71beba051e5ba4cddc9142507b505bc945)) 334 | 335 | # [0.1.0](https://github.com/scribd/datadog_backup/compare/v0.0.3...v0.1.0) (2020-07-17) 336 | 337 | 338 | ### Features 339 | 340 | * change default backup directory to 'backup' ([27c8f69](https://github.com/scribd/datadog_backup/commit/27c8f6914147801b10de7e24cfa7e2742010fd89)) 341 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: 
-------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at rtyler@scribd.com. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 
67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq 77 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | source 'https://rubygems.org' 4 | 5 | gemspec 6 | -------------------------------------------------------------------------------- /Guardfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | # A sample Guardfile 4 | # More info at https://github.com/guard/guard#readme 5 | 6 | ## Uncomment and set this to only include directories you want to watch 7 | # directories %w(app lib config test spec features) \ 8 | # .select{|d| Dir.exist?(d) ? d : UI.warning("Directory #{d} does not exist")} 9 | 10 | ## Note: if you are using the `directories` clause above and you are not 11 | ## watching the project directory ('.'), then you will want to move 12 | ## the Guardfile to a watched dir and symlink it back, e.g. 13 | # 14 | # $ mkdir config 15 | # $ mv Guardfile config/ 16 | # $ ln -s config/Guardfile . 17 | # 18 | # and, you'll have to watch "config/Guardfile" instead of "Guardfile" 19 | 20 | # NOTE: The cmd option is now required due to the increasing number of ways 21 | # rspec may be run, below are examples of the most common uses. 22 | # * bundler: 'bundle exec rspec' 23 | # * bundler binstubs: 'bin/rspec' 24 | # * spring: 'bin/rspec' (This will use spring if running and you have 25 | # installed the spring binstubs per the docs) 26 | # * zeus: 'zeus rspec' (requires the server to be started separately) 27 | # * 'just' rspec: 'rspec' 28 | 29 | guard :rspec, cmd: 'bundle exec rspec' do 30 | require 'guard/rspec/dsl' 31 | dsl = Guard::RSpec::Dsl.new(self) 32 | 33 | # Feel free to open issues for suggestions and improvements 34 | 35 | # RSpec files 36 | rspec = dsl.rspec 37 | watch(rspec.spec_helper) { rspec.spec_dir } 38 | watch(rspec.spec_support) { rspec.spec_dir } 39 | watch(rspec.spec_files) 40 | 41 | # Ruby files 42 | ruby = dsl.ruby 43 | dsl.watch_spec_files_for(ruby.lib_files) 44 | end 45 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Scribd, Inc 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Datadog Backup 2 | 3 | ![Rspec and Release](https://github.com/scribd/datadog_backup/workflows/Rspec%20and%20Release/badge.svg) 4 | [![Gem Version](https://badge.fury.io/rb/datadog_backup.svg)](https://badge.fury.io/rb/datadog_backup) 5 | 6 | Use `datadog_backup` to back up your Datadog account. 7 | Currently supports 8 | 9 | - dashboards 10 | - monitors 11 | 12 | SLOs and synthetics are also backed up and restored. Additional features may be built out over time. 13 | 14 | # v3 Migration 15 | 16 | ## Breaking Changes 17 | v3 is a backwards-incompatible change; a brief migration sketch follows the lists below. 18 | 19 | - [ ] DATADOG_API_KEY and DATADOG_APP_KEY are no longer the environment variables used to authenticate to Datadog. Instead, set the environment variables DD_API_KEY and DD_APP_KEY. 20 | - [ ] ruby 2.7 is no longer supported. Please upgrade to ruby 3.0 or higher. 21 | - [ ] The options `--ssh` and `--ssshh` are no longer supported. Instead, please use `--quiet` to suppress logging. `--debug` remains supported. 22 | - [ ] The environment variable `DATADOG_HOST` is no longer supported. Instead, please use `DD_SITE_URL`. 23 | 24 | ## Misc 25 | - [ ] The legacy [dogapi-rb](https://github.com/DataDog/dogapi-rb) gem is replaced with [faraday](https://lostisland.github.io/faraday/). The [official client library](https://github.com/DataDog/datadog-api-client-ruby) was considered, but was not adopted as I had a hard time grokking it.
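A minimal before/after sketch of the migration above (the keys, site URL, and flag values shown are placeholders, not real credentials or a prescribed setup):

```
# v2-style invocation (no longer works):
#   DATADOG_API_KEY=abc123 DATADOG_APP_KEY=abc123 datadog_backup --ssshh backup
# v3 equivalent:
DD_API_KEY=abc123 DD_APP_KEY=abc123 datadog_backup --quiet backup
# To target a non-default Datadog site (formerly DATADOG_HOST):
export DD_SITE_URL=https://api.datadoghq.eu
```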
26 | 27 | 28 | ## Installation 29 | 30 | ``` 31 | gem install datadog_backup 32 | ``` 33 | 34 | ## Usage 35 | 36 | ![demo](images/demo.gif) 37 | 38 | ``` 39 | DD_API_KEY=example123 DD_APP_KEY=example123 datadog_backup [--backup-dir /path/to/backups] [--debug] [--monitors-only] [--dashboards-only] [--diff-format color|html|html_simple] [--no-color] [--json] 40 | ``` 41 | 42 | ``` 43 | gem install datadog_backup 44 | export DD_API_KEY=abc123 45 | export DD_APP_KEY=abc123 46 | 47 | # Perform backup to `./backup/` using YAML encoding 48 | datadog_backup backup 49 | 50 | # Make some changes 51 | 52 | # Just review the changes since last backup 53 | datadog_backup diffs 54 | 55 | # Review the changes since last backup and apply local changes to Datadog 56 | 57 | datadog_backup restore 58 | ``` 59 | ## Parameters 60 | 61 | Supply the following parameters to customize datadog_backup: 62 | 63 | parameter | description | default 64 | ---------------------|-------------------------------------------------------------------------------------------------------------------------------|-------------------------- 65 | --debug | log debug and above | info 66 | --quiet | only show errors and above | info 67 | --backup-dir PATH | path to the directory to back up to or restore from | `./backup/` 68 | --monitors-only | only backup monitors | backup monitors and dashboards 69 | --dashboards-only | only backup dashboards | backup monitors and dashboards 70 | --json | format backups as JSON instead of YAML. Does not impact `diffs` nor `restore`, but do not mix formats in the same backup-dir. | YAML 71 | --no-color | removes colored output from diff format 72 | --diff-format FORMAT | one of `color`, `html_simple`, `html` | `color` 73 | --force-restore | Force restore to Datadog. Do not ask to validate. Non-interactive. 74 | --disable-array-sort | Do not sort array elements, to preserve order of dashboard widgets. 75 | -h, --help | help 76 | 77 | ## Environment variables 78 | 79 | The following environment variables can be set to further customize datadog_backup: 80 | 81 | environment variable | description | default 82 | ---------------------|---------------------------------------------------------------------------------|-------------------------- 83 | DD_SITE_URL | The API endpoint to connect to (https://api.datadoghq.eu for example) | https://api.datadoghq.com 84 | DD_API_KEY | The API key for the Datadog account | none 85 | DD_APP_KEY | The application key for the Datadog account | none 86 | 87 | 88 | ### Usage in a Github repo 89 | 90 | See [example/](https://github.com/scribd/datadog_backup/tree/main/example) for an example implementation as a repo that backs up your Datadog dashboards hourly. 91 | 92 | # Development 93 | 94 | Releases are cut using [semantic-release](https://github.com/semantic-release/semantic-release).
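As a rough illustration of how commit prefixes drive release types under this setup (the subject lines below are made-up examples, not real commits):

```
fix: handle an empty API response        # patch release
feat: back up a new resource type        # minor release
feat!: drop support for an EOL ruby      # major release (or a "BREAKING CHANGE:" footer)
chore: bump a development dependency     # no release
```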
95 | 96 | Please write commit messages following [Angular commit guidelines](https://github.com/angular/angular.js/blob/master/DEVELOPERS.md#-git-commit-guidelines) 97 | -------------------------------------------------------------------------------- /bin/datadog_backup: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | # frozen_string_literal: true 3 | 4 | $LOAD_PATH.unshift File.join(File.dirname(__FILE__), '../lib') 5 | 6 | require 'logger' 7 | require 'optparse' 8 | $stdout.sync = $stderr.sync = true 9 | LOGGER = Logger.new($stderr) unless defined?(LOGGER) 10 | LOGGER.level = Logger::INFO 11 | 12 | require 'datadog_backup' 13 | 14 | def fatal(message) 15 | LOGGER.fatal(message) 16 | exit 1 17 | end 18 | 19 | def options_valid?(options) 20 | %w[backup diffs restore].include?(options[:action]) && 21 | %w[DD_API_KEY DD_APP_KEY].all? { |key| ENV.fetch(key, nil) } 22 | end 23 | 24 | def prereqs(defaults) # rubocop:disable Metrics/AbcSize 25 | ARGV << '--help' if ARGV.empty? 26 | 27 | result = defaults.dup 28 | 29 | options = OptionParser.new do |opts| 30 | opts.banner = "Usage: DD_API_KEY=abc123 DD_APP_KEY=abc123 #{File.basename($PROGRAM_NAME)} " 31 | opts.separator '' 32 | opts.on_tail('-h', '--help', 'Show this message') do 33 | puts opts 34 | exit 0 35 | end 36 | opts.on('--debug', 'log debug and above') do 37 | LOGGER.level = Logger::DEBUG 38 | end 39 | opts.on('--quiet', 'log errors and above') do 40 | LOGGER.level = Logger::ERROR 41 | end 42 | opts.on('--backup-dir PATH', '`backup` by default') do |path| 43 | result[:backup_dir] = path 44 | end 45 | opts.on('--monitors-only') do 46 | result[:resources] = [DatadogBackup::Monitors] 47 | end 48 | opts.on('--dashboards-only') do 49 | result[:resources] = [DatadogBackup::Dashboards] 50 | end 51 | opts.on('--slos-only') do 52 | result[:resources] = [DatadogBackup::SLOs] 53 | end 54 | opts.on('--synthetics-only') do 55 | result[:resources] = [DatadogBackup::Synthetics] 56 | end 57 | opts.on( 58 | '--json', 59 | 'format backups as JSON instead of YAML. Does not impact `diffs` nor `restore`, but do not mix formats in the same backup-dir.' 60 | ) do 61 | result[:output_format] = :json 62 | end 63 | opts.on('--no-color', 'removes colored output from diff format') do 64 | result[:diff_format] = nil 65 | end 66 | opts.on('--diff-format FORMAT', 'one of `color`, `html_simple`, `html`') do |format| 67 | result[:diff_format] = format.to_sym 68 | end 69 | opts.on('--force-restore', 'Force restore to Datadog. Do not ask to validate. Non-interactive.') do 70 | result[:force_restore] = true 71 | end 72 | opts.on('--disable-array-sort', 'Do not sort array elements, to preserve order of dashboard widgets.') do 73 | result[:disable_array_sort] = true 74 | end 75 | end 76 | options.parse! 77 | 78 | result[:action] = ARGV.first 79 | fatal(options.banner) unless options_valid?(result) 80 | result 81 | end 82 | 83 | ## 84 | # Default parameters 85 | defaults = { 86 | action: nil, 87 | backup_dir: File.join(ENV.fetch('PWD'), 'backup'), 88 | diff_format: :color, 89 | resources: [DatadogBackup::Dashboards, DatadogBackup::Monitors, DatadogBackup::SLOs, DatadogBackup::Synthetics], 90 | output_format: :yaml, 91 | force_restore: false, 92 | disable_array_sort: false 93 | } 94 | 95 | DatadogBackup::Cli.new(prereqs(defaults)).run!
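# Example invocations of this script (illustrative only; the key values and the
# backup directory below are placeholders, and the flags are the ones defined in
# the OptionParser block above):
#   DD_API_KEY=abc123 DD_APP_KEY=abc123 datadog_backup backup
#   DD_API_KEY=abc123 DD_APP_KEY=abc123 datadog_backup backup --slos-only --backup-dir ./slo-backup --json
#   DD_API_KEY=abc123 DD_APP_KEY=abc123 datadog_backup diffs --no-color
#   DD_API_KEY=abc123 DD_APP_KEY=abc123 datadog_backup restore --force-restore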
96 | -------------------------------------------------------------------------------- /datadog_backup.gemspec: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | lib = File.expand_path('lib', __dir__) 4 | $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) 5 | require 'datadog_backup/version' 6 | 7 | Gem::Specification.new do |spec| 8 | spec.name = 'datadog_backup' 9 | spec.version = DatadogBackup::VERSION 10 | spec.authors = ['Kamran Farhadi', 'Jim Park'] 11 | spec.email = ['kamranf@scribd.com', 'jimp@scribd.com'] 12 | spec.summary = 'A utility to backup and restore Datadog accounts' 13 | spec.description = 'A utility to backup and restore Datadog accounts' 14 | spec.homepage = 'https://github.com/scribd/datadog_backup' 15 | spec.license = 'MIT' 16 | spec.metadata['rubygems_mfa_required'] = 'true' 17 | 18 | spec.files = `git ls-files -z`.split("\x0") 19 | spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } 20 | spec.require_paths = ['lib'] 21 | 22 | spec.required_ruby_version = '>= 3.0' 23 | 24 | spec.add_dependency 'amazing_print' 25 | spec.add_dependency 'concurrent-ruby' 26 | spec.add_dependency 'deepsort' 27 | spec.add_dependency 'diffy' 28 | spec.add_dependency 'faraday' 29 | spec.add_dependency 'faraday-retry' 30 | 31 | spec.add_development_dependency 'bundler' 32 | spec.add_development_dependency 'guard-rspec' 33 | spec.add_development_dependency 'pry' 34 | spec.add_development_dependency 'pry-byebug' 35 | spec.add_development_dependency 'rspec' 36 | spec.add_development_dependency 'rubocop' 37 | spec.add_development_dependency 'rubocop-rspec' 38 | end 39 | -------------------------------------------------------------------------------- /example/.github/workflows/backup.yml: -------------------------------------------------------------------------------- 1 | name: backup 2 | 3 | on: 4 | schedule: 5 | - cron: "0 * * * *" 6 | workflow_dispatch: 7 | 8 | jobs: 9 | backup: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - uses: actions/checkout@v4 14 | - name: Set up Ruby 3.1 15 | uses: ruby/setup-ruby@v1 16 | with: 17 | ruby-version: 3.1 18 | - name: perform backup 19 | env: 20 | DD_API_KEY: ${{ secrets.DD_API_KEY }} 21 | DD_APP_KEY: ${{ secrets.DD_APP_KEY }} 22 | run: | 23 | gem install --no-document bundler 24 | bundle install --jobs 4 --retry 3 25 | bundle exec datadog_backup backup 26 | - name: commit changes 27 | uses: stefanzweifel/git-auto-commit-action@v5 28 | with: 29 | commit_message: "Changes as of run: ${{ github.run_id }}" 30 | file_pattern: backup/ 31 | repository: . 32 | -------------------------------------------------------------------------------- /example/.gitignore: -------------------------------------------------------------------------------- 1 | .bundle/ 2 | -------------------------------------------------------------------------------- /example/Gemfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | source 'https://rubygems.org' 4 | 5 | gem 'datadog_backup' 6 | -------------------------------------------------------------------------------- /example/README.md: -------------------------------------------------------------------------------- 1 | # Example-datadog-backups 2 | Dashboards and monitors are backed up here on an hourly basis. 3 | 4 | Github Actions uses the [datadog_backup gem](https://github.com/scribd/datadog_backup) in order to sync the latest copy of what's in Datadog. 
5 | 6 | ## Performing edits to Datadog monitors and dashboards 7 | 1. Do it in the Datadog UI 8 | 2. At the top of the hour, the changes will be recorded here. 9 | 3. If you're in a rush, click on "Run workflow" from the Github Actions workflow menus 10 | 11 | ## Performing bulk edits to Datadog monitors and dashboards 12 | 1. Clone this repo 13 | 2. `bundle install` 14 | 3. `bundle exec datadog_backup backup` to download the latest changes in Datadog. 15 | 4. Make your changes locally. 16 | 5. `bundle exec datadog_backup restore` to apply your changes. 17 | 6. Review each change and apply it (r), or download the latest copy from Datadog (d). 18 | -------------------------------------------------------------------------------- /images/demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scribd/datadog_backup/ff7de23f2447d82b8fa43d5c86e3ac087af0a493/images/demo.gif -------------------------------------------------------------------------------- /images/demo.yml: -------------------------------------------------------------------------------- 1 | # The configurations that used for the recording, feel free to edit them 2 | config: 3 | 4 | # Specify a command to be executed 5 | # like `/bin/bash -l`, `ls`, or any other commands 6 | # the default is bash for Linux 7 | # or powershell.exe for Windows 8 | command: bash -l 9 | 10 | # Specify the current working directory path 11 | # the default is the current working directory path 12 | cwd: /Users/jimp/demo 13 | 14 | # Export additional ENV variables 15 | env: 16 | recording: true 17 | 18 | # Explicitly set the number of columns 19 | # or use `auto` to take the current 20 | # number of columns of your shell 21 | cols: 164 22 | 23 | # Explicitly set the number of rows 24 | # or use `auto` to take the current 25 | # number of rows of your shell 26 | rows: 43 27 | 28 | # Amount of times to repeat GIF 29 | # If value is -1, play once 30 | # If value is 0, loop indefinitely 31 | # If value is a positive number, loop n times 32 | repeat: 0 33 | 34 | # Quality 35 | # 1 - 100 36 | quality: 100 37 | 38 | # Delay between frames in ms 39 | # If the value is `auto` use the actual recording delays 40 | frameDelay: auto 41 | 42 | # Maximum delay between frames in ms 43 | # Ignored if the `frameDelay` isn't set to `auto` 44 | # Set to `auto` to prevent limiting the max idle time 45 | maxIdleTime: 2000 46 | 47 | # The surrounding frame box 48 | # The `type` can be null, window, floating, or solid` 49 | # To hide the title use the value null 50 | # Don't forget to add a backgroundColor style with a null as type 51 | frameBox: 52 | type: floating 53 | title: Datadog Backup 54 | style: 55 | border: 0px black solid 56 | # boxShadow: none 57 | # margin: 0px 58 | 59 | # Add a watermark image to the rendered gif 60 | # You need to specify an absolute path for 61 | # the image on your machine or a URL, and you can also 62 | # add your own CSS styles 63 | watermark: 64 | imagePath: null 65 | style: 66 | position: absolute 67 | right: 15px 68 | bottom: 15px 69 | width: 100px 70 | opacity: 0.9 71 | 72 | # Cursor style can be one of 73 | # `block`, `underline`, or `bar` 74 | cursorStyle: block 75 | 76 | # Font family 77 | # You can use any font that is installed on your machine 78 | # in CSS-like syntax 79 | fontFamily: "Monaco, Lucida Console, Ubuntu Mono, Monospace" 80 | 81 | # The size of the font 82 | fontSize: 12 83 | 84 | # The height of lines 85 | lineHeight: 1 86 | 87 | # The spacing between 
letters 88 | letterSpacing: 0 89 | 90 | # Theme 91 | theme: 92 | background: "transparent" 93 | foreground: "#afafaf" 94 | cursor: "#c7c7c7" 95 | black: "#232628" 96 | red: "#fc4384" 97 | green: "#b3e33b" 98 | yellow: "#ffa727" 99 | blue: "#75dff2" 100 | magenta: "#ae89fe" 101 | cyan: "#708387" 102 | white: "#d5d5d0" 103 | brightBlack: "#626566" 104 | brightRed: "#ff7fac" 105 | brightGreen: "#c8ed71" 106 | brightYellow: "#ebdf86" 107 | brightBlue: "#75dff2" 108 | brightMagenta: "#ae89fe" 109 | brightCyan: "#b1c6ca" 110 | brightWhite: "#f9f9f4" 111 | 112 | # Records, feel free to edit them 113 | records: 114 | - delay: 1000 115 | content: "\e[?1034h\r\r\n\e[36;1m\e[0;33m|ruby-2.6.3| \e[0; \e[39min \e[0;32m~/demo\r\r\n\e[36;1m○\e[0;32m \e[0;32m→\e[39m " 116 | - delay: 40 117 | content: g 118 | - delay: 40 119 | content: e 120 | - delay: 40 121 | content: m 122 | - delay: 40 123 | content: ' ' 124 | - delay: 40 125 | content: i 126 | - delay: 40 127 | content: n 128 | - delay: 40 129 | content: s 130 | - delay: 40 131 | content: t 132 | - delay: 40 133 | content: a 134 | - delay: 40 135 | content: l 136 | - delay: 40 137 | content: l 138 | - delay: 40 139 | content: ' ' 140 | - delay: 40 141 | content: d 142 | - delay: 40 143 | content: a 144 | - delay: 40 145 | content: t 146 | - delay: 40 147 | content: a 148 | - delay: 40 149 | content: d 150 | - delay: 40 151 | content: o 152 | - delay: 40 153 | content: g 154 | - delay: 40 155 | content: _ 156 | - delay: 40 157 | content: b 158 | - delay: 40 159 | content: a 160 | - delay: 40 161 | content: c 162 | - delay: 40 163 | content: k 164 | - delay: 40 165 | content: u 166 | - delay: 40 167 | content: p 168 | - delay: 40 169 | content: "\r\n" 170 | - delay: 400 171 | content: "Fetching datadog_backup-0.10.0.gem\r\n" 172 | - delay: 40 173 | content: "Successfully installed datadog_backup-0.10.0\r\n" 174 | - delay: 40 175 | content: "Parsing documentation for datadog_backup-0.10.0\r\n" 176 | - delay: 40 177 | content: "Installing ri documentation for datadog_backup-0.10.0\r\n" 178 | - delay: 40 179 | content: "Done installing documentation for datadog_backup after 0 seconds\r\n1 gem installed\r\n" 180 | - delay: 40 181 | content: "\r\r\n\e[36;1m\e[0;33m|ruby-2.6.3| \e[0; \e[39min \e[0;32m~/demo\r\r\n\e[36;1m○\e[0;32m \e[0;32m→\e[39m " 182 | - delay: 400 183 | content: d 184 | - delay: 40 185 | content: a 186 | - delay: 40 187 | content: t 188 | - delay: 40 189 | content: a 190 | - delay: 40 191 | content: d 192 | - delay: 40 193 | content: o 194 | - delay: 40 195 | content: g 196 | - delay: 40 197 | content: _ 198 | - delay: 40 199 | content: b 200 | - delay: 40 201 | content: a 202 | - delay: 40 203 | content: c 204 | - delay: 40 205 | content: k 206 | - delay: 40 207 | content: u 208 | - delay: 40 209 | content: p 210 | - delay: 40 211 | content: ' ' 212 | - delay: 40 213 | content: '-' 214 | - delay: 40 215 | content: '-' 216 | - delay: 40 217 | content: h 218 | - delay: 40 219 | content: e 220 | - delay: 40 221 | content: l 222 | - delay: 40 223 | content: p 224 | - delay: 40 225 | content: "\r\n" 226 | - delay: 400 227 | content: "Usage: datadog_backup \r\n\r\n --debug log debug and above\r\n --shh log warnings and above\r\n --shhh log errors and above\r\n --backup-dir PATH `backup` by default\r\n --monitors-only\r\n --dashboards-only\r\n --json format backups as JSON instead of YAML. 
Does not impact `diffs` nor `restore`, but do not mix formats in the same backup-dir.\r\n --no-color removes colored output from diff format\r\n --diff-format FORMAT one of `color`, `html_simple`, `html`\r\n -h, --help Show this message\r\n" 228 | - delay: 400 229 | content: "\r\r\n\e[36;1m\e[0;33m|ruby-2.6.3| \e[0; \e[39min \e[0;32m~/demo\r\r\n\e[36;1m○\e[0;32m \e[0;32m→\e[39m " 230 | - delay: 40 231 | content: d 232 | - delay: 40 233 | content: a 234 | - delay: 40 235 | content: t 236 | - delay: 40 237 | content: a 238 | - delay: 40 239 | content: d 240 | - delay: 40 241 | content: o 242 | - delay: 40 243 | content: g 244 | - delay: 40 245 | content: _ 246 | - delay: 40 247 | content: b 248 | - delay: 40 249 | content: a 250 | - delay: 40 251 | content: c 252 | - delay: 40 253 | content: k 254 | - delay: 40 255 | content: u 256 | - delay: 40 257 | content: p 258 | - delay: 40 259 | content: ' ' 260 | - delay: 40 261 | content: '-' 262 | - delay: 40 263 | content: '-' 264 | - delay: 40 265 | content: s 266 | - delay: 40 267 | content: h 268 | - delay: 40 269 | content: h 270 | - delay: 40 271 | content: h 272 | - delay: 40 273 | content: ' ' 274 | - delay: 40 275 | content: b 276 | - delay: 40 277 | content: a 278 | - delay: 40 279 | content: c 280 | - delay: 40 281 | content: k 282 | - delay: 40 283 | content: u 284 | - delay: 40 285 | content: p 286 | - delay: 40 287 | content: "\r\n" 288 | - delay: 400 289 | content: "/Users/jimp/demo/backup/dashboards/s2s-uyz-74d.yaml\r\n" 290 | - delay: 400 291 | content: "\r\r\n\e[36;1m\e[0;33m|ruby-2.6.3| \e[0; \e[39min \e[0;32m~/demo\r\r\n\e[36;1m○\e[0;32m \e[0;32m→\e[39m " 292 | - delay: 400 293 | content: v 294 | - delay: 40 295 | content: i 296 | - delay: 40 297 | content: m 298 | - delay: 40 299 | content: ' ' 300 | - delay: 400 301 | content: "/Users/jimp/demo/backup/dashboards/s2s-uyz-74d.yaml" 302 | - delay: 400 303 | content: "\r\n" 304 | - delay: 400 305 | content: "\e[?1000h\e]50;CursorShape=0\a\e[?1004h\e[?1049h\e[?1049h\e[>4;2m\e[?1h\e=\e[?2004h\e[1;42r\e[?12h\e[?12l\e[22;2t\e[22;1t\e[27m\e[29m\e[m\e[38;5;250m\e[48;5;235m\e[H\e[2J\e[?25l\e[42;1H\"backup/dashboards/s2s-uyz-74d.yaml\" 20L, 542B" 306 | - delay: 40 307 | content: "\e[?1000l\e[?2004l\e[>4;m" 308 | - delay: 40 309 | content: "\e[?2004h\e[>4;2m\e[?1000h" 310 | - delay: 40 311 | content: "\e[?2004h\e[>4;2m\e[?1000l\e[?2004l\e[>4;m" 312 | - delay: 40 313 | content: "\e[?2004h\e[>4;2m\e[?1006h\e[?1002h\e[?1006l\e[?1002l\e[?2004l\e[>4;m" 314 | - delay: 40 315 | content: "\e[?2004h\e[>4;2m\e[?1006h\e[?1002h\e[?1006l\e[?1002l\e[?2004l\e[>4;m" 316 | - delay: 40 317 | content: "\e[?2004h\e[>4;2m\e[?1006h\e[?1002h\e[?1006l\e[?1002l\e[?2004l\e[>4;m" 318 | - delay: 40 319 | content: "\e[?2004h\e[>4;2m\e[?1006h\e[?1002h\e[1;1H\e[38;5;239m 1 \e[m\e[38;5;250m\e[48;5;235m\e[38;5;139m---\e[m\e[38;5;250m\e[48;5;235m\r\n\e[38;5;239m 2 \e[m\e[38;5;250m\e[48;5;235m\e[38;5;167mauthor_handle\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m jimp@scribd.com\r\n\e[38;5;239m 3 \e[m\e[38;5;250m\e[48;5;235m\e[38;5;167mauthor_name\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m Sung Park\r\n\e[38;5;239m 4 \e[m\e[38;5;250m\e[48;5;235m\e[38;5;167mcreated_at\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m \e[38;5;143m'2020-06-22T00:45:23.876622+00:00'\e[m\e[38;5;250m\e[48;5;235m\r\n\e[38;5;239m 5 \e[m\e[38;5;250m\e[48;5;235m\e[38;5;167mdescription\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m This timeboard just got an 
additional upgrade.\r\n\e[38;5;239m 6 \e[m\e[38;5;250m\e[48;5;235m\e[38;5;167mid\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m s2s-uyz-74d\r\n\e[38;5;239m 7 \e[m\e[38;5;250m\e[48;5;235m\e[38;5;167mis_read_only\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m \e[38;5;173mfalse\e[m\e[38;5;250m\e[48;5;235m\r\n\e[38;5;239m 8 \e[m\e[38;5;250m\e[48;5;235m\e[38;5;167mlayout_type\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m ordered\r\n\e[38;5;239m 9 \e[m\e[38;5;250m\e[48;5;235m\e[38;5;167mnotify_list\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m \e[38;5;250m[]\e[m\e[38;5;250m\e[48;5;235m\r\n\e[38;5;239m 10 \e[m\e[38;5;250m\e[48;5;235m\e[38;5;167mtemplate_variables\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m \e[38;5;250m[]\e[m\e[38;5;250m\e[48;5;235m\r\n\e[38;5;239m 11 \e[m\e[38;5;250m\e[48;5;235m\e[38;5;167mtitle\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m Jim's Exceptionally Acceptably Average Timeboard\r\n\e[38;5;239m 12 \e[m\e[38;5;250m\e[48;5;235m\e[38;5;167mwidgets\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m\r\n\e[38;5;239m 13 \e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m- \e[m\e[38;5;250m\e[48;5;235m\e[38;5;167mdefinition\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m\r\n\e[38;5;239m 14 \e[m\e[38;5;250m\e[48;5;235m \e[38;5;167mlegend_size\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m \e[38;5;143m'0'\e[m\e[38;5;250m\e[48;5;235m\r\n\e[38;5;239m 15 \e[m\e[38;5;250m\e[48;5;235m \e[38;5;167mrequests\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m\r\n\e[38;5;239m 16 \e[m\e[38;5;250m\e[48;5;235m \e[38;5;250m- \e[m\e[38;5;250m\e[48;5;235m\e[38;5;167mq\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m avg:system.disk.in_use{role:conversion}\r\n\e[38;5;239m 17 \e[m\e[38;5;250m\e[48;5;235m \e[38;5;167mshow_legend\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m \e[38;5;173mfalse\e[m\e[38;5;250m\e[48;5;235m\r\n\e[38;5;239m 18 \e[m\e[38;5;250m\e[48;5;235m \e[38;5;167mtitle\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m system.disk.in_use with role:conversion\r\n\e[38;5;239m 19 \e[m\e[38;5;250m\e[48;5;235m \e[38;5;167mtype\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m timeseries\r\n\e[38;5;239m 20 \e[m\e[38;5;250m\e[48;5;235m \e[38;5;167mid\e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m:\e[m\e[38;5;250m\e[48;5;235m \e[38;5;173m3954182908525150\e[m\e[38;5;250m\e[48;5;235m\r\n\e[38;5;239m~ \e[22;1H~ \e[23;1H~ \e[24;1H~ \e[25;1H~ \e[26;1H~ \e[27;1H~ \e[28;1H~ \e[29;1H~ \e[30;1H~ \e[31;1H~ \e[32;1H~ \e[33;1H~ \e[34;1H~ \e[35;1H~ \e[36;1H~ \e[37;1H~ " 320 | - delay: 40 321 | content: " \e[38;1H~ \e[39;1H~ \e[40;1H~ " 322 | - delay: 40 323 | content: "\e[?2004h\e[>4;2m\e[m\e[38;5;250m\e[48;5;235m\e[41;1H\e[1m\e[38;5;235m\e[48;5;109m NORMAL \e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m\e[48;5;236m backup/dashboards/s2s-uyz-74d.yaml yaml \e[m\e[38;5;250m\e[48;5;235m\e[38;5;250m\e[48;5;239m utf-8[unix] \e[m\e[38;5;250m\e[48;5;235m\e[38;5;235m\e[48;5;109m 5% \e[m\e[38;5;250m\e[48;5;235m\e[1m\e[38;5;235m\e[48;5;109m☰ 1/20 ㏑\e[m\e[38;5;250m\e[48;5;235m\e[38;5;235m\e[48;5;109m : 1 \e[1;5H\e[?25h" 324 | - delay: 40 325 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hj\e[1;5H" 326 | - delay: 40 327 | content: "\e[42;154H 
\e[2;5H\e[41;143H\e[38;5;235m\e[48;5;109m10\e[m\e[38;5;250m\e[48;5;235m\e[7C\e[1m\e[38;5;235m\e[48;5;109m2/\e[2;5H\e[?25h" 328 | - delay: 40 329 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hj\e[2;5H\e[42;154H \e[3;5H\e[41;144H\e[38;5;235m\e[48;5;109m5\e[m\e[38;5;250m\e[48;5;235m\e[7C\e[1m\e[38;5;235m\e[48;5;109m3/\e[3;5H\e[?25h" 330 | - delay: 40 331 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hj\e[3;5H\e[42;154H \e[4;5H\e[41;143H\e[38;5;235m\e[48;5;109m20\e[m\e[38;5;250m\e[48;5;235m\e[7C\e[1m\e[38;5;235m\e[48;5;109m4/\e[4;5H\e[?25h" 332 | - delay: 40 333 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hj\e[4;5H\e[42;154H \e[5;5H\e[41;144H\e[38;5;235m\e[48;5;109m5\e[m\e[38;5;250m\e[48;5;235m\e[7C\e[1m\e[38;5;235m\e[48;5;109m5/\e[5;5H\e[?25h" 334 | - delay: 40 335 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hj\e[5;5H\e[42;154H \e[6;5H\e[41;143H\e[38;5;235m\e[48;5;109m30\e[m\e[38;5;250m\e[48;5;235m\e[7C\e[1m\e[38;5;235m\e[48;5;109m6/\e[6;5H\e[?25h" 336 | - delay: 40 337 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hj\e[6;5H\e[42;154H \e[7;5H\e[41;144H\e[38;5;235m\e[48;5;109m5\e[m\e[38;5;250m\e[48;5;235m\e[7C\e[1m\e[38;5;235m\e[48;5;109m7/\e[7;5H\e[?25h" 338 | - delay: 40 339 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hj\e[7;5H\e[42;154H \e[8;5H\e[41;143H\e[38;5;235m\e[48;5;109m40\e[m\e[38;5;250m\e[48;5;235m\e[7C\e[1m\e[38;5;235m\e[48;5;109m8/\e[8;5H\e[?25h" 340 | - delay: 40 341 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hj\e[8;5H\e[42;154H \e[9;5H\e[41;144H\e[38;5;235m\e[48;5;109m5\e[m\e[38;5;250m\e[48;5;235m\e[7C\e[1m\e[38;5;235m\e[48;5;109m9/\e[9;5H\e[?25h" 342 | - delay: 40 343 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hj\e[9;5H\e[42;154H \e[10;5H\e[41;143H\e[38;5;235m\e[48;5;109m50\e[m\e[38;5;250m\e[48;5;235m\e[6C\e[1m\e[38;5;235m\e[48;5;109m10/\e[10;5H\e[?25h" 344 | - delay: 40 345 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hj\e[10;5H\e[42;154H \e[11;5H\e[41;144H\e[38;5;235m\e[48;5;109m5\e[m\e[38;5;250m\e[48;5;235m\e[7C\e[1m\e[38;5;235m\e[48;5;109m1/\e[11;5H\e[?25h" 346 | - delay: 40 347 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;5H\e[42;154H \e[11;6H\e[41;163H\e[38;5;235m\e[48;5;109m2\e[11;6H\e[?25h" 348 | - delay: 40 349 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;6H\e[42;154H \e[11;7H\e[41;163H\e[38;5;235m\e[48;5;109m3\e[11;7H\e[?25h" 350 | - delay: 40 351 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;7H\e[42;154H \e[11;8H\e[41;163H\e[38;5;235m\e[48;5;109m4\e[11;8H\e[?25h" 352 | - delay: 40 353 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;8H\e[42;154H \e[11;9H\e[41;163H\e[38;5;235m\e[48;5;109m5\e[11;9H\e[?25h" 354 | - delay: 40 355 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;9H\e[42;154H \e[11;10H\e[41;163H\e[38;5;235m\e[48;5;109m6\e[11;10H\e[?25h" 356 | - delay: 40 357 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;10H\e[42;154H \e[11;11H\e[41;163H\e[38;5;235m\e[48;5;109m7\e[11;11H\e[?25h" 358 | - delay: 40 359 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;11H\e[42;154H \e[11;12H\e[41;163H\e[38;5;235m\e[48;5;109m8\e[11;12H\e[?25h" 360 | - delay: 40 361 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;12H\e[42;154H \e[11;13H\e[41;163H\e[38;5;235m\e[48;5;109m9\e[11;13H\e[?25h" 362 | - delay: 40 363 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;13H\e[42;154H 
\e[11;14H\e[41;162H\e[38;5;235m\e[48;5;109m10\e[11;14H\e[?25h\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;14H\e[42;154H \e[11;15H\e[41;163H\e[38;5;235m\e[48;5;109m1\e[11;15H\e[?25h" 364 | - delay: 40 365 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;15H\e[42;154H \e[11;16H\e[41;163H\e[38;5;235m\e[48;5;109m2\e[11;16H\e[?25h" 366 | - delay: 40 367 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;16H\e[42;154H \e[11;17H\e[41;163H\e[38;5;235m\e[48;5;109m3\e[11;17H\e[?25h" 368 | - delay: 40 369 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;17H\e[42;154H \e[11;18H\e[41;163H\e[38;5;235m\e[48;5;109m4\e[11;18H\e[?25h" 370 | - delay: 40 371 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;18H\e[42;154H \e[11;19H\e[41;163H\e[38;5;235m\e[48;5;109m5\e[11;19H\e[?25h\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;19H\e[42;154H \e[11;20H\e[41;163H\e[38;5;235m\e[48;5;109m6\e[11;20H\e[?25h" 372 | - delay: 40 373 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;20H\e[42;154H \e[11;21H\e[41;163H\e[38;5;235m\e[48;5;109m7\e[11;21H\e[?25h" 374 | - delay: 40 375 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;21H\e[42;154H \e[11;22H\e[41;163H\e[38;5;235m\e[48;5;109m8\e[11;22H\e[?25h" 376 | - delay: 40 377 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;22H\e[42;154H \e[11;23H\e[41;163H\e[38;5;235m\e[48;5;109m9\e[11;23H\e[?25h" 378 | - delay: 40 379 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;23H\e[42;154H \e[11;24H\e[41;162H\e[38;5;235m\e[48;5;109m20\e[11;24H\e[?25h" 380 | - delay: 40 381 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;24H\e[42;154H \e[11;25H\e[41;163H\e[38;5;235m\e[48;5;109m1\e[11;25H\e[?25h" 382 | - delay: 40 383 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;25H\e[42;154H \e[11;26H\e[41;163H\e[38;5;235m\e[48;5;109m2\e[11;26H\e[?25h" 384 | - delay: 40 385 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;26H\e[42;154H \e[11;27H\e[41;163H\e[38;5;235m\e[48;5;109m3\e[11;27H\e[?25h" 386 | - delay: 40 387 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;27H\e[42;154H \e[11;28H\e[41;163H\e[38;5;235m\e[48;5;109m4\e[11;28H\e[?25h" 388 | - delay: 40 389 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;28H\e[42;154H \e[11;29H\e[41;163H\e[38;5;235m\e[48;5;109m5\e[11;29H\e[?25h" 390 | - delay: 40 391 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;29H\e[42;154H \e[11;30H\e[41;163H\e[38;5;235m\e[48;5;109m6\e[11;30H\e[?25h" 392 | - delay: 40 393 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;30H\e[42;154H \e[11;31H\e[41;163H\e[38;5;235m\e[48;5;109m7\e[11;31H\e[?25h" 394 | - delay: 40 395 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;31H\e[42;154H \e[11;32H\e[41;163H\e[38;5;235m\e[48;5;109m8\e[11;32H\e[?25h" 396 | - delay: 40 397 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hl\e[11;32H\e[42;154H \e[11;33H\e[41;163H\e[38;5;235m\e[48;5;109m9\e[11;33H\e[?25h" 398 | - delay: 40 399 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154Hc\e[11;33H\e[?25h" 400 | - delay: 40 401 | content: "\e[?25l\e[42;154H \e[11;33H\e[?25h\e]50;CursorShape=0\a\e[?25l\e[42;154Hcw\e[11;33H" 402 | - delay: 40 403 | content: "\e[42;154H \e[11;33H\e[42;1H\e[1m\e[38;5;143m-- INSERT --\e[m\e[38;5;250m\e[48;5;235m\e[42;13H\e[K\e]50;CursorShape=1\a\e[11;33H Average Timeboard\e[11;51H\e[K\e[41;1H\e[1m\e[38;5;235m\e[48;5;143m 
INSERT\e[m\e[38;5;250m\e[48;5;235m\e[1m\e[38;5;235m\e[48;5;109m \e[m\e[38;5;250m\e[48;5;235m\b\e[1m\e[38;5;235m\e[48;5;143m \e[m\e[38;5;250m\e[48;5;235m\e[38;5;167m\e[48;5;236m backup/dashboards/s2s-uyz-74d.yaml[+] \e[m\e[38;5;250m\e[48;5;235m\e[6C\e[38;5;143m\e[48;5;235m utf-8[unix] \e[m\e[38;5;250m\e[48;5;235m\e[38;5;235m\e[48;5;143m 55% \e[m\e[38;5;250m\e[48;5;235m\e[1m\e[38;5;235m\e[48;5;143m☰ 11\e[m\e[38;5;250m\e[48;5;235m\e[1m\e[38;5;235m\e[48;5;109m/\e[m\e[38;5;250m\e[48;5;235m\b\e[1m\e[38;5;235m\e[48;5;143m/20 ㏑\e[m\e[38;5;250m\e[48;5;235m\e[38;5;235m\e[48;5;109m \e[m\e[38;5;250m\e[48;5;235m\b\e[38;5;235m\e[48;5;143m : 29 \e[11;33H\e[?25h" 404 | - delay: 40 405 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235mb Average Timeboard\e[41;162H\e[38;5;235m\e[48;5;143m30\e[11;34H\e[?25h" 406 | - delay: 40 407 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235mo Average Timeboard\e[41;163H\e[38;5;235m\e[48;5;143m1\e[11;35H\e[?25h" 408 | - delay: 40 409 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235mv Average Timeboard\e[41;163H\e[38;5;235m\e[48;5;143m2\e[11;36H\e[?25h" 410 | - delay: 40 411 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235me Average Timeboard\e[41;163H\e[38;5;235m\e[48;5;143m3\e[11;37H\e[?25h" 412 | - delay: 40 413 | content: "\e[m\e[38;5;250m\e[48;5;235m\e[42;1H\e[K\e[11;36H\e[?25l\e[42;154H^[\e[11;36H" 414 | - delay: 40 415 | content: "\e[42;154H \e[11;37H\e]50;CursorShape=0\a" 416 | - delay: 40 417 | content: "\e[41;1H\e[1m\e[38;5;235m\e[48;5;109m NORMAL\e[m\e[38;5;250m\e[48;5;235m\e[1m\e[38;5;235m\e[48;5;143m \e[m\e[38;5;250m\e[48;5;235m\b\e[1m\e[38;5;235m\e[48;5;109m \e[m\e[38;5;250m\e[48;5;235m\e[119C\e[38;5;250m\e[48;5;239m utf-8[unix] \e[m\e[38;5;250m\e[48;5;235m\e[38;5;235m\e[48;5;109m 55% \e[m\e[38;5;250m\e[48;5;235m\e[1m\e[38;5;235m\e[48;5;109m☰ 11\e[m\e[38;5;250m\e[48;5;235m\e[1m\e[38;5;235m\e[48;5;143m/\e[m\e[38;5;250m\e[48;5;235m\b\e[1m\e[38;5;235m\e[48;5;109m/20 ㏑\e[m\e[38;5;250m\e[48;5;235m\e[38;5;235m\e[48;5;143m \e[m\e[38;5;250m\e[48;5;235m\b\e[38;5;235m\e[48;5;109m : 32 \e[11;36H\e[?25h" 418 | - delay: 40 419 | content: "\e[?25l\e[m\e[38;5;250m\e[48;5;235m\e[42;154H:\e[11;36H\e[42;154H\e[K\e[42;1H:\e[?25h" 420 | - delay: 40 421 | content: x 422 | - delay: 400 423 | content: "\r\e[?25l\e[?1006l\e[?1002l\e[?2004l\e[>4;m\"backup/dashboards/s2s-uyz-74d.yaml\"" 424 | - delay: 400 425 | content: ' 20L, 537B written' 426 | - delay: 400 427 | content: "\r" 428 | - delay: 400 429 | content: "\e[23;2t\e[23;1t\r\r\n\e[39;49m\e[?2004l\e[?1l\e>\e[?25h\e[>4;m\e[?1004l\e[?1049l" 430 | - delay: 400 431 | content: "\e[?1034h\r\r\n\e[36;1m\e[0;33m|ruby-2.6.3| \e[0; \e[39min \e[0;32m~/demo\r\r\n\e[36;1m○\e[0;32m \e[0;32m→\e[39m " 432 | - delay: 400 433 | content: d 434 | - delay: 40 435 | content: a 436 | - delay: 40 437 | content: t 438 | - delay: 40 439 | content: a 440 | - delay: 40 441 | content: d 442 | - delay: 40 443 | content: o 444 | - delay: 40 445 | content: g 446 | - delay: 40 447 | content: _ 448 | - delay: 40 449 | content: b 450 | - delay: 40 451 | content: a 452 | - delay: 40 453 | content: c 454 | - delay: 40 455 | content: k 456 | - delay: 40 457 | content: u 458 | - delay: 40 459 | content: p 460 | - delay: 40 461 | content: ' ' 462 | - delay: 40 463 | content: '-' 464 | - delay: 40 465 | content: '-' 466 | - delay: 40 467 | content: s 468 | - delay: 40 469 | content: h 470 | - delay: 40 471 | content: h 472 | - delay: 40 473 | content: h 474 | - delay: 40 475 | content: ' ' 476 | - delay: 40 477 | content: d 478 | - delay: 40 479 | content: i 480 | - delay: 40 
481 | content: f 482 | - delay: 40 483 | content: f 484 | - delay: 40 485 | content: s 486 | - delay: 40 487 | content: "\r\n" 488 | - delay: 400 489 | content: " ---\r\n id: s2s-uyz-74d\r\n ---\r\n author_handle: jimp@scribd.com\r\n author_name: Sung Park\r\n created_at: '2020-06-22T00:45:23.876622+00:00'\r\n description: This timeboard just got an additional upgrade.\r\n id: s2s-uyz-74d\r\n is_read_only: false\r\n layout_type: ordered\r\n notify_list: []\r\n template_variables: []\r\n\e[31m-title: Jim's Exceptionally Acceptably Average Timeboard\e[0m\r\n\e[32m+title: Jim's Exceptionally Above Average Timeboard\e[0m\r\n widgets:\r\n - definition:\r\n legend_size: '0'\r\n requests:\r\n - q: avg:system.disk.in_use{role:conversion}\r\n show_legend: false\r\n title: system.disk.in_use with role:conversion\r\n type: timeseries\r\n id: 3954182908525150\r\n" 490 | - delay: 400 491 | content: "\r\r\n\e[36;1m\e[0;33m|ruby-2.6.3| \e[0; \e[39min \e[0;32m~/demo\r\r\n\e[36;1m○\e[0;32m \e[0;32m→\e[39m " 492 | - delay: 400 493 | content: d 494 | - delay: 40 495 | content: a 496 | - delay: 40 497 | content: t 498 | - delay: 40 499 | content: a 500 | - delay: 40 501 | content: d 502 | - delay: 40 503 | content: o 504 | - delay: 40 505 | content: g 506 | - delay: 40 507 | content: _ 508 | - delay: 40 509 | content: b 510 | - delay: 40 511 | content: a 512 | - delay: 40 513 | content: c 514 | - delay: 40 515 | content: k 516 | - delay: 40 517 | content: u 518 | - delay: 40 519 | content: p 520 | - delay: 40 521 | content: ' ' 522 | - delay: 40 523 | content: '-' 524 | - delay: 40 525 | content: '-' 526 | - delay: 40 527 | content: s 528 | - delay: 40 529 | content: h 530 | - delay: 40 531 | content: h 532 | - delay: 40 533 | content: h 534 | - delay: 40 535 | content: ' ' 536 | - delay: 40 537 | content: r 538 | - delay: 40 539 | content: e 540 | - delay: 40 541 | content: s 542 | - delay: 40 543 | content: t 544 | - delay: 40 545 | content: o 546 | - delay: 40 547 | content: r 548 | - delay: 40 549 | content: e 550 | - delay: 400 551 | content: "\r\n" 552 | - delay: 400 553 | content: "--------------------------------------------------------------------------------\r\n ---\r\n id: s2s-uyz-74d\r\n\r\n ---\r\n id: ---\r\n author_handle: jimp@scribd.com\r\n author_name: Sung Park\r\n created_at: '2020-06-22T00:45:23.876622+00:00'\r\n description: This timeboard just got an additional upgrade.\r\n id: s2s-uyz-74d\r\n is_read_only: false\r\n layout_type: ordered\r\n notify_list: []\r\n template_variables: []\r\n\e[31m-title: Jim's Exceptionally Acceptably Average Timeboard\e[0m\r\n\e[32m+title: Jim's Exceptionally Above Average Timeboard\e[0m\r\n widgets:\r\n - definition:\r\n legend_size: '0'\r\n requests:\r\n - q: avg:system.disk.in_use{role:conversion}\r\n show_legend: false\r\n title: system.disk.in_use with role:conversion\r\n type: timeseries\r\n id: 3954182908525150\r\n\r\n(r)estore to Datadog, overwrite local changes and (d)ownload, (s)kip, or (q)uit?\r\n" 554 | - delay: 400 555 | content: r 556 | - delay: 400 557 | content: "\r\n" 558 | - delay: 400 559 | content: "Restoring s2s-uyz-74d to Datadog.\r\n" 560 | - delay: 400 561 | content: "\r\n" 562 | - delay: 400 563 | content: "\r\r\n\e[36;1m\e[0;33m|ruby-2.6.3| \e[0; \e[39min \e[0;32m~/demo\r\r\n\e[36;1m○\e[0;32m \e[0;32m→\e[39m " 564 | -------------------------------------------------------------------------------- /lib/datadog_backup.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: 
true 2 | 3 | require 'concurrent' 4 | 5 | require_relative 'datadog_backup/local_filesystem' 6 | require_relative 'datadog_backup/options' 7 | require_relative 'datadog_backup/cli' 8 | require_relative 'datadog_backup/resources' 9 | require_relative 'datadog_backup/dashboards' 10 | require_relative 'datadog_backup/monitors' 11 | require_relative 'datadog_backup/slos' 12 | require_relative 'datadog_backup/synthetics' 13 | require_relative 'datadog_backup/thread_pool' 14 | require_relative 'datadog_backup/version' 15 | require_relative 'datadog_backup/deprecations' 16 | DatadogBackup::Deprecations.check 17 | 18 | # DatadogBackup is a gem for backing up and restoring Datadog monitors and dashboards. 19 | module DatadogBackup 20 | end 21 | -------------------------------------------------------------------------------- /lib/datadog_backup/cli.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'optparse' 4 | require 'amazing_print' 5 | 6 | module DatadogBackup 7 | # CLI is the command line interface for the datadog_backup gem. 8 | class Cli 9 | include ::DatadogBackup::Options 10 | 11 | def all_diff_futures 12 | LOGGER.info("Starting diffs on #{::DatadogBackup::ThreadPool::TPOOL.max_length} threads") 13 | any_resource_instance 14 | .all_file_ids_for_selected_resources 15 | .map do |file_id| 16 | Concurrent::Promises.future_on(::DatadogBackup::ThreadPool::TPOOL, file_id) do |fid| 17 | [fid, getdiff(fid)] 18 | end 19 | end 20 | end 21 | 22 | def any_resource_instance 23 | resource_instances.first 24 | end 25 | 26 | def backup 27 | resource_instances.each(&:purge) 28 | resource_instances.each(&:backup) 29 | any_resource_instance.all_files 30 | end 31 | 32 | def definitive_resource_instance(id) 33 | matching_resource_instance(any_resource_instance.class_from_id(id)) 34 | end 35 | 36 | def getdiff(id) 37 | result = definitive_resource_instance(id).diff(id) 38 | case result 39 | when '---', '', "\n", '
' 40 | nil 41 | else 42 | result 43 | end 44 | end 45 | 46 | # rubocop:disable Style/StringConcatenation 47 | def format_diff_output(diff_output) 48 | case diff_format 49 | when nil, :color 50 | diff_output.map do |id, diff| 51 | " ---\n id: #{id}\n#{diff}" 52 | end.join("\n") 53 | when :html 54 | '' + 57 | diff_output.map do |id, diff| 58 | "

---
id: #{id}
#{diff}" 59 | end.join('
') + 60 | '' 61 | else 62 | raise 'Unexpected diff_format.' 63 | end 64 | end 65 | # rubocop:enable Style/StringConcatenation 66 | 67 | def initialize(options) 68 | @options = options 69 | end 70 | 71 | def restore 72 | futures = all_diff_futures 73 | watcher = ::DatadogBackup::ThreadPool.watcher 74 | 75 | futures.each do |future| 76 | id, diff = *future.value! 77 | next if diff.nil? || diff.empty? 78 | 79 | if @options[:force_restore] 80 | definitive_resource_instance(id).restore(id) 81 | else 82 | ask_to_restore(id, diff) 83 | end 84 | end 85 | watcher.join if watcher.status 86 | end 87 | 88 | def run! 89 | puts(send(action.to_sym)) 90 | rescue SystemExit, Interrupt 91 | ::DatadogBackup::ThreadPool.shutdown 92 | end 93 | 94 | private 95 | 96 | def ask_to_restore(id, diff) 97 | puts '--------------------------------------------------------------------------------' 98 | puts format_diff_output([id, diff]) 99 | puts '(r)estore to Datadog, overwrite local changes and (d)ownload, (s)kip, or (q)uit?' 100 | loop do 101 | response = $stdin.gets.chomp 102 | case response 103 | when 'q' 104 | exit 105 | when 'r' 106 | puts "Restoring #{id} to Datadog." 107 | definitive_resource_instance(id).restore(id) 108 | break 109 | when 'd' 110 | puts "Downloading #{id} from Datadog." 111 | definitive_resource_instance(id).get_and_write_file(id) 112 | break 113 | when 's' 114 | break 115 | else 116 | puts 'Invalid response, please try again.' 117 | end 118 | end 119 | end 120 | 121 | def matching_resource_instance(klass) 122 | resource_instances.select { |resource_instance| resource_instance.instance_of?(klass) }.first 123 | end 124 | 125 | def resource_instances 126 | @resource_instances ||= resources.map do |resource| 127 | resource.new(@options) 128 | end 129 | end 130 | end 131 | end 132 | -------------------------------------------------------------------------------- /lib/datadog_backup/dashboards.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module DatadogBackup 4 | # Dashboards specific overrides for backup and restore. 5 | class Dashboards < Resources 6 | def all 7 | get_all.fetch('dashboards') 8 | end 9 | 10 | def backup 11 | LOGGER.info("Starting diffs on #{::DatadogBackup::ThreadPool::TPOOL.max_length} threads") 12 | futures = all.map do |dashboard| 13 | Concurrent::Promises.future_on(::DatadogBackup::ThreadPool::TPOOL, dashboard) do |board| 14 | id = board[id_keyname] 15 | get_and_write_file(id) 16 | end 17 | end 18 | 19 | watcher = ::DatadogBackup::ThreadPool.watcher 20 | watcher.join if watcher.status 21 | 22 | Concurrent::Promises.zip(*futures).value! 23 | end 24 | 25 | def get_by_id(id) 26 | begin 27 | dashboard = except(get(id)) 28 | rescue Faraday::ResourceNotFound => e 29 | dashboard = {} 30 | end 31 | except(dashboard) 32 | end 33 | 34 | def initialize(options) 35 | super(options) 36 | @banlist = %w[modified_at url].freeze 37 | end 38 | 39 | private 40 | 41 | def api_version 42 | 'v1' 43 | end 44 | 45 | def api_resource_name 46 | 'dashboard' 47 | end 48 | 49 | def id_keyname 50 | 'id' 51 | end 52 | end 53 | end 54 | -------------------------------------------------------------------------------- /lib/datadog_backup/deprecations.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module DatadogBackup 4 | # Notify the user if they are using deprecated features. 
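# The check below is wired up at load time: lib/datadog_backup.rb calls
# DatadogBackup::Deprecations.check as soon as the gem is required, so the warning
# fires once per process. Hedged sketch of the observable behaviour (assumes LOGGER
# is the globally configured logger used throughout this gem):
#   require 'datadog_backup'
#   # on ruby 3.0.x this logs: "ruby-3.0.x is deprecated. Ruby 3.1 or higher will be required ..."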
5 | module Deprecations 6 | def self.check 7 | LOGGER.warn "ruby-#{RUBY_VERSION} is deprecated. Ruby 3.1 or higher will be required to use this gem after datadog_backup@v3" if RUBY_VERSION < '3.1' 8 | end 9 | end 10 | end 11 | -------------------------------------------------------------------------------- /lib/datadog_backup/local_filesystem.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'fileutils' 4 | require 'json' 5 | require 'yaml' 6 | require 'deepsort' 7 | 8 | module DatadogBackup 9 | ## 10 | # Meant to be mixed into DatadogBackup::Resources 11 | # Relies on @options[:backup_dir] and @options[:output_format] 12 | module LocalFilesystem 13 | def all_files 14 | ::Dir.glob(::File.join(backup_dir, '**', '*')).select { |f| ::File.file?(f) } 15 | end 16 | 17 | def all_file_ids 18 | all_files.map { |file| ::File.basename(file, '.*') } 19 | end 20 | 21 | def all_file_ids_for_selected_resources 22 | all_file_ids.select do |id| 23 | resources.include? class_from_id(id) 24 | end 25 | end 26 | 27 | def class_from_id(id) 28 | class_string = ::File.dirname(find_file_by_id(id)).split('/').last.capitalize 29 | ::DatadogBackup.const_get(class_string) 30 | end 31 | 32 | def dump(object) 33 | case output_format 34 | when :json 35 | JSON.pretty_generate(object.deep_sort(array: disable_array_sort ? false : true)) 36 | when :yaml 37 | YAML.dump(object.deep_sort(array: disable_array_sort ? false : true)).gsub('"y":','y:') 38 | else 39 | raise 'invalid output_format specified or not specified' 40 | end 41 | end 42 | 43 | def filename(id) 44 | ::File.join(mydir, "#{id}.#{output_format}") 45 | end 46 | 47 | def file_type(filepath) 48 | ::File.extname(filepath).strip.downcase[1..].to_sym 49 | end 50 | 51 | def find_file_by_id(id) 52 | ::Dir.glob(::File.join(backup_dir, '**', "#{id}.*")).first 53 | end 54 | 55 | def load_from_file(string, output_format) 56 | case output_format 57 | when :json 58 | JSON.parse(string) 59 | when :yaml 60 | YAML.safe_load(string) 61 | else 62 | raise 'invalid output_format specified or not specified' 63 | end 64 | end 65 | 66 | def load_from_file_by_id(id) 67 | filepath = find_file_by_id(id) 68 | load_from_file(::File.read(filepath), file_type(filepath)) 69 | end 70 | 71 | def mydir 72 | ::File.join(backup_dir, myclass) 73 | end 74 | 75 | def purge 76 | ::FileUtils.rm(::Dir.glob(File.join(mydir, '*'))) 77 | end 78 | 79 | def write_file(data, filename) 80 | LOGGER.info "Backing up #{filename}" 81 | file = ::File.open(filename, 'w') 82 | file.write(data) 83 | ensure 84 | file.close 85 | end 86 | end 87 | end 88 | -------------------------------------------------------------------------------- /lib/datadog_backup/monitors.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module DatadogBackup 4 | # Monitor specific overrides for backup and restore. 5 | class Monitors < Resources 6 | def all 7 | get_all 8 | end 9 | 10 | def backup 11 | all.map do |monitor| 12 | id = monitor['id'] 13 | write_file(dump(get_by_id(id)), filename(id)) 14 | end 15 | end 16 | 17 | def get_by_id(id) 18 | monitor = all.select { |m| m['id'].to_s == id.to_s }.first 19 | monitor.nil? ? 
{} : except(monitor) 20 | end 21 | 22 | def initialize(options) 23 | super(options) 24 | @banlist = %w[overall_state overall_state_modified matching_downtimes modified].freeze 25 | end 26 | 27 | private 28 | 29 | def api_version 30 | 'v1' 31 | end 32 | 33 | def api_resource_name 34 | 'monitor' 35 | end 36 | end 37 | end 38 | -------------------------------------------------------------------------------- /lib/datadog_backup/options.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module DatadogBackup 4 | # Describes what the user wants to see done. 5 | module Options 6 | def action 7 | @options[:action] 8 | end 9 | 10 | def backup_dir 11 | @options[:backup_dir] 12 | end 13 | 14 | def concurrency_limit 15 | @options[:concurrency_limit] | 2 16 | end 17 | 18 | def diff_format 19 | @options[:diff_format] 20 | end 21 | 22 | # Either :json or :yaml 23 | def output_format 24 | @options[:output_format] 25 | end 26 | 27 | def resources 28 | @options[:resources] 29 | end 30 | 31 | def force_restore 32 | @options[:force_restore] 33 | end 34 | 35 | def disable_array_sort 36 | @options[:disable_array_sort] 37 | end 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /lib/datadog_backup/resources.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'diffy' 4 | require 'deepsort' 5 | require 'faraday' 6 | require 'faraday/retry' 7 | 8 | module DatadogBackup 9 | # The default options for backing up and restores. 10 | # This base class is meant to be extended by specific resources, such as Dashboards, Monitors, and so on. 11 | class Resources 12 | include ::DatadogBackup::LocalFilesystem 13 | include ::DatadogBackup::Options 14 | 15 | RETRY_OPTIONS = { 16 | max: 5, 17 | interval: 0.05, 18 | interval_randomness: 0.5, 19 | backoff_factor: 2, 20 | rate_limit_reset_header: 'x-ratelimit-reset', 21 | exceptions: [Faraday::TooManyRequestsError] + Faraday::Retry::Middleware::DEFAULT_EXCEPTIONS 22 | }.freeze 23 | 24 | def backup 25 | raise 'subclass is expected to implement #backup' 26 | end 27 | 28 | # Returns the diffy diff. 29 | # Optionally, supply an array of keys to remove from comparison 30 | def diff(id) 31 | current = except(get_by_id(id)).deep_sort(array: disable_array_sort ? false : true).to_yaml 32 | filesystem = except(load_from_file_by_id(id)).deep_sort(array: disable_array_sort ? 
false : true).to_yaml 33 | result = ::Diffy::Diff.new(current, filesystem, include_plus_and_minus_in_html: true).to_s(diff_format) 34 | LOGGER.debug("Compared ID #{id} and found filesystem: #{filesystem} <=> current: #{current} == result: #{result}") 35 | result.chomp 36 | end 37 | 38 | # Returns a hash with banlist elements removed 39 | def except(hash) 40 | hash.tap do # tap returns self 41 | @banlist.each do |key| 42 | hash.delete(key) # delete returns the value at the deleted key, hence the tap wrapper 43 | end 44 | end 45 | end 46 | 47 | # Fetch the specified resource from Datadog 48 | def get(id) 49 | params = {} 50 | headers = {} 51 | response = api_service.get("/api/#{api_version}/#{api_resource_name}/#{id}", params, headers) 52 | body_with_2xx(response) 53 | end 54 | 55 | # Returns a list of all resources in Datadog 56 | # Do not use directly, but use the child classes' #all method instead 57 | def get_all 58 | return @get_all if @get_all 59 | 60 | params = {} 61 | headers = {} 62 | response = api_service.get("/api/#{api_version}/#{api_resource_name}", params, headers) 63 | @get_all = body_with_2xx(response) 64 | end 65 | 66 | # Download the resource from Datadog and write it to a file 67 | def get_and_write_file(id) 68 | body = get_by_id(id) 69 | write_file(dump(body), filename(id)) 70 | body 71 | end 72 | 73 | # Fetch the specified resource from Datadog and remove the banlist elements 74 | def get_by_id(id) 75 | except(get(id)) 76 | end 77 | 78 | def initialize(options) 79 | @options = options 80 | @banlist = [] 81 | ::FileUtils.mkdir_p(mydir) 82 | end 83 | 84 | def myclass 85 | self.class.to_s.split(':').last.downcase 86 | end 87 | 88 | # Create a new resource in Datadog 89 | def create(body) 90 | headers = {} 91 | response = api_service.post("/api/#{api_version}/#{api_resource_name}", body, headers) 92 | body = body_with_2xx(response) 93 | LOGGER.warn "Successfully created #{body.fetch(id_keyname)} in datadog." 94 | LOGGER.info 'Invalidating cache' 95 | @get_all = nil 96 | body 97 | end 98 | 99 | # Update an existing resource in Datadog 100 | def update(id, body) 101 | headers = {} 102 | response = api_service.put("/api/#{api_version}/#{api_resource_name}/#{id}", body, headers) 103 | body = body_with_2xx(response) 104 | LOGGER.warn "Successfully restored #{id} to datadog." 105 | LOGGER.info 'Invalidating cache' 106 | @get_all = nil 107 | body 108 | end 109 | 110 | # If the resource exists in Datadog, update it. Otherwise, create it. 111 | def restore(id) 112 | body = load_from_file_by_id(id) 113 | begin 114 | update(id, body) 115 | rescue Faraday::ResourceNotFound => e 116 | create_newly(id, body) 117 | end 118 | end 119 | 120 | # Return the Faraday body from a response with a 2xx status code, otherwise raise an error 121 | def body_with_2xx(response) 122 | unless response.status.to_s =~ /^2/ 123 | raise "#{caller_locations(1, 124 | 1)[0].label} failed with error #{response.status}" 125 | end 126 | 127 | response.body 128 | end 129 | 130 | private 131 | 132 | def api_url 133 | ENV.fetch('DD_SITE_URL', 'https://api.datadoghq.com/') 134 | end 135 | 136 | def api_version 137 | raise 'subclass is expected to implement #api_version' 138 | end 139 | 140 | def api_resource_name 141 | raise 'subclass is expected to implement #api_resource_name' 142 | end 143 | 144 | # Some resources have a different key for the id. 
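# The default is 'id', which matches Dashboards and SLOs; Synthetics overrides this
# method to return 'public_id' (see synthetics.rb). Code paths such as #create and
# #create_newly read the identifier via id_keyname rather than hard-coding 'id'.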
145 | def id_keyname 146 | 'id' 147 | end 148 | 149 | def api_service 150 | @api_service ||= Faraday.new( 151 | url: api_url, 152 | headers: { 153 | 'DD-API-KEY' => ENV.fetch('DD_API_KEY'), 154 | 'DD-APPLICATION-KEY' => ENV.fetch('DD_APP_KEY') 155 | } 156 | ) do |faraday| 157 | faraday.request :json 158 | faraday.request :retry, RETRY_OPTIONS 159 | faraday.response(:logger, LOGGER, { headers: true, bodies: LOGGER.debug?, log_level: :debug }) do |logger| 160 | logger.filter(/(DD-API-KEY:)([^&]+)/, '\1[REDACTED]') 161 | logger.filter(/(DD-APPLICATION-KEY:)([^&]+)/, '\1[REDACTED]') 162 | end 163 | faraday.response :raise_error 164 | faraday.response :json 165 | faraday.adapter Faraday.default_adapter 166 | end 167 | end 168 | 169 | # Create a new resource in Datadog, then move the old file to the new resource's ID 170 | def create_newly(file_id, body) 171 | new_id = create(body).fetch(id_keyname) 172 | FileUtils.rm(find_file_by_id(file_id)) 173 | get_and_write_file(new_id) 174 | end 175 | end 176 | end 177 | -------------------------------------------------------------------------------- /lib/datadog_backup/slos.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module DatadogBackup 4 | # SLO specific overrides for backup and restore. 5 | class SLOs < Resources 6 | def all 7 | get_all 8 | end 9 | 10 | def backup 11 | LOGGER.info("Starting diffs on #{::DatadogBackup::ThreadPool::TPOOL.max_length} threads") 12 | futures = all.map do |slo| 13 | Concurrent::Promises.future_on(::DatadogBackup::ThreadPool::TPOOL, slo) do |board| 14 | id = board[id_keyname] 15 | get_and_write_file(id) 16 | end 17 | end 18 | 19 | watcher = ::DatadogBackup::ThreadPool.watcher 20 | watcher.join if watcher.status 21 | 22 | Concurrent::Promises.zip(*futures).value! 23 | end 24 | 25 | def get_by_id(id) 26 | begin 27 | slo = except(get(id)) 28 | rescue Faraday::ResourceNotFound => e 29 | slo = {} 30 | end 31 | except(slo) 32 | end 33 | 34 | def initialize(options) 35 | super(options) 36 | @banlist = %w[modified_at url].freeze 37 | end 38 | 39 | # Return the Faraday body from a response with a 2xx status code, otherwise raise an error 40 | def body_with_2xx(response) 41 | unless response.status.to_s =~ /^2/ 42 | raise "#{caller_locations(1, 43 | 1)[0].label} failed with error #{response.status}" 44 | end 45 | 46 | response.body.fetch('data') 47 | end 48 | 49 | private 50 | 51 | def api_version 52 | 'v1' 53 | end 54 | 55 | def api_resource_name 56 | 'slo' 57 | end 58 | 59 | def id_keyname 60 | 'id' 61 | end 62 | end 63 | end 64 | -------------------------------------------------------------------------------- /lib/datadog_backup/synthetics.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module DatadogBackup 4 | # Synthetic specific overrides for backup and restore. 5 | class Synthetics < Resources 6 | def all 7 | get_all.fetch('tests') 8 | end 9 | 10 | def backup 11 | all.map do |synthetic| 12 | id = synthetic[id_keyname] 13 | get_and_write_file(id) 14 | end 15 | end 16 | 17 | def get_by_id(id) 18 | synthetic = all.select { |s| s[id_keyname].to_s == id.to_s }.first 19 | synthetic.nil? ? 
{} : except(synthetic) 20 | end 21 | 22 | def initialize(options) 23 | super(options) 24 | @banlist = %w[creator created_at modified_at monitor_id public_id].freeze 25 | end 26 | 27 | def create(body) 28 | create_api_resource_name = api_resource_name(body) 29 | headers = {} 30 | response = api_service.post("/api/#{api_version}/#{create_api_resource_name}", body, headers) 31 | resbody = body_with_2xx(response) 32 | LOGGER.warn "Successfully created #{resbody.fetch(id_keyname)} in datadog." 33 | LOGGER.info 'Invalidating cache' 34 | @get_all = nil 35 | resbody 36 | end 37 | 38 | def update(id, body) 39 | update_api_resource_name = api_resource_name(body) 40 | headers = {} 41 | response = api_service.put("/api/#{api_version}/#{update_api_resource_name}/#{id}", body, headers) 42 | resbody = body_with_2xx(response) 43 | LOGGER.warn "Successfully restored #{id} to datadog." 44 | LOGGER.info 'Invalidating cache' 45 | @get_all = nil 46 | resbody 47 | end 48 | 49 | private 50 | 51 | def api_version 52 | 'v1' 53 | end 54 | 55 | def api_resource_name(body = nil) 56 | return 'synthetics/tests' if body.nil? 57 | return 'synthetics/tests' if body['type'].nil? 58 | return 'synthetics/tests/browser' if body['type'].to_s == 'browser' 59 | return 'synthetics/tests/api' if body['type'].to_s == 'api' 60 | 61 | raise "Unknown type #{body['type']}" 62 | end 63 | 64 | def id_keyname 65 | 'public_id' 66 | end 67 | end 68 | end 69 | -------------------------------------------------------------------------------- /lib/datadog_backup/thread_pool.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module DatadogBackup 4 | # Used by CLI and Dashboards to size thread pool according to available CPU resourcess. 5 | module ThreadPool 6 | TPOOL = ::Concurrent::ThreadPoolExecutor.new( 7 | min_threads: [2, Concurrent.processor_count].max, 8 | max_threads: [2, Concurrent.processor_count].max * 2, 9 | fallback_policy: :abort 10 | ) 11 | 12 | def self.watcher 13 | Thread.new(TPOOL) do |pool| 14 | while pool.queue_length.positive? 15 | sleep 2 16 | LOGGER.info("#{pool.queue_length} tasks remaining for execution.") 17 | end 18 | end 19 | end 20 | 21 | def self.shutdown 22 | LOGGER.fatal 'Shutdown signal caught. Performing orderly shut down of thread pool. Press Ctrl+C again to forcibly shut down, but be warned, DATA LOSS MAY OCCUR.' 23 | TPOOL.shutdown 24 | TPOOL.wait_for_termination 25 | rescue SystemExit, Interrupt 26 | LOGGER.fatal 'OK Nuking, DATA LOSS MAY OCCUR.' 27 | TPOOL.kill 28 | end 29 | end 30 | end 31 | -------------------------------------------------------------------------------- /lib/datadog_backup/version.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module DatadogBackup 4 | VERSION = '4.0.2' 5 | end 6 | -------------------------------------------------------------------------------- /release.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "branches": [ 3 | '+([0-9])?(.{+([0-9]),x}).x', 4 | 'main' 5 | ], 6 | "plugins": [ 7 | "@semantic-release/commit-analyzer", 8 | "@semantic-release/release-notes-generator", 9 | [ 10 | "@semantic-release/changelog", 11 | { 12 | "changelogFile": "CHANGELOG.md" 13 | } 14 | ], 15 | [ 16 | "semantic-release-rubygem", 17 | { 18 | "gemFileDir": "." 
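// Assumption: gemFileDir points semantic-release-rubygem at the directory holding the
// gemspec and the built .gem; "." matches the root-level datadog_backup.gemspec in this repo.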
19 | } 20 | ], 21 | [ 22 | "@semantic-release/git", 23 | { 24 | "assets": [ 25 | "CHANGELOG.md", 26 | "lib/datadog_backup/version.rb" 27 | ], 28 | "message": "chore(release): ${nextRelease.version} [skip ci]\n\n${nextRelease.notes}" 29 | } 30 | ], 31 | [ 32 | "@semantic-release/github", 33 | { 34 | "assets": [ 35 | { 36 | "path": "datadog_backup.zip", 37 | "name": "datadog_backup.${nextRelease.version}.zip", 38 | "label": "Full zip distribution" 39 | }, 40 | { 41 | "path": "datadog_backup-*.gem", 42 | "label": "Gem distribution" 43 | } 44 | ] 45 | } 46 | ], 47 | ] 48 | }; 49 | -------------------------------------------------------------------------------- /spec/datadog_backup/cli_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | describe DatadogBackup::Cli do 6 | let(:stubs) { Faraday::Adapter::Test::Stubs.new } 7 | let(:api_client_double) { Faraday.new { |f| f.adapter :test, stubs } } 8 | let(:tempdir) { Dir.mktmpdir } 9 | let(:options) do 10 | { 11 | action: 'backup', 12 | backup_dir: tempdir, 13 | diff_format: nil, 14 | output_format: :json, 15 | resources: [DatadogBackup::Dashboards] 16 | } 17 | end 18 | let(:cli) { described_class.new(options) } 19 | let(:dashboards) do 20 | dashboards = DatadogBackup::Dashboards.new(options) 21 | allow(dashboards).to receive(:api_service).and_return(api_client_double) 22 | return dashboards 23 | end 24 | 25 | before do 26 | allow(cli).to receive(:resource_instances).and_return([dashboards]) 27 | end 28 | 29 | describe '#backup' do 30 | context 'when dashboards are deleted in datadog' do 31 | let(:all_dashboards) do 32 | respond_with200( 33 | { 34 | 'dashboards' => [ 35 | { 'id' => 'stillthere' }, 36 | { 'id' => 'alsostillthere' } 37 | ] 38 | } 39 | ) 40 | end 41 | 42 | before do 43 | dashboards.write_file('{"text": "diff"}', "#{tempdir}/dashboards/stillthere.json") 44 | dashboards.write_file('{"text": "diff"}', "#{tempdir}/dashboards/alsostillthere.json") 45 | dashboards.write_file('{"text": "diff"}', "#{tempdir}/dashboards/deleted.json") 46 | 47 | stubs.get('/api/v1/dashboard') { all_dashboards } 48 | stubs.get('/api/v1/dashboard/stillthere') { respond_with200({}) } 49 | stubs.get('/api/v1/dashboard/alsostillthere') { respond_with200({}) } 50 | end 51 | 52 | it 'deletes the file locally as well' do 53 | cli.backup 54 | expect { File.open("#{tempdir}/dashboards/deleted.json", 'r') }.to raise_error(Errno::ENOENT) 55 | end 56 | end 57 | end 58 | 59 | describe '#restore' do 60 | subject(:restore) { cli.restore } 61 | let(:stdin) { class_double('STDIN') } 62 | 63 | after(:all) do 64 | $stdin = STDIN 65 | end 66 | 67 | before do 68 | $stdin = stdin 69 | dashboards.write_file('{"text": "diff"}', "#{tempdir}/dashboards/diffs1.json") 70 | allow(dashboards).to receive(:get_by_id).and_return({ 'text' => 'diff2' }) 71 | allow(dashboards).to receive(:write_file) 72 | allow(dashboards).to receive(:update) 73 | end 74 | 75 | example 'starts interactive restore' do 76 | allow(stdin).to receive(:gets).and_return('q') 77 | 78 | expect { restore }.to( 79 | output(/\(r\)estore to Datadog, overwrite local changes and \(d\)ownload, \(s\)kip, or \(q\)uit\?/).to_stdout 80 | .and(raise_error(SystemExit)) 81 | ) 82 | end 83 | 84 | context 'when the user chooses to restore' do 85 | before do 86 | allow(stdin).to receive(:gets).and_return('r') 87 | end 88 | 89 | example 'it restores from disk to server' do 90 | restore 91 | expect(dashboards).to 
have_received(:update).with('diffs1', { 'text' => 'diff' }) 92 | end 93 | end 94 | 95 | context 'when the user chooses to download' do 96 | before do 97 | allow(stdin).to receive(:gets).and_return('d') 98 | end 99 | 100 | example 'it writes from server to disk' do 101 | restore 102 | expect(dashboards).to have_received(:write_file).with(%({\n "text": "diff2"\n}), "#{tempdir}/dashboards/diffs1.json") 103 | end 104 | end 105 | 106 | context 'when the user chooses to skip' do 107 | before do 108 | allow(stdin).to receive(:gets).and_return('s') 109 | end 110 | 111 | example 'it does not write to disk' do 112 | restore 113 | expect(dashboards).not_to have_received(:write_file) 114 | end 115 | 116 | example 'it does not update the server' do 117 | restore 118 | expect(dashboards).not_to have_received(:update) 119 | end 120 | end 121 | 122 | context 'when the user chooses to quit' do 123 | before do 124 | allow(stdin).to receive(:gets).and_return('q') 125 | end 126 | 127 | example 'it exits' do 128 | expect { restore }.to raise_error(SystemExit) 129 | end 130 | 131 | example 'it does not write to disk' do 132 | restore 133 | rescue SystemExit 134 | expect(dashboards).not_to have_received(:write_file) 135 | end 136 | 137 | example 'it does not update the server' do 138 | restore 139 | rescue SystemExit 140 | expect(dashboards).not_to have_received(:update) 141 | end 142 | end 143 | end 144 | end 145 | -------------------------------------------------------------------------------- /spec/datadog_backup/core_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | describe DatadogBackup::Resources do 6 | let(:stubs) { Faraday::Adapter::Test::Stubs.new } 7 | let(:api_client_double) { Faraday.new { |f| f.adapter :test, stubs } } 8 | let(:tempdir) { Dir.mktmpdir } 9 | let(:resources) do 10 | resources = described_class.new( 11 | action: 'backup', 12 | backup_dir: tempdir, 13 | diff_format: nil, 14 | resources: [], 15 | output_format: :json 16 | ) 17 | allow(resources).to receive(:api_service).and_return(api_client_double) 18 | return resources 19 | end 20 | 21 | describe '#diff' do 22 | subject(:diff) { resources.diff('diff') } 23 | 24 | before do 25 | allow(resources).to receive(:get_by_id).and_return({ 'text' => 'diff1', 'extra' => 'diff1' }) 26 | resources.write_file('{"text": "diff2", "extra": "diff2"}', "#{tempdir}/resources/diff.json") 27 | end 28 | 29 | it { 30 | expect(diff).to eq(<<~EODIFF 31 | --- 32 | -extra: diff1 33 | -text: diff1 34 | +extra: diff2 35 | +text: diff2 36 | EODIFF 37 | .chomp) 38 | } 39 | end 40 | 41 | describe '#except' do 42 | subject { resources.except({ a: :b, b: :c }) } 43 | 44 | it { is_expected.to eq({ a: :b, b: :c }) } 45 | end 46 | 47 | describe '#initialize' do 48 | subject(:myresources) { resources } 49 | 50 | it 'makes the subdirectories' do 51 | fileutils = class_double(FileUtils).as_stubbed_const 52 | allow(fileutils).to receive(:mkdir_p) 53 | myresources 54 | expect(fileutils).to have_received(:mkdir_p).with("#{tempdir}/resources") 55 | end 56 | end 57 | 58 | describe '#myclass' do 59 | subject { resources.myclass } 60 | 61 | it { is_expected.to eq 'resources' } 62 | end 63 | 64 | describe '#create' do 65 | subject(:create) { resources.create({ 'a' => 'b' }) } 66 | 67 | example 'it will post /api/v1/dashboard' do 68 | allow(resources).to receive(:api_version).and_return('v1') 69 | allow(resources).to receive(:api_resource_name).and_return('dashboard') 70 | 
stubs.post('/api/v1/dashboard', { 'a' => 'b' }) { respond_with200({ 'id' => 'whatever-id-abc' }) } 71 | create 72 | stubs.verify_stubbed_calls 73 | end 74 | end 75 | 76 | describe '#update' do 77 | subject(:update) { resources.update('abc-123-def', { 'a' => 'b' }) } 78 | 79 | example 'it puts /api/v1/dashboard' do 80 | allow(resources).to receive(:api_version).and_return('v1') 81 | allow(resources).to receive(:api_resource_name).and_return('dashboard') 82 | stubs.put('/api/v1/dashboard/abc-123-def', { 'a' => 'b' }) { respond_with200({ 'id' => 'whatever-id-abc' }) } 83 | update 84 | stubs.verify_stubbed_calls 85 | end 86 | 87 | context 'when the id is not found' do 88 | before do 89 | allow(resources).to receive(:api_version).and_return('v1') 90 | allow(resources).to receive(:api_resource_name).and_return('dashboard') 91 | stubs.put('/api/v1/dashboard/abc-123-def', { 'a' => 'b' }) { [404, {}, { 'id' => 'whatever-id-abc' }] } 92 | end 93 | 94 | it 'raises an error' do 95 | expect { update }.to raise_error(RuntimeError, 'update failed with error 404') 96 | end 97 | end 98 | end 99 | 100 | describe '#restore' do 101 | before do 102 | allow(resources).to receive(:api_version).and_return('api-version-string') 103 | allow(resources).to receive(:api_resource_name).and_return('api-resource-name-string') 104 | stubs.get('/api/api-version-string/api-resource-name-string/abc-123-def') { respond_with200({ 'test' => 'ok' }) } 105 | stubs.get('/api/api-version-string/api-resource-name-string/bad-123-id') do 106 | raise Faraday::ResourceNotFound 107 | end 108 | allow(resources).to receive(:load_from_file_by_id).and_return({ 'load' => 'ok' }) 109 | end 110 | 111 | context 'when id exists' do 112 | subject(:restore) { resources.restore('abc-123-def') } 113 | 114 | example 'it calls out to update' do 115 | allow(resources).to receive(:update) 116 | restore 117 | expect(resources).to have_received(:update).with('abc-123-def', { 'load' => 'ok' }) 118 | end 119 | end 120 | 121 | context 'when id does not exist on remote' do 122 | subject(:restore_newly) { resources.restore('bad-123-id') } 123 | 124 | let(:fileutils) { class_double(FileUtils).as_stubbed_const } 125 | 126 | before do 127 | allow(resources).to receive(:load_from_file_by_id).and_return({ 'load' => 'ok' }) 128 | stubs.put('/api/api-version-string/api-resource-name-string/bad-123-id') do 129 | raise Faraday::ResourceNotFound 130 | end 131 | stubs.post('/api/api-version-string/api-resource-name-string', { 'load' => 'ok' }) do 132 | respond_with200({ 'id' => 'my-new-id' }) 133 | end 134 | allow(fileutils).to receive(:rm) 135 | allow(resources).to receive(:create).with({ 'load' => 'ok' }).and_return({ 'id' => 'my-new-id' }) 136 | allow(resources).to receive(:get_and_write_file) 137 | allow(resources).to receive(:find_file_by_id).with('bad-123-id').and_return('/path/to/bad-123-id.json') 138 | end 139 | 140 | example 'it calls out to create' do 141 | restore_newly 142 | expect(resources).to have_received(:create).with({ 'load' => 'ok' }) 143 | end 144 | 145 | example 'it saves the new file' do 146 | restore_newly 147 | expect(resources).to have_received(:get_and_write_file).with('my-new-id') 148 | end 149 | 150 | example 'it deletes the old file' do 151 | restore_newly 152 | expect(fileutils).to have_received(:rm).with('/path/to/bad-123-id.json') 153 | end 154 | end 155 | end 156 | end 157 | -------------------------------------------------------------------------------- /spec/datadog_backup/dashboards_spec.rb: 
-------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | describe DatadogBackup::Dashboards do 6 | let(:stubs) { Faraday::Adapter::Test::Stubs.new } 7 | let(:api_client_double) { Faraday.new { |f| f.adapter :test, stubs } } 8 | let(:tempdir) { Dir.mktmpdir } 9 | let(:dashboards) do 10 | dashboards = described_class.new( 11 | action: 'backup', 12 | backup_dir: tempdir, 13 | output_format: :json, 14 | resources: [] 15 | ) 16 | allow(dashboards).to receive(:api_service).and_return(api_client_double) 17 | return dashboards 18 | end 19 | let(:dashboard_description) do 20 | { 21 | 'description' => 'bar', 22 | 'id' => 'abc-123-def', 23 | 'title' => 'foo' 24 | } 25 | end 26 | let(:board_abc_123_def) do 27 | { 28 | 'graphs' => [ 29 | { 30 | 'definition' => { 31 | 'viz' => 'timeseries', 32 | 'requests' => [ 33 | { 34 | 'q' => 'min:foo.bar{a:b}', 35 | 'stacked' => false 36 | } 37 | ] 38 | }, 39 | 'title' => 'example graph' 40 | } 41 | ], 42 | 'description' => 'example dashboard', 43 | 'title' => 'example dashboard' 44 | } 45 | end 46 | let(:all_dashboards) { respond_with200({ 'dashboards' => [dashboard_description] }) } 47 | let(:example_dashboard) { respond_with200(board_abc_123_def) } 48 | 49 | before do 50 | stubs.get('/api/v1/dashboard') { all_dashboards } 51 | stubs.get('/api/v1/dashboard/abc-123-def') { example_dashboard } 52 | end 53 | 54 | describe '#backup' do 55 | subject { dashboards.backup } 56 | 57 | it 'is expected to create a file' do 58 | file = instance_double(File) 59 | allow(File).to receive(:open).with(dashboards.filename('abc-123-def'), 'w').and_return(file) 60 | allow(file).to receive(:write) 61 | allow(file).to receive(:close) 62 | 63 | dashboards.backup 64 | expect(file).to have_received(:write).with(::JSON.pretty_generate(board_abc_123_def.deep_sort)) 65 | end 66 | end 67 | 68 | describe '#filename' do 69 | subject { dashboards.filename('abc-123-def') } 70 | 71 | it { is_expected.to eq("#{tempdir}/dashboards/abc-123-def.json") } 72 | end 73 | 74 | describe '#get_by_id' do 75 | subject { dashboards.get_by_id('abc-123-def') } 76 | 77 | it { is_expected.to eq board_abc_123_def } 78 | end 79 | 80 | describe '#diff' do 81 | it 'calls the api only once' do 82 | dashboards.write_file('{"a":"b"}', dashboards.filename('abc-123-def')) 83 | expect(dashboards.diff('abc-123-def')).to eq(<<~EODASH 84 | --- 85 | -description: example dashboard 86 | -graphs: 87 | -- definition: 88 | - requests: 89 | - - q: min:foo.bar{a:b} 90 | - stacked: false 91 | - viz: timeseries 92 | - title: example graph 93 | -title: example dashboard 94 | +a: b 95 | EODASH 96 | .chomp) 97 | end 98 | end 99 | 100 | describe '#except' do 101 | subject { dashboards.except({ :a => :b, 'modified_at' => :c, 'url' => :d }) } 102 | 103 | it { is_expected.to eq({ a: :b }) } 104 | end 105 | end 106 | -------------------------------------------------------------------------------- /spec/datadog_backup/deprecations_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | describe DatadogBackup::Deprecations do 6 | subject(:check) { described_class.check } 7 | 8 | let(:logger) { instance_double(Logger) } 9 | 10 | before do 11 | stub_const('LOGGER', logger) 12 | allow(logger).to receive(:warn) 13 | end 14 | 15 | %w[2.5.9 2.6.8 2.7 3.0.4].each do |ruby_version| 16 | describe "#check#{ruby_version}" do 17 | it 'does warn' do 18 | 
stub_const('RUBY_VERSION', ruby_version) 19 | check 20 | expect(logger).to have_received(:warn).with(/ruby-#{ruby_version} is deprecated./) 21 | end 22 | end 23 | end 24 | 25 | %w[3.1.2 3.2.0-preview1].each do |ruby_version| 26 | describe "#check#{ruby_version}" do 27 | it 'does not warn' do 28 | stub_const('RUBY_VERSION', ruby_version) 29 | check 30 | expect(logger).not_to have_received(:warn).with(/ruby-#{ruby_version} is deprecated./) 31 | end 32 | end 33 | end 34 | end 35 | -------------------------------------------------------------------------------- /spec/datadog_backup/local_filesystem_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | describe DatadogBackup::LocalFilesystem do 6 | let(:tempdir) { Dir.mktmpdir } 7 | let(:resources) do 8 | DatadogBackup::Resources.new( 9 | action: 'backup', 10 | backup_dir: tempdir, 11 | resources: [DatadogBackup::Dashboards], 12 | output_format: :json 13 | ) 14 | end 15 | let(:resources_yaml) do 16 | DatadogBackup::Resources.new( 17 | action: 'backup', 18 | backup_dir: tempdir, 19 | resources: [], 20 | output_format: :yaml 21 | ) 22 | end 23 | let(:resources_disable_array_sort) do 24 | DatadogBackup::Resources.new( 25 | action: 'backup', 26 | backup_dir: tempdir, 27 | resources: [DatadogBackup::Dashboards], 28 | output_format: :json, 29 | disable_array_sort: true 30 | ) 31 | end 32 | 33 | describe '#all_files' do 34 | subject { resources.all_files } 35 | 36 | before do 37 | File.new("#{tempdir}/all_files.json", 'w') 38 | end 39 | 40 | after do 41 | FileUtils.rm "#{tempdir}/all_files.json" 42 | end 43 | 44 | it { is_expected.to eq(["#{tempdir}/all_files.json"]) } 45 | end 46 | 47 | describe '#all_file_ids_for_selected_resources' do 48 | subject { resources.all_file_ids_for_selected_resources } 49 | 50 | before do 51 | Dir.mkdir("#{tempdir}/dashboards") 52 | Dir.mkdir("#{tempdir}/monitors") 53 | File.new("#{tempdir}/dashboards/all_files.json", 'w') 54 | File.new("#{tempdir}/monitors/12345.json", 'w') 55 | end 56 | 57 | after do 58 | FileUtils.rm "#{tempdir}/dashboards/all_files.json" 59 | FileUtils.rm "#{tempdir}/monitors/12345.json" 60 | end 61 | 62 | it { is_expected.to eq(['all_files']) } 63 | end 64 | 65 | describe '#class_from_id' do 66 | subject { resources.class_from_id('abc-123-def') } 67 | 68 | before do 69 | resources.write_file('abc', "#{tempdir}/resources/abc-123-def.json") 70 | end 71 | 72 | after do 73 | FileUtils.rm "#{tempdir}/resources/abc-123-def.json" 74 | end 75 | 76 | it { is_expected.to eq DatadogBackup::Resources } 77 | end 78 | 79 | describe '#dump' do 80 | context 'when mode is :json' do 81 | subject { resources.dump({ a: :b }) } 82 | 83 | it { is_expected.to eq(%({\n "a": "b"\n})) } 84 | end 85 | 86 | context 'when mode is :yaml' do 87 | subject { resources_yaml.dump({ 'a' => 'b' }) } 88 | 89 | it { is_expected.to eq(%(---\na: b\n)) } 90 | end 91 | 92 | context 'when array sorting is enabled' do 93 | subject { resources.dump({ a: [ :c, :b ] }) } 94 | 95 | it { is_expected.to eq(%({\n \"a\": [\n \"b\",\n \"c\"\n ]\n})) } 96 | end 97 | 98 | context 'when array sorting is disabled' do 99 | subject { resources_disable_array_sort.dump({ a: [ :c, :b ] }) } 100 | 101 | it { is_expected.to eq(%({\n \"a\": [\n \"c\",\n \"b\"\n ]\n})) } 102 | end 103 | end 104 | 105 | describe '#filename' do 106 | context 'when mode is :json' do 107 | subject { resources.filename('abc-123-def') } 108 | 109 | it { is_expected.to 
eq("#{tempdir}/resources/abc-123-def.json") } 110 | end 111 | 112 | context 'when mode is :yaml' do 113 | subject { resources_yaml.filename('abc-123-def') } 114 | 115 | it { is_expected.to eq("#{tempdir}/resources/abc-123-def.yaml") } 116 | end 117 | end 118 | 119 | describe '#file_type' do 120 | subject { resources.file_type("#{tempdir}/file_type.json") } 121 | 122 | before do 123 | File.new("#{tempdir}/file_type.json", 'w') 124 | end 125 | 126 | after do 127 | FileUtils.rm "#{tempdir}/file_type.json" 128 | end 129 | 130 | it { is_expected.to eq :json } 131 | end 132 | 133 | describe '#find_file_by_id' do 134 | subject { resources.find_file_by_id('find_file') } 135 | 136 | before do 137 | File.new("#{tempdir}/find_file.json", 'w') 138 | end 139 | 140 | after do 141 | FileUtils.rm "#{tempdir}/find_file.json" 142 | end 143 | 144 | it { is_expected.to eq "#{tempdir}/find_file.json" } 145 | end 146 | 147 | describe '#load_from_file' do 148 | context 'when mode is :json' do 149 | subject { resources.load_from_file(%({\n "a": "b"\n}), :json) } 150 | 151 | it { is_expected.to eq('a' => 'b') } 152 | end 153 | 154 | context 'when mode is :yaml' do 155 | subject { resources.load_from_file(%(---\na: b\n), :yaml) } 156 | 157 | it { is_expected.to eq('a' => 'b') } 158 | end 159 | end 160 | 161 | describe '#load_from_file_by_id' do 162 | context 'when the backup is in json but the mode is :yaml' do 163 | subject { resources_yaml.load_from_file_by_id('abc-123-def') } 164 | 165 | before { resources.write_file(%({"a": "b"}), "#{tempdir}/resources/abc-123-def.json") } 166 | 167 | after { FileUtils.rm "#{tempdir}/resources/abc-123-def.json" } 168 | 169 | it { is_expected.to eq('a' => 'b') } 170 | end 171 | 172 | context 'when the backup is in yaml but the mode is :json' do 173 | subject { resources.load_from_file_by_id('abc-123-def') } 174 | 175 | before { resources.write_file(%(---\na: b), "#{tempdir}/resources/abc-123-def.yaml") } 176 | 177 | after { FileUtils.rm "#{tempdir}/resources/abc-123-def.yaml" } 178 | 179 | it { is_expected.to eq('a' => 'b') } 180 | end 181 | 182 | context 'with Integer as parameter' do 183 | subject { resources.load_from_file_by_id(12_345) } 184 | 185 | before { resources.write_file(%(---\na: b), "#{tempdir}/resources/12345.yaml") } 186 | 187 | after { FileUtils.rm "#{tempdir}/resources/12345.yaml" } 188 | 189 | it { is_expected.to eq('a' => 'b') } 190 | end 191 | end 192 | 193 | describe '#write_file' do 194 | subject(:write_file) { resources.write_file('abc123', "#{tempdir}/resources/abc-123-def.json") } 195 | 196 | let(:file_like_object) { instance_double(File) } 197 | 198 | it 'writes a file to abc-123-def.json' do 199 | allow(File).to receive(:open).and_call_original 200 | allow(File).to receive(:open).with("#{tempdir}/resources/abc-123-def.json", 'w').and_return(file_like_object) 201 | allow(file_like_object).to receive(:write) 202 | allow(file_like_object).to receive(:close) 203 | 204 | write_file 205 | 206 | expect(file_like_object).to have_received(:write).with('abc123') 207 | end 208 | end 209 | end 210 | -------------------------------------------------------------------------------- /spec/datadog_backup/monitors_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | describe DatadogBackup::Monitors do 6 | let(:stubs) { Faraday::Adapter::Test::Stubs.new } 7 | let(:api_client_double) { Faraday.new { |f| f.adapter :test, stubs } } 8 | let(:tempdir) { Dir.mktmpdir } 
9 | let(:monitors) do 10 | monitors = described_class.new( 11 | action: 'backup', 12 | backup_dir: tempdir, 13 | output_format: :json, 14 | resources: [] 15 | ) 16 | allow(monitors).to receive(:api_service).and_return(api_client_double) 17 | return monitors 18 | end 19 | let(:monitor_description) do 20 | { 21 | 'query' => 'bar', 22 | 'message' => 'foo', 23 | 'id' => 123_455, 24 | 'name' => 'foo', 25 | 'overall_state' => 'OK', 26 | 'overall_state_modified' => '2020-07-27T22:00:00+00:00' 27 | } 28 | end 29 | let(:clean_monitor_description) do 30 | { 31 | 'id' => 123_455, 32 | 'message' => 'foo', 33 | 'name' => 'foo', 34 | 'query' => 'bar' 35 | } 36 | end 37 | let(:all_monitors) { respond_with200([monitor_description]) } 38 | let(:example_monitor) { respond_with200(monitor_description) } 39 | 40 | before do 41 | stubs.get('/api/v1/monitor') { all_monitors } 42 | stubs.get('/api/v1/dashboard/123455') { example_monitor } 43 | end 44 | 45 | describe '#get_all' do 46 | subject { monitors.get_all } 47 | 48 | it { is_expected.to eq [monitor_description] } 49 | end 50 | 51 | describe '#backup' do 52 | subject { monitors.backup } 53 | 54 | it 'is expected to create a file' do 55 | file = instance_double(File) 56 | allow(File).to receive(:open).with(monitors.filename(123_455), 'w').and_return(file) 57 | allow(file).to receive(:write) 58 | allow(file).to receive(:close) 59 | 60 | monitors.backup 61 | expect(file).to have_received(:write).with(::JSON.pretty_generate(clean_monitor_description)) 62 | end 63 | end 64 | 65 | describe '#diff and #except' do 66 | example 'it ignores `overall_state` and `overall_state_modified`' do 67 | monitors.write_file(monitors.dump(monitor_description), monitors.filename(123_455)) 68 | stubs.get('/api/v1/dashboard/123455') do 69 | respond_with200( 70 | [ 71 | { 72 | 'query' => 'bar', 73 | 'message' => 'foo', 74 | 'id' => 123_455, 75 | 'name' => 'foo', 76 | 'overall_state' => 'ZZZZZZZZZZZZZZZZZZZZZZZZZZZ', 77 | 'overall_state_modified' => '9999-07-27T22:55:55+00:00' 78 | } 79 | ] 80 | ) 81 | end 82 | 83 | expect(monitors.diff(123_455)).to eq '' 84 | 85 | FileUtils.rm monitors.filename(123_455) 86 | end 87 | end 88 | 89 | describe '#filename' do 90 | subject { monitors.filename(123_455) } 91 | 92 | it { is_expected.to eq("#{tempdir}/monitors/123455.json") } 93 | end 94 | 95 | describe '#get_by_id' do 96 | context 'when Integer' do 97 | subject { monitors.get_by_id(123_455) } 98 | 99 | it { is_expected.to eq monitor_description } 100 | end 101 | 102 | context 'when String' do 103 | subject { monitors.get_by_id('123455') } 104 | 105 | it { is_expected.to eq monitor_description } 106 | end 107 | end 108 | end 109 | -------------------------------------------------------------------------------- /spec/datadog_backup/slos_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | describe DatadogBackup::SLOs do 6 | let(:stubs) { Faraday::Adapter::Test::Stubs.new } 7 | let(:api_client_double) { Faraday.new { |f| f.adapter :test, stubs } } 8 | let(:tempdir) { Dir.mktmpdir } 9 | let(:slos) do 10 | slos = described_class.new( 11 | action: 'backup', 12 | backup_dir: tempdir, 13 | output_format: :json, 14 | resources: [] 15 | ) 16 | allow(slos).to receive(:api_service).and_return(api_client_double) 17 | return slos 18 | end 19 | let(:fetched_slos) do 20 | { 21 | "data"=>[ 22 | {"id"=>"abc-123", "name"=>"CI Stability", "tags"=>["kind:availability", "team:my_team"], "monitor_tags"=>[], 
"thresholds"=>[{"timeframe"=>"7d", "target"=>98.0, "target_display"=>"98."}, {"timeframe"=>"30d", "target"=>98.0, "target_display"=>"98."}, {"timeframe"=>"90d", "target"=>98.0, "target_display"=>"98."}], "type"=>"metric", "type_id"=>1, "description"=>"something helpful", "timeframe"=>"30d", "target_threshold"=>98.0, "query"=>{"denominator"=>"sum:metric.ci_things{*}.as_count()", "numerator"=>"sum:metric.ci_things{*}.as_count()-sum:metric.ci_things{infra_failure}.as_count()"}, "creator"=>{"name"=>"Thelma Patterson", "handle"=>"thelma.patterson@example.com", "email"=>"thelma.patterson@example.com"}, "created_at"=>1571335531, "modified_at"=>1687844157}, 23 | {"id"=>"sbc-124", "name"=>"A Latency SLO", "tags"=>["team:my_team", "kind:latency"], "monitor_tags"=>[], "thresholds"=>[{"timeframe"=>"7d", "target"=>95.0, "target_display"=>"95."}, {"timeframe"=>"30d", "target"=>95.0, "target_display"=>"95."}, {"timeframe"=>"90d", "target"=>95.0, "target_display"=>"95."}], "type"=>"monitor", "type_id"=>0, "description"=>"", "timeframe"=>"30d", "target_threshold"=>95.0, "monitor_ids"=>[13158755], "creator"=>{"name"=>"Louise Montague", "handle"=>"louise.montague@example.com", "email"=>"louise.montague@example.com"}, "created_at"=>1573162531, "modified_at"=>1685819875} 24 | ], 25 | "errors"=>[], 26 | "metadata"=>{"page"=>{"total_count"=>359, "total_filtered_count"=>359}} 27 | } 28 | end 29 | let(:slo_abc_123) do 30 | { 31 | "id" => "abc-123", 32 | "name" => "CI Stability", 33 | "tags" => [ 34 | "kind:availability", 35 | "team:my_team", 36 | ], 37 | "monitor_tags" => [], 38 | "thresholds" => [ 39 | { 40 | "timeframe" => "7d", 41 | "target" => 98.0, 42 | "target_display" => "98." 43 | }, 44 | { 45 | "timeframe" => "30d", 46 | "target" => 98.0, 47 | "target_display" => "98." 48 | }, 49 | { 50 | "timeframe" => "90d", 51 | "target" => 98.0, 52 | "target_display" => "98." 53 | } 54 | ], 55 | "type" => "metric", 56 | "type_id" => 1, 57 | "description" => "something helpful", 58 | "timeframe" => "30d", 59 | "target_threshold" => 98.0, 60 | "query" => { 61 | "denominator" => "sum:metric.ci_things{*}.as_count()", 62 | "numerator" => "sum:metric.ci_things{*}.as_count()-sum:metric.ci_things{infra_failure}.as_count()" 63 | }, 64 | "creator" => { 65 | "name" => "Thelma Patterson", 66 | "handle" => "thelma.patterson@example.com", 67 | "email" => "thelma.patterson@example.com" 68 | }, 69 | "created_at" => 1571335531, 70 | "modified_at" => 1687844157 71 | } 72 | end 73 | let(:slo_sbc_124) do 74 | { 75 | "id" => "sbc-124", 76 | "name" => "A Latency SLO", 77 | "tags" => [ 78 | "kind:latency", 79 | "team:my_team", 80 | ], 81 | "monitor_tags" => [], 82 | "thresholds" => [ 83 | { 84 | "timeframe" => "7d", 85 | "target" => 98.0, 86 | "target_display" => "98." 87 | }, 88 | { 89 | "timeframe" => "30d", 90 | "target" => 98.0, 91 | "target_display" => "98." 92 | }, 93 | { 94 | "timeframe" => "90d", 95 | "target" => 98.0, 96 | "target_display" => "98." 
97 | } 98 | ], 99 | "type" => "monitor", 100 | "type_id"=>0, 101 | "description"=>"", 102 | "timeframe"=>"30d", 103 | "target_threshold"=>95.0, 104 | "monitor_ids"=>[ 13158755 ], 105 | "creator"=>{ 106 | "name"=>"Louise Montague", 107 | "handle"=>"louise.montague@example.com", 108 | "email"=>"louise.montague@example.com" 109 | }, 110 | "created_at"=>1573162531, 111 | "modified_at"=>1685819875 112 | } 113 | end 114 | let(:slo_abc_123_response) do 115 | { "data" => slo_abc_123, "errors" => [] } 116 | end 117 | let(:slo_sbc_124_response) do 118 | { "data" => slo_sbc_124, "errors" => [] } 119 | end 120 | let(:all_slos) { respond_with200(fetched_slos) } 121 | let(:example_slo1) { respond_with200(slo_abc_123_response) } 122 | let(:example_slo2) { respond_with200(slo_sbc_124_response) } 123 | 124 | before do 125 | stubs.get('/api/v1/slo') { all_slos } 126 | stubs.get('/api/v1/slo/abc-123') { example_slo1 } 127 | stubs.get('/api/v1/slo/sbc-124') { example_slo2 } 128 | end 129 | 130 | describe '#backup' do 131 | subject { slos.backup } 132 | 133 | it 'is expected to create two files' do 134 | file1 = instance_double(File) 135 | allow(File).to receive(:open).with(slos.filename('abc-123'), 'w').and_return(file1) 136 | allow(file1).to receive(:write) 137 | allow(file1).to receive(:close) 138 | 139 | file2 = instance_double(File) 140 | allow(File).to receive(:open).with(slos.filename('sbc-124'), 'w').and_return(file2) 141 | allow(file2).to receive(:write) 142 | allow(file2).to receive(:close) 143 | 144 | slos.backup 145 | expect(file1).to have_received(:write).with(::JSON.pretty_generate(slo_abc_123.deep_sort)) 146 | expect(file2).to have_received(:write).with(::JSON.pretty_generate(slo_sbc_124.deep_sort)) 147 | end 148 | end 149 | 150 | describe '#filename' do 151 | subject { slos.filename('abc-123') } 152 | 153 | it { is_expected.to eq("#{tempdir}/slos/abc-123.json") } 154 | end 155 | 156 | describe '#get_by_id' do 157 | subject { slos.get_by_id('abc-123') } 158 | 159 | it { is_expected.to eq slo_abc_123 } 160 | end 161 | 162 | describe '#diff' do 163 | it 'calls the api only once' do 164 | slos.write_file('{"a":"b"}', slos.filename('abc-123')) 165 | expect(slos.diff('abc-123')).to eq(<<~EODASH 166 | --- 167 | -created_at: 1571335531 168 | -creator: 169 | - email: thelma.patterson@example.com 170 | - handle: thelma.patterson@example.com 171 | - name: Thelma Patterson 172 | -description: something helpful 173 | -id: abc-123 174 | -monitor_tags: [] 175 | -name: CI Stability 176 | -query: 177 | - denominator: sum:metric.ci_things{*}.as_count() 178 | - numerator: sum:metric.ci_things{*}.as_count()-sum:metric.ci_things{infra_failure}.as_count() 179 | -tags: 180 | -- kind:availability 181 | -- team:my_team 182 | -target_threshold: 98.0 183 | -thresholds: 184 | -- target: 98.0 185 | - target_display: '98.' 186 | - timeframe: 30d 187 | -- target: 98.0 188 | - target_display: '98.' 189 | - timeframe: 7d 190 | -- target: 98.0 191 | - target_display: '98.' 
192 | - timeframe: 90d 193 | -timeframe: 30d 194 | -type: metric 195 | -type_id: 1 196 | +a: b 197 | EODASH 198 | .chomp) 199 | end 200 | end 201 | 202 | describe '#except' do 203 | subject { slos.except({ :a => :b, 'modified_at' => :c, 'url' => :d }) } 204 | 205 | it { is_expected.to eq({ a: :b }) } 206 | end 207 | end 208 | -------------------------------------------------------------------------------- /spec/datadog_backup/synthetics_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'spec_helper' 4 | 5 | describe DatadogBackup::Synthetics do 6 | let(:stubs) { Faraday::Adapter::Test::Stubs.new } 7 | let(:api_client_double) { Faraday.new { |f| f.adapter :test, stubs } } 8 | let(:tempdir) { Dir.mktmpdir } # TODO: delete afterward 9 | let(:synthetics) do 10 | synthetics = described_class.new( 11 | action: 'backup', 12 | backup_dir: tempdir, 13 | output_format: :json, 14 | resources: [] 15 | ) 16 | allow(synthetics).to receive(:api_service).and_return(api_client_double) 17 | return synthetics 18 | end 19 | let(:api_test) do 20 | { 'config' => { 'assertions' => [{ 'operator' => 'contains', 'property' => 'set-cookie', 'target' => '_user_id', 'type' => 'header' }, 21 | { 'operator' => 'contains', 'target' => 'body message', 'type' => 'body' }, 22 | { 'operator' => 'is', 'property' => 'content-type', 'target' => 'text/html; charset=utf-8', 'type' => 'header' }, 23 | { 'operator' => 'is', 'target' => 200, 'type' => 'statusCode' }, 24 | { 'operator' => 'lessThan', 'target' => 5000, 'type' => 'responseTime' }], 25 | 'request' => { 'headers' => { 'User-Agent' => 'Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:65.0) Gecko/20100101 Firefox/65.0', 26 | 'cookie' => '_a=12345; _example_session=abc123' }, 27 | 'method' => 'GET', 28 | 'url' => 'https://www.example.com/' } }, 29 | 'creator' => { 'email' => 'user@example.com', 'handle' => 'user@example.com', 'name' => 'Hugh Zer' }, 30 | 'locations' => ['aws:ap-northeast-1', 'aws:eu-central-1', 'aws:eu-west-2', 'aws:us-west-2'], 31 | 'message' => 'TEST: This is a test', 32 | 'monitor_id' => 12_345, 33 | 'name' => 'TEST: This is a test', 34 | 'options' => { 'follow_redirects' => true, 35 | 'httpVersion' => 'http1', 36 | 'min_failure_duration' => 120, 37 | 'min_location_failed' => 2, 38 | 'monitor_options' => { 'renotify_interval' => 0 }, 39 | 'monitor_priority' => 1, 40 | 'retry' => { 'count' => 1, 'interval' => 500 }, 41 | 'tick_every' => 120 }, 42 | 'public_id' => 'abc-123-def', 43 | 'status' => 'live', 44 | 'subtype' => 'http', 45 | 'tags' => ['env:test'], 46 | 'type' => 'api' } 47 | end 48 | let(:browser_test) do 49 | { 'config' => { 'assertions' => [], 50 | 'configVariables' => [], 51 | 'request' => { 'headers' => {}, 'method' => 'GET', 'url' => 'https://www.example.com' }, 52 | 'setCookie' => nil, 53 | 'variables' => [] }, 54 | 'creator' => { 'email' => 'user@example.com', 55 | 'handle' => 'user@example.com', 56 | 'name' => 'Hugh Zer' }, 57 | 'locations' => ['aws:us-east-2'], 58 | 'message' => 'Test message', 59 | 'monitor_id' => 12_345, 60 | 'name' => 'www.example.com', 61 | 'options' => { 'ci' => { 'executionRule' => 'non_blocking' }, 62 | 'device_ids' => ['chrome.laptop_large', 'chrome.mobile_small'], 63 | 'disableCors' => false, 64 | 'disableCsp' => false, 65 | 'ignoreServerCertificateError' => false, 66 | 'min_failure_duration' => 300, 67 | 'min_location_failed' => 1, 68 | 'monitor_options' => { 'renotify_interval' => 0 }, 69 | 'noScreenshot' => false, 70 | 'retry' 
=> { 'count' => 0, 'interval' => 1000 }, 71 | 'tick_every' => 900 }, 72 | 'public_id' => '456-ghi-789', 73 | 'status' => 'live', 74 | 'tags' => ['env:test'], 75 | 'type' => 'browser' } 76 | end 77 | let(:all_synthetics) { respond_with200({ 'tests' => [api_test, browser_test] }) } 78 | let(:api_synthetic) { respond_with200(api_test) } 79 | let(:browser_synthetic) { respond_with200(browser_test) } 80 | 81 | before do 82 | stubs.get('/api/v1/synthetics/tests') { all_synthetics } 83 | stubs.get('/api/v1/synthetics/tests/api/abc-123-def') { api_synthetic } 84 | stubs.get('/api/v1/synthetics/tests/browser/456-ghi-789') { browser_synthetic } 85 | end 86 | 87 | describe '#all' do 88 | subject { synthetics.all } 89 | 90 | it { is_expected.to contain_exactly(api_test, browser_test) } 91 | end 92 | 93 | describe '#backup' do 94 | subject(:backup) { synthetics.backup } 95 | 96 | let(:apifile) { instance_double(File) } 97 | let(:browserfile) { instance_double(File) } 98 | 99 | before do 100 | allow(File).to receive(:open).with(synthetics.filename('abc-123-def'), 'w').and_return(apifile) 101 | allow(File).to receive(:open).with(synthetics.filename('456-ghi-789'), 'w').and_return(browserfile) 102 | allow(apifile).to receive(:write) 103 | allow(apifile).to receive(:close) 104 | allow(browserfile).to receive(:write) 105 | allow(browserfile).to receive(:close) 106 | end 107 | 108 | it 'is expected to write the API test' do 109 | backup 110 | expect(apifile).to have_received(:write).with(::JSON.pretty_generate(api_test)) 111 | end 112 | 113 | it 'is expected to write the browser test' do 114 | backup 115 | expect(browserfile).to have_received(:write).with(::JSON.pretty_generate(browser_test)) 116 | end 117 | end 118 | 119 | describe '#filename' do 120 | subject { synthetics.filename('abc-123-def') } 121 | 122 | it { is_expected.to eq("#{tempdir}/synthetics/abc-123-def.json") } 123 | end 124 | 125 | describe '#get_by_id' do 126 | context 'when the type is api' do 127 | subject { synthetics.get_by_id('abc-123-def') } 128 | 129 | it { is_expected.to eq api_test } 130 | end 131 | 132 | context 'when the type is browser' do 133 | subject { synthetics.get_by_id('456-ghi-789') } 134 | 135 | it { is_expected.to eq browser_test } 136 | end 137 | end 138 | 139 | describe '#diff' do # TODO: migrate to resources_spec.rb, since #diff is not defined here. 
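    # Each context below first writes a local backup with `write_file` and then diffs
    # it against the stubbed remote: an identical copy yields an empty diff, a remote
    # that cannot be found is compared against an empty hash, and a local edit shows
    # up as paired -/+ lines.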
140 | subject { synthetics.diff('abc-123-def') } 141 | 142 | before do 143 | synthetics.write_file(synthetics.dump(api_test), synthetics.filename('abc-123-def')) 144 | end 145 | 146 | context 'when the test is identical' do 147 | it { is_expected.to be_empty } 148 | end 149 | 150 | context 'when the remote is not found' do 151 | subject(:invalid_diff) { synthetics.diff('invalid-id') } 152 | 153 | before do 154 | synthetics.write_file(synthetics.dump({ 'name' => 'invalid-diff' }), synthetics.filename('invalid-id')) 155 | end 156 | 157 | it { 158 | expect(invalid_diff).to eq(%(---- {}\n+---\n+name: invalid-diff)) 159 | } 160 | end 161 | 162 | context 'when there is a local update' do 163 | before do 164 | different_test = api_test.dup 165 | different_test['message'] = 'Different message' 166 | synthetics.write_file(synthetics.dump(different_test), synthetics.filename('abc-123-def')) 167 | end 168 | 169 | it { is_expected.to include(%(-message: 'TEST: This is a test'\n+message: Different message)) } 170 | end 171 | end 172 | 173 | describe '#create' do 174 | context 'when the type is api' do 175 | subject(:create) { synthetics.create({ 'type' => 'api' }) } 176 | 177 | before do 178 | stubs.post('/api/v1/synthetics/tests/api') { respond_with200({ 'public_id' => 'api-create-abc' }) } 179 | end 180 | 181 | it { is_expected.to eq({ 'public_id' => 'api-create-abc' }) } 182 | end 183 | 184 | context 'when the type is browser' do 185 | subject(:create) { synthetics.create({ 'type' => 'browser' }) } 186 | 187 | before do 188 | stubs.post('/api/v1/synthetics/tests/browser') { respond_with200({ 'public_id' => 'browser-create-abc' }) } 189 | end 190 | 191 | it { is_expected.to eq({ 'public_id' => 'browser-create-abc' }) } 192 | end 193 | end 194 | 195 | describe '#update' do 196 | context 'when the type is api' do 197 | subject(:update) { synthetics.update('api-update-abc', { 'type' => 'api' }) } 198 | 199 | before do 200 | stubs.put('/api/v1/synthetics/tests/api/api-update-abc') { respond_with200({ 'public_id' => 'api-update-abc' }) } 201 | end 202 | 203 | it { is_expected.to eq({ 'public_id' => 'api-update-abc' }) } 204 | end 205 | 206 | context 'when the type is browser' do 207 | subject(:update) { synthetics.update('browser-update-abc', { 'type' => 'browser' }) } 208 | 209 | before do 210 | stubs.put('/api/v1/synthetics/tests/browser/browser-update-abc') { respond_with200({ 'public_id' => 'browser-update-abc' }) } 211 | end 212 | 213 | it { is_expected.to eq({ 'public_id' => 'browser-update-abc' }) } 214 | end 215 | end 216 | 217 | describe '#restore' do 218 | context 'when the id exists' do 219 | subject { synthetics.restore('abc-123-def') } 220 | 221 | before do 222 | synthetics.write_file(synthetics.dump({ 'name' => 'restore-valid-id', 'type' => 'api' }), synthetics.filename('abc-123-def')) 223 | stubs.put('/api/v1/synthetics/tests/api/abc-123-def') { respond_with200({ 'public_id' => 'abc-123-def', 'type' => 'api' }) } 224 | end 225 | 226 | it { is_expected.to eq({ 'public_id' => 'abc-123-def', 'type' => 'api' }) } 227 | end 228 | 229 | context 'when the id does not exist' do 230 | subject(:restore) { synthetics.restore('restore-invalid-id') } 231 | 232 | before do 233 | synthetics.write_file(synthetics.dump({ 'name' => 'restore-invalid-id', 'type' => 'api' }), synthetics.filename('restore-invalid-id')) 234 | stubs.put('/api/v1/synthetics/tests/api/restore-invalid-id') { raise Faraday::ResourceNotFound } 235 | stubs.post('/api/v1/synthetics/tests/api') { respond_with200({ 'public_id' => 
'restore-valid-id' }) } 236 | allow(synthetics).to receive(:create).and_call_original 237 | allow(synthetics).to receive(:all).and_return([api_test, browser_test, { 'public_id' => 'restore-valid-id', 'type' => 'api' }]) 238 | end 239 | 240 | it { is_expected.to eq({ 'type' => 'api' }) } 241 | 242 | it 'calls create with the contents of the original file' do 243 | restore 244 | expect(synthetics).to have_received(:create).with({ 'name' => 'restore-invalid-id', 'type' => 'api' }) 245 | end 246 | 247 | it 'deletes the original file' do 248 | restore 249 | expect(File.exist?(synthetics.filename('restore-invalid-id'))).to be false 250 | end 251 | 252 | it 'creates a new file with the restored contents' do 253 | restore 254 | expect(File.exist?(synthetics.filename('restore-valid-id'))).to be true 255 | end 256 | end 257 | end 258 | end 259 | -------------------------------------------------------------------------------- /spec/datadog_backup_bin_spec.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'open3' 4 | require 'timeout' 5 | 6 | describe 'bin/datadog_backup' do # rubocop:disable RSpec/DescribeClass 7 | # Contract Or[nil,String] => self 8 | def run_bin(env = {}, args = '') 9 | status = nil 10 | output = '' 11 | cmd = "bin/datadog_backup #{args}" 12 | Open3.popen2e(env, cmd) do |_i, oe, t| 13 | pid = t.pid 14 | 15 | Timeout.timeout(4.0) do 16 | oe.each do |v| 17 | output += v 18 | end 19 | end 20 | rescue Timeout::Error 21 | LOGGER.error "Timing out #{t.inspect} after 4 second" 22 | Process.kill(15, pid) 23 | ensure 24 | status = t.value 25 | end 26 | [output, status] 27 | end 28 | 29 | required_vars = %w[ 30 | DD_API_KEY 31 | DD_APP_KEY 32 | ] 33 | 34 | env = {} 35 | required_vars.each do |v| 36 | env[v] = v.downcase 37 | end 38 | 39 | required_vars.map do |v| 40 | it "dies unless given ENV[#{v}]" do 41 | myenv = env.dup.tap { |h| h.delete(v) } 42 | _, status = run_bin(myenv, 'backup') 43 | expect(status).not_to be_success 44 | end 45 | end 46 | 47 | describe 'help' do 48 | subject(:bin) { run_bin(env, '--help') } 49 | 50 | it 'prints usage' do 51 | out_err, _status = bin 52 | expect(out_err).to match(/Usage: DD_API_KEY=/) 53 | end 54 | 55 | it 'exits cleanly' do 56 | _out_err, status = bin 57 | expect(status).to be_success 58 | end 59 | end 60 | end 61 | -------------------------------------------------------------------------------- /spec/spec_helper.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | $LOAD_PATH.unshift(File.expand_path('../lib', File.dirname(__FILE__))) 4 | 5 | require 'logger' 6 | $stdout.sync = $stderr.sync = true 7 | LOGGER = Logger.new($stderr) 8 | LOGGER.level = Logger::ERROR 9 | $stdout = File.new('/dev/null', 'w+') 10 | 11 | require 'tmpdir' 12 | require 'datadog_backup' 13 | 14 | SPEC_ROOT = __dir__ 15 | WORK_ROOT = File.expand_path(File.join(SPEC_ROOT, '..')) 16 | 17 | RSpec.configure do |config| 18 | # rspec-expectations config goes here. You can use an alternate 19 | # assertion/expectation library such as wrong or the stdlib/minitest 20 | # assertions if you prefer. 21 | config.expect_with :rspec do |expectations| 22 | # This option will default to `true` in RSpec 4. 
It makes the `description` 23 | # and `failure_message` of custom matchers include text for helper methods 24 | # defined using `chain`, e.g.: 25 | # be_bigger_than(2).and_smaller_than(4).description 26 | # # => "be bigger than 2 and smaller than 4" 27 | # ...rather than: 28 | # # => "be bigger than 2" 29 | expectations.include_chain_clauses_in_custom_matcher_descriptions = true 30 | end 31 | 32 | # rspec-mocks config goes here. You can use an alternate test double 33 | # library (such as bogus or mocha) by changing the `mock_with` option here. 34 | config.mock_with :rspec do |mocks| 35 | # Prevents you from mocking or stubbing a method that does not exist on 36 | # a real object. This is generally recommended, and will default to 37 | # `true` in RSpec 4. 38 | mocks.verify_partial_doubles = true 39 | end 40 | 41 | # This option will default to `:apply_to_host_groups` in RSpec 4 (and will 42 | # have no way to turn it off -- the option exists only for backwards 43 | # compatibility in RSpec 3). It causes shared context metadata to be 44 | # inherited by the metadata hash of host groups and examples, rather than 45 | # triggering implicit auto-inclusion in groups with matching metadata. 46 | config.shared_context_metadata_behavior = :apply_to_host_groups 47 | 48 | # The settings below are suggested to provide a good initial experience 49 | # with RSpec, but feel free to customize to your heart's content. 50 | # This allows you to limit a spec run to individual examples or groups 51 | # you care about by tagging them with `:focus` metadata. When nothing 52 | # is tagged with `:focus`, all examples get run. RSpec also provides 53 | # aliases for `it`, `describe`, and `context` that include `:focus` 54 | # metadata: `fit`, `fdescribe` and `fcontext`, respectively. 55 | config.filter_run_when_matching :focus 56 | 57 | # Allows RSpec to persist some state between runs in order to support 58 | # the `--only-failures` and `--next-failure` CLI options. We recommend 59 | # you configure your source control system to ignore this file. 60 | config.example_status_persistence_file_path = 'spec/examples.txt' 61 | 62 | # Limits the available syntax to the non-monkey patched syntax that is 63 | # recommended. For more details, see: 64 | # https://relishapp.com/rspec/rspec-core/docs/configuration/zero-monkey-patching-mode 65 | config.disable_monkey_patching! 66 | 67 | # This setting enables warnings. It's recommended, but in some cases may 68 | # be too noisy due to issues in dependencies. 69 | config.warnings = true 70 | 71 | # Many RSpec users commonly either run the entire suite or an individual 72 | # file, and it's useful to allow more verbose output when running an 73 | # individual spec file. 74 | if config.files_to_run.one? 75 | # Use the documentation formatter for detailed output, 76 | # unless a formatter has already been configured 77 | # (e.g. via a command-line flag). 78 | config.default_formatter = 'doc' 79 | end 80 | 81 | # Print the 10 slowest examples and example groups at the 82 | # end of the spec run, to help surface which specs are running 83 | # particularly slow. 84 | config.profile_examples = 10 85 | 86 | # Run specs in random order to surface order dependencies. If you find an 87 | # order dependency and want to debug it, you can fix the order by providing 88 | # the seed, which is printed after each run. 89 | # --seed 1234 90 | config.order = :random 91 | 92 | # Seed global randomization in this process using the `--seed` CLI option. 
93 | # Setting this allows you to use `--seed` to deterministically reproduce 94 | # test failures related to randomization by passing the same `--seed` value 95 | # as the one that triggered the failure. 96 | Kernel.srand config.seed 97 | 98 | # Make RSpec available throughout the rspec unit test suite 99 | config.expose_dsl_globally = true 100 | end 101 | 102 | def respond_with200(body) 103 | [200, {}, body] 104 | end 105 | --------------------------------------------------------------------------------
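Every spec file above uses the same HTTP stubbing pattern: build a Faraday::Adapter::Test::Stubs set, wrap it in a connection via the :test adapter, and inject that connection as the resource's api_service so no request ever reaches Datadog. The sketch below shows that pattern in isolation; the helper name and stub path are taken from the specs, while the sample payload and the trailing usage lines are illustrative only, not part of the gem.

require 'faraday'

# Mirror of the helper at the bottom of spec_helper.rb: the test adapter expects a
# Rack-style [status, headers, body] triple from each stub block.
def respond_with200(body)
  [200, {}, body]
end

# Register canned responses for the paths a spec exercises.
stubs = Faraday::Adapter::Test::Stubs.new
stubs.get('/api/v1/monitor') { respond_with200([{ 'id' => 123_455, 'name' => 'foo' }]) }

# A connection that never leaves the process; requests are matched against `stubs`.
api_client_double = Faraday.new { |f| f.adapter :test, stubs }

response = api_client_double.get('/api/v1/monitor')
response.status # => 200
response.body   # => [{"id"=>123455, "name"=>"foo"}]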