├── .changes
├── 2.3.0.md
├── 2.4.0.md
├── 2.4.1.md
├── 2.4.2.md
├── 2.5.0.md
├── 2.6.0.md
├── 2.7.0.md
├── 2.7.1.md
└── unreleased
│ └── .gitkeep
├── .changie.yaml
├── .copywrite.hcl
├── .github
├── CODEOWNERS
├── CONTRIBUTING.md
├── ISSUE_TEMPLATE
│ ├── Bug_Report.yml
│ ├── Feature_Request.yml
│ └── config.yml
├── SUPPORT.md
├── dependabot.yml
├── labeler-issue-triage.yml
├── labeler-pull-request-triage.yml
├── pull_request_template.md
└── workflows
│ ├── build.yml
│ ├── ci-changie.yml
│ ├── compliance.yml
│ ├── issue-comment-triage.yml
│ ├── issue-opened.yml
│ ├── lock.yml
│ ├── pull-request.yml
│ └── test.yml
├── .gitignore
├── .golangci.yml
├── .release
├── ci.hcl
├── release-metadata.hcl
├── security-scan.hcl
└── terraform-provider-archive-artifacts.hcl
├── CHANGELOG.md
├── DESIGN.md
├── GNUmakefile
├── LICENSE
├── META.d
└── _summary.yaml
├── README.md
├── docs
├── cdktf
│ ├── python
│ │ ├── data-sources
│ │ │ └── file.md
│ │ ├── index.md
│ │ └── resources
│ │ │ └── file.md
│ └── typescript
│ │ ├── data-sources
│ │ └── file.md
│ │ ├── index.md
│ │ └── resources
│ │ └── file.md
├── data-sources
│ └── file.md
├── index.md
└── resources
│ └── file.md
├── examples
├── data-sources
│ └── file
│ │ ├── data-source.tf
│ │ ├── lambda.tf
│ │ └── multiple-files.tf
└── resources
│ └── file
│ ├── lambda.tf
│ ├── multiple-files.tf
│ └── resource.tf
├── go.mod
├── go.sum
├── internal
├── hashcode
│ ├── hashcode.go
│ └── hashcode_test.go
└── provider
│ ├── archiver.go
│ ├── data_source_archive_file.go
│ ├── data_source_archive_file_test.go
│ ├── data_source_archive_file_tgz_test.go
│ ├── data_source_archive_file_zip_test.go
│ ├── provider.go
│ ├── provider_test.go
│ ├── resource_archive_file.go
│ ├── resource_archive_file_test.go
│ ├── resource_archive_file_tgz_test.go
│ ├── resource_archive_file_zip_test.go
│ ├── tar_archiver.go
│ ├── tar_archiver_test.go
│ ├── test-fixtures
│ ├── test-dir-with-symlink-dir
│ │ └── test-symlink-dir
│ ├── test-dir-with-symlink-file
│ │ ├── test-file.txt
│ │ └── test-symlink.txt
│ ├── test-dir
│ │ ├── test-dir1
│ │ │ ├── file1.txt
│ │ │ ├── file2.txt
│ │ │ └── file3.txt
│ │ ├── test-dir2
│ │ │ ├── file1.txt
│ │ │ ├── file2.txt
│ │ │ └── file3.txt
│ │ └── test-file.txt
│ ├── test-symlink-dir
│ └── test-symlink-dir-with-symlink-file
│ ├── zip_archiver.go
│ └── zip_archiver_test.go
├── main.go
├── templates
├── data-sources
│ └── file.md.tmpl
├── index.md.tmpl
└── resources
│ └── file.md.tmpl
├── terraform-registry-manifest.json
├── tools
├── go.mod
├── go.sum
└── tools.go
└── version
└── VERSION
/.changes/2.3.0.md:
--------------------------------------------------------------------------------
1 | ## 2.3.0 (January 18, 2023)
2 |
3 | NOTES:
4 |
5 | * Provider has been re-written using the new [`terraform-plugin-framework`](https://www.terraform.io/plugin/framework) ([#170](https://github.com/hashicorp/terraform-provider-archive/pull/170)).
6 |
7 | ## 2.2.0 (May 04, 2021)
8 |
9 | ENHANCEMENTS:
10 |
11 | * New opt-in flag to specify the `output_file_mode` to produce more deterministic behavior across operating systems. ([#90](https://github.com/terraform-providers/terraform-provider-archive/issues/90))
12 |
13 | DEPENDENCIES:
14 |
15 | * Update `github.com/hashicorp/terraform-plugin-sdk/v2` to `v2.6.1` ([#95](https://github.com/terraform-providers/terraform-provider-archive/issues/95))
16 |
17 | NOTES:
18 |
19 | Changelogs now list all dependency updates in a separate section. These are understood to have no user-facing changes except those detailed in earlier sections.
20 |
21 | ## 2.1.0 (February 19, 2021)
22 |
23 | Binary releases of this provider now include the darwin-arm64 platform. This version contains no further changes.
24 |
25 | ## 2.0.0 (October 14, 2020)
26 |
27 | Binary releases of this provider now include the linux-arm64 platform.
28 |
29 | BREAKING CHANGES:
30 |
31 | * Upgrade to version 2 of the Terraform Plugin SDK, which drops support for Terraform 0.11. This provider will continue to work as expected for users of Terraform 0.11, which will not download the new version. ([#72](https://github.com/terraform-providers/terraform-provider-archive/issues/72))
32 |
33 | BUG FIXES:
34 |
35 | * Fixed path bug with exclusions on Windows ([#71](https://github.com/terraform-providers/terraform-provider-archive/issues/71))
36 |
37 | ## 1.3.0 (September 30, 2019)
38 |
39 | NOTES:
40 |
41 | * The provider has switched to the standalone TF SDK, there should be no noticeable impact on compatibility. ([#50](https://github.com/terraform-providers/terraform-provider-archive/issues/50))
42 |
43 | ## 1.2.2 (April 30, 2019)
44 |
45 | * This release includes another Terraform SDK upgrade intended to align with that being used for other providers as we prepare for the Core v0.12.0 release. It should have no significant changes in behavior for this provider.
46 |
47 | ## 1.2.1 (April 12, 2019)
48 |
49 | * This release includes only a Terraform SDK upgrade intended to align with that being used for other providers as we prepare for the Core v0.12.0 release. It should have no significant changes in behavior for this provider.
50 |
51 | ## 1.2.0 (March 20, 2019)
52 |
53 | IMPROVEMENTS:
54 |
55 | * The provider is now compatible with Terraform v0.12, while retaining compatibility with prior versions.
56 |
57 | ## 1.1.0 (July 30, 2018)
58 |
59 | ENHANCEMENTS:
60 |
61 | * Add `excludes` to the `archive_file` data source to exclude files when using `source_dir` ([#18](https://github.com/terraform-providers/terraform-provider-archive/issues/18))
62 |
63 | BUG FIXES:
64 |
65 | * Fix zip file path names to use forward slash on Windows ([#25](https://github.com/terraform-providers/terraform-provider-archive/issues/25))
66 | * Fix panic in `filepath.Walk` call ([#26](https://github.com/terraform-providers/terraform-provider-archive/issues/26))
67 |
68 | ## 1.0.3 (March 23, 2018)
69 |
70 | BUG FIXES:
71 |
72 | * Fix modified time affecting zip contents and causing spurious diffs ([#16](https://github.com/terraform-providers/terraform-provider-archive/issues/16))
73 |
74 | ## 1.0.2 (March 16, 2018)
75 |
76 | BUG FIXES:
77 |
78 | * Fix issue with flags not being copied on a single file and regression introduced in 1.0.1 ([#13](https://github.com/terraform-providers/terraform-provider-archive/issues/13))
79 |
80 | ## 1.0.1 (March 13, 2018)
81 |
82 | BUG FIXES:
83 |
84 | * Fix issue with flags not being copied in to archive ([#9](https://github.com/terraform-providers/terraform-provider-archive/issues/9))
85 |
86 | ## 1.0.0 (September 15, 2017)
87 |
88 | * No changes from 0.1.0; just adjusting to [the new version numbering scheme](https://www.hashicorp.com/blog/hashicorp-terraform-provider-versioning/).
89 |
90 | ## 0.1.0 (June 20, 2017)
91 |
92 | NOTES:
93 |
94 | * Same functionality as that of Terraform 0.9.8. Repacked as part of [Provider Splitout](https://www.hashicorp.com/blog/upcoming-provider-changes-in-terraform-0-10/)
95 |
--------------------------------------------------------------------------------
/.changes/2.4.0.md:
--------------------------------------------------------------------------------
1 | ## 2.4.0 (June 07, 2023)
2 |
3 | NOTES:
4 |
5 | * This Go module has been updated to Go 1.19 per the [Go support policy](https://golang.org/doc/devel/release.html#policy). Any consumers building on earlier Go versions may experience errors. ([#200](https://github.com/hashicorp/terraform-provider-archive/issues/200))
6 |
7 | ENHANCEMENTS:
8 |
9 | * data-source/archive_file: Added attribute `exclude_symlink_directories` which will exclude symbolically linked directories from the archive when set to true. Defaults to false ([#183](https://github.com/hashicorp/terraform-provider-archive/issues/183))
10 | * resource/archive_file: Added attribute `exclude_symlink_directories` which will exclude symbolically linked directories from the archive when set to true. Defaults to false ([#183](https://github.com/hashicorp/terraform-provider-archive/issues/183))
11 |
12 | BUG FIXES:
13 |
14 | * data-source/archive_file: Symbolically linked directories are included in archives by default rather than generating an error ([#183](https://github.com/hashicorp/terraform-provider-archive/issues/183))
15 | * resource/archive_file: Symbolically linked directories are included in archives by default rather than generating an error ([#183](https://github.com/hashicorp/terraform-provider-archive/issues/183))
16 |
--------------------------------------------------------------------------------
/.changes/2.4.1.md:
--------------------------------------------------------------------------------
1 | ## 2.4.1 (December 18, 2023)
2 |
3 | NOTES:
4 |
5 | * This release introduces no functional changes. It does however include dependency updates which address upstream CVEs. ([#287](https://github.com/hashicorp/terraform-provider-archive/issues/287))
6 |
7 |
--------------------------------------------------------------------------------
/.changes/2.4.2.md:
--------------------------------------------------------------------------------
1 | ## 2.4.2 (January 24, 2024)
2 |
3 | BUG FIXES:
4 |
5 | * data-source/archive_file: Prevent error when generating archive from source containing symbolically linked directories, and `exclude_symlink_directories` is set to true ([#298](https://github.com/hashicorp/terraform-provider-archive/issues/298))
6 | * resource/archive_file: Prevent error when generating archive from source containing symbolically linked directories, and `exclude_symlink_directories` is set to true ([#298](https://github.com/hashicorp/terraform-provider-archive/issues/298))
7 | * resource/archive_file: Return error when generated archive would be empty ([#298](https://github.com/hashicorp/terraform-provider-archive/issues/298))
8 | * data-source/archive_file: Return error when generated archive would be empty ([#298](https://github.com/hashicorp/terraform-provider-archive/issues/298))
9 |
10 |
--------------------------------------------------------------------------------
/.changes/2.5.0.md:
--------------------------------------------------------------------------------
1 | ## 2.5.0 (July 31, 2024)
2 |
3 | ENHANCEMENTS:
4 |
5 | * data-source/archive_file: Add glob pattern matching support to the `excludes` attribute. ([#354](https://github.com/hashicorp/terraform-provider-archive/issues/354))
6 | * resource/archive_file: Add glob pattern matching support to the `excludes` attribute. ([#354](https://github.com/hashicorp/terraform-provider-archive/issues/354))
7 |
8 |
--------------------------------------------------------------------------------
/.changes/2.6.0.md:
--------------------------------------------------------------------------------
1 | ## 2.6.0 (September 09, 2024)
2 |
3 | FEATURES:
4 |
5 | * data-source/archive_file: Add support for creating `tar.gz` archive files. ([#277](https://github.com/hashicorp/terraform-provider-archive/issues/277))
6 | * resource/archive_file: Add support for creating `tar.gz` archive files. ([#277](https://github.com/hashicorp/terraform-provider-archive/issues/277))
7 |
8 |
--------------------------------------------------------------------------------
/.changes/2.7.0.md:
--------------------------------------------------------------------------------
1 | ## 2.7.0 (December 05, 2024)
2 |
3 | FEATURES:
4 |
5 | * resource/archive_file: Remove `deprecated` status ([#218](https://github.com/hashicorp/terraform-provider-archive/issues/218))
6 |
7 |
--------------------------------------------------------------------------------
/.changes/2.7.1.md:
--------------------------------------------------------------------------------
1 | ## 2.7.1 (May 12, 2025)
2 |
3 | NOTES:
4 |
5 | * Update dependencies: ([#425](https://github.com/hashicorp/terraform-provider-archive/pull/425))
6 |
--------------------------------------------------------------------------------
/.changes/unreleased/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hashicorp/terraform-provider-archive/b8ace88906288c5d3950c34796468ebf052fbde3/.changes/unreleased/.gitkeep
--------------------------------------------------------------------------------
/.changie.yaml:
--------------------------------------------------------------------------------
1 | # DO NOT EDIT - This GitHub Workflow is managed by automation
2 | # https://github.com/hashicorp/terraform-devex-repos
3 | changesDir: .changes
4 | unreleasedDir: unreleased
5 | changelogPath: CHANGELOG.md
6 | versionExt: md
7 | versionFormat: '## {{.Version}} ({{.Time.Format "January 02, 2006"}})'
8 | kindFormat: '{{.Kind}}:'
9 | changeFormat: '* {{.Body}} ([#{{.Custom.Issue}}](https://github.com/hashicorp/terraform-provider-archive/issues/{{.Custom.Issue}}))'
10 | custom:
11 | - key: Issue
12 | label: Issue/PR Number
13 | type: int
14 | minInt: 1
15 | kinds:
16 | - label: BREAKING CHANGES
17 | - label: NOTES
18 | - label: FEATURES
19 | - label: ENHANCEMENTS
20 | - label: BUG FIXES
21 | newlines:
22 | afterKind: 1
23 | beforeKind: 1
24 | endOfVersion: 2
25 |
--------------------------------------------------------------------------------
/.copywrite.hcl:
--------------------------------------------------------------------------------
1 | schema_version = 1
2 |
3 | project {
4 | license = "MPL-2.0"
5 | copyright_year = 2017
6 |
7 | header_ignore = [
8 | # internal catalog metadata (prose)
9 | "META.d/**/*.yaml",
10 |
11 | # changie tooling configuration and CHANGELOG entries (prose)
12 | ".changes/unreleased/*.yaml",
13 | ".changie.yaml",
14 |
15 | # examples used within documentation (prose)
16 | "examples/**",
17 |
18 | # GitHub issue template configuration
19 | ".github/ISSUE_TEMPLATE/*.yml",
20 |
21 | # GitHub Actions workflow-specific configurations
22 | ".github/labeler-*.yml",
23 |
24 | # golangci-lint tooling configuration
25 | ".golangci.yml",
26 |
27 | # Release Engineering tooling configuration
28 | ".release/*.hcl",
29 | ]
30 | }
31 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @hashicorp/terraform-devex
2 |
--------------------------------------------------------------------------------
/.github/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | Thank you for investing your time and energy by contributing to our project: please ensure you are familiar
4 | with the [HashiCorp Code of Conduct](https://github.com/hashicorp/.github/blob/master/CODE_OF_CONDUCT.md).
5 |
6 | This provider is a HashiCorp **utility provider**, which means any bug fix and feature
7 | has to be considered in the context of the thousands/millions of configurations in which this provider is used.
8 | This is great as your contribution can have a big positive impact, but we have to assess potential negative impact too
9 | (e.g. breaking existing configurations). _Stability over features_.
10 |
11 | To provide some safety to the wider provider ecosystem, we strictly follow
12 | [semantic versioning](https://semver.org/) and HashiCorp's own
13 | [versioning specification](https://www.terraform.io/plugin/sdkv2/best-practices/versioning#versioning-specification).
14 | Any changes that could be considered as breaking will only be included as part of a major release.
15 | In case multiple breaking changes need to happen, we will group them in the next upcoming major release.
16 |
17 | ## Asking Questions
18 |
19 | For questions, curiosity, or if still unsure what you are dealing with,
20 | please see the HashiCorp [Terraform Providers Discuss](https://discuss.hashicorp.com/c/terraform-providers/31)
21 | forum.
22 |
23 | ## Reporting Vulnerabilities
24 |
25 | Please disclose security vulnerabilities responsibly by following the
26 | [HashiCorp Vulnerability Reporting guidelines](https://www.hashicorp.com/security#vulnerability-reporting).
27 |
28 | ## Understanding the design
29 |
30 | Before proceeding with raising issues or submitting pull requests, it will probably help to familiarise yourself with
31 | the [design principles](../DESIGN.md) of this provider. This will aid your proposals, and help understand
32 | why we took certain decisions during development.
33 |
34 | ## Raising Issues
35 |
36 | We welcome issues of all kinds including feature requests, bug reports or documentation suggestions.
37 | Below are guidelines for well-formed issues of each type.
38 |
39 | ### Bug Reports
40 |
41 | * [ ] **Test against latest release**: Make sure you test against the latest available version of Terraform and the provider.
42 | It is possible we may have already fixed the bug you're experiencing.
43 | * [ ] **Search for duplicates**: It's helpful to keep bug reports consolidated to one thread, so do a quick search
44 | on existing bug reports to check if anybody else has reported the same thing.
45 | You can scope searches by the label `bug` to help narrow things down.
46 | * [ ] **Include steps to reproduce**: Provide steps to reproduce the issue, along with code examples and/or real code,
47 | so we can try to reproduce it. Without this, it makes it much harder (sometimes impossible) to fix the issue.
48 |
49 | ### Feature Requests
50 |
51 | * [ ] **Search for possible duplicate requests**: It's helpful to keep requests consolidated to one thread,
52 | so do a quick search on existing requests to check if anybody else has reported the same thing.
53 | You can scope searches by the label `enhancement` to help narrow things down.
54 | * [ ] **Include a use case description**: In addition to describing the behavior of the feature you'd like to see added,
55 | it's helpful to also make a case for why the feature would be important and how it would benefit
56 | the provider and, potentially, the wider Terraform ecosystem.
57 |
58 | ## New Pull Request
59 |
60 | Thank you for contributing!
61 |
62 | We are happy to review pull requests without associated issues,
63 | but we **highly recommend** starting by describing and discussing
64 | your problem or feature and attaching use cases to an issue first
65 | before raising a pull request.
66 |
67 | * [ ] **Early validation of idea and implementation plan**: provider development is complicated enough that there
68 | are often several ways to implement something, each of which has different implications and tradeoffs.
69 | Working through a plan of attack with the team before you dive into implementation will help ensure that you're
70 | working in the right direction.
71 | * [ ] **Tests**: It may go without saying, but every new patch should be covered by tests wherever possible.
72 | For bug-fixes, tests to prove the fix is valid. For features, tests to exercise the new code paths.
73 | * [ ] **Go Modules**: We use [Go Modules](https://github.com/golang/go/wiki/Modules) to manage and version our dependencies.
74 | Please make sure that you reflect dependency changes in your pull requests appropriately
75 | (e.g. `go get`, `go mod tidy` or other commands).
76 | Refer to the [dependency updates](#dependency-updates) section for more information about how
77 | this project maintains existing dependencies.
78 | * [ ] **Changelog**: Refer to the [changelog](#changelog) section for more information about how to create changelog entries.
79 | * [ ] **License Headers**: All source code requires a license header at the top of the file, refer to [License Headers](#license-headers) for information on how to autogenerate these headers.
80 |
81 | ### Dependency Updates
82 |
83 | Dependency management is performed by [Dependabot](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates).
84 | Where possible, dependency updates should occur through that system to ensure all Go module files are appropriately
85 | updated and to prevent duplicated effort of concurrent update submissions.
86 | Once available, updates are expected to be verified and merged to prevent latent technical debt.
87 |
88 | ### Changelog
89 |
90 | HashiCorp’s open-source projects have always maintained user-friendly, readable `CHANGELOG`s that allow
91 | practitioners and developers to tell at a glance whether a release should have any effect on them,
92 | and to gauge the risk of an upgrade.
93 |
94 | We follow Terraform Plugin
95 | [changelog specifications](https://www.terraform.io/plugin/sdkv2/best-practices/versioning#changelog-specification).
96 |
97 | #### Changie Automation Tool
98 | This project uses the [Changie](https://changie.dev/) automation tool for changelog automation.
99 |
100 | To add a new entry to the `CHANGELOG`, install Changie using the following [instructions](https://changie.dev/guide/installation/)
101 |
102 | After Changie is installed on your local machine, run:
103 | ```bash
104 | changie new
105 | ```
106 | and choose a `kind` of change corresponding to the Terraform Plugin [changelog categories](https://developer.hashicorp.com/terraform/plugin/sdkv2/best-practices/versioning#categorization)
107 |
108 | Fill out the body field following the entry format. Changie will then prompt for a Github issue or pull request number.
109 |
110 | Repeat this process for any additional changes. The `.yaml` files created in the `.changes/unreleased` folder
111 | should be pushed the repository along with any code changes.
112 |
113 | #### Entry format
114 |
115 | Entries that are specific to _resources_ or _data sources_, they should look like:
116 |
117 | ```markdown
118 | * resource/RESOURCE_NAME: ENTRY DESCRIPTION.
119 |
120 | * data-source/DATA-SOURCE_NAME: ENTRY DESCRIPTION.
121 | ```
122 |
123 | #### Which changes should appear in the `CHANGELOG`?
124 |
125 | The `CHANGELOG` is intended to show developer-impacting changes to the codebase for a particular version.
126 | If every change or commit to the code resulted in an entry, the `CHANGELOG` would become less useful for developers.
127 | The lists below are general guidelines to decide whether a change should have an entry.
128 |
129 | ##### Changes that _should not_ have a `CHANGELOG` entry
130 |
131 | * Documentation updates
132 | * Testing updates
133 | * Code refactoring
134 |
135 | ##### Changes that _may_ have a `CHANGELOG` entry
136 |
137 | * Dependency updates: If the update contains relevant bug fixes or enhancements that affect developers,
138 | those should be called out.
139 |
140 | ##### Changes that _should_ have a `CHANGELOG` entry
141 |
142 | * Major features
143 | * Bug fixes
144 | * Enhancements
145 | * Deprecations
146 | * Breaking changes and removals
147 |
148 | ### License Headers
149 |
150 | All source code files (excluding autogenerated files like `go.mod`, prose, and files excluded in [.copywrite.hcl](../.copywrite.hcl)) must have a license header at the top.
151 |
152 | This can be autogenerated by running `make generate` or running `go generate ./...` in the [/tools](../tools) directory.
153 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/Bug_Report.yml:
--------------------------------------------------------------------------------
1 | name: Bug Report
2 | description: Something is incorrect or not working as expected.
3 | labels: ["bug"]
4 | body:
5 | - type: markdown
6 | attributes:
7 | value: |
8 | Thank you for taking the time to fill out this bug report! Please note that this issue tracker is only used for bug reports and feature requests. Other issues will be closed.
9 |
10 | If you have a configuration, workflow, or other question, please go back to the issue chooser and select one of the question links.
11 | - type: textarea
12 | id: versions
13 | attributes:
14 | label: Terraform CLI and Provider Versions
15 | description: What versions of Terraform CLI and the provider?
16 | placeholder: Output of `terraform version` from configuration directory
17 | validations:
18 | required: true
19 | - type: textarea
20 | id: terraform-configuration
21 | attributes:
22 | label: Terraform Configuration
23 | description: Please copy and paste any relevant Terraform configuration. This will be automatically formatted into code, so no need for backticks.
24 | render: terraform
25 | validations:
26 | required: true
27 | - type: textarea
28 | id: expected-behavior
29 | attributes:
30 | label: Expected Behavior
31 | description: What did you expect to happen?
32 | placeholder: Description of what should have happened.
33 | validations:
34 | required: true
35 | - type: textarea
36 | id: actual-behavior
37 | attributes:
38 | label: Actual Behavior
39 | description: What actually happened?
40 | placeholder: Description of what actually happened.
41 | validations:
42 | required: true
43 | - type: textarea
44 | id: reproduction-steps
45 | attributes:
46 | label: Steps to Reproduce
47 | description: List of steps to reproduce the issue.
48 | value: |
49 | 1. `terraform apply`
50 | validations:
51 | required: true
52 | - type: dropdown
53 | id: impact
54 | attributes:
55 | label: How much impact is this issue causing?
56 | description: High represents completely not able to use the provider or unexpected destruction of data/infrastructure. Medium represents unable to upgrade provider version or an issue with potential workaround. Low represents minor provider code, configuration, or documentation issues.
57 | options:
58 | - High
59 | - Medium
60 | - Low
61 | validations:
62 | required: true
63 | - type: input
64 | id: logs
65 | attributes:
66 | label: Logs
67 | description: Please provide a link to a [GitHub Gist](https://gist.github.com) containing TRACE log output. [Terraform Debugging Documentation](https://www.terraform.io/internals/debugging)
68 | placeholder: https://gist.github.com/example/12345678
69 | validations:
70 | required: false
71 | - type: textarea
72 | id: additional-information
73 | attributes:
74 | label: Additional Information
75 | description: Are there any additional details about your environment, workflow, or recent changes that might be relevant? Have you discovered a workaround? Are there links to other related issues?
76 | validations:
77 | required: false
78 | - type: checkboxes
79 | id: terms
80 | attributes:
81 | label: Code of Conduct
82 | description: By submitting this issue, you agree to follow our [Community Guidelines](https://www.hashicorp.com/community-guidelines).
83 | options:
84 | - label: I agree to follow this project's Code of Conduct
85 | required: true
86 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/Feature_Request.yml:
--------------------------------------------------------------------------------
1 | name: Feature Request
2 | description: Something is missing or could be improved.
3 | labels: ["enhancement"]
4 | body:
5 | - type: markdown
6 | attributes:
7 | value: |
8 | Thank you for taking the time to fill out this feature request! Please note that this issue tracker is only used for bug reports and feature requests. Other issues will be closed.
9 |
10 | If you have a configuration, workflow, or other question, please go back to the issue chooser and select one of the question links.
11 | - type: textarea
12 | id: versions
13 | attributes:
14 | label: Terraform CLI and Provider Versions
15 | description: What versions of Terraform CLI and the provider?
16 | placeholder: Output of `terraform version` from configuration directory
17 | validations:
18 | required: true
19 | - type: textarea
20 | id: use-case
21 | attributes:
22 | label: Use Cases or Problem Statement
23 | description: What use cases or problems are you trying to solve?
24 | placeholder: Description of use cases or problems.
25 | validations:
26 | required: true
27 | - type: textarea
28 | id: proposal
29 | attributes:
30 | label: Proposal
31 | description: What solutions would you prefer?
32 | placeholder: Description of proposed solutions.
33 | validations:
34 | required: true
35 | - type: dropdown
36 | id: impact
37 | attributes:
38 | label: How much impact is this issue causing?
39 | description: High represents completely not able to use the provider without this. Medium represents unable to solve a specific problem or understand something. Low represents minor provider code, configuration, or documentation issues.
40 | options:
41 | - High
42 | - Medium
43 | - Low
44 | validations:
45 | required: true
46 | - type: textarea
47 | id: additional-information
48 | attributes:
49 | label: Additional Information
50 | description: Are there any additional details about your environment, workflow, or recent changes that might be relevant? Have you discovered a workaround? Are there links to other related issues?
51 | validations:
52 | required: false
53 | - type: checkboxes
54 | id: terms
55 | attributes:
56 | label: Code of Conduct
57 | description: By submitting this issue, you agree to follow our [Community Guidelines](https://www.hashicorp.com/community-guidelines).
58 | options:
59 | - label: I agree to follow this project's Code of Conduct
60 | required: true
61 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: false
2 | contact_links:
3 | - name: Terraform Provider Questions
4 | url: https://discuss.hashicorp.com/c/terraform-providers/31
5 | about: GitHub issues in this repository are only intended for bug reports and feature requests. Other issues will be closed. Please ask and answer questions through the Terraform Provider section of HashiCorp Discuss.
6 | - name: Terraform Language or Workflow Questions
7 | url: https://discuss.hashicorp.com/c/terraform-core
8 | about: Please ask and answer language or workflow related questions through the Terraform Core section of HashiCorp Discuss.
9 |
--------------------------------------------------------------------------------
/.github/SUPPORT.md:
--------------------------------------------------------------------------------
1 | # Support
2 |
3 | * Project [README](../README.md).
4 | * Official [Documentation](https://registry.terraform.io/providers/hashicorp/archive/latest/docs).
5 | * Provider [Discussion forums](https://discuss.hashicorp.com/c/terraform-providers/31).
6 | * Terraform [Community](https://www.terraform.io/community.html).
7 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # See GitHub's docs for more information on this file:
2 | # https://docs.github.com/en/free-pro-team@latest/github/administering-a-repository/configuration-options-for-dependency-updates
3 | version: 2
4 | updates:
5 | # Maintain dependencies for Go modules
6 | - package-ecosystem: "gomod"
7 | directory: "/"
8 | schedule:
9 | # Check for updates to Go modules every weekday
10 | interval: "daily"
11 | - package-ecosystem: "gomod"
12 | directory: "/tools"
13 | schedule:
14 | interval: "daily"
15 | - package-ecosystem: "github-actions"
16 | directory: "/"
17 | schedule:
18 | interval: "daily"
19 |
--------------------------------------------------------------------------------
/.github/labeler-issue-triage.yml:
--------------------------------------------------------------------------------
1 | bug:
2 | - 'panic:'
3 | crash:
4 | - 'panic:'
5 |
--------------------------------------------------------------------------------
/.github/labeler-pull-request-triage.yml:
--------------------------------------------------------------------------------
1 | dependencies:
2 | - changed-files:
3 | - any-glob-to-any-file: .github/dependabot.yml
4 | - any-glob-to-any-file: go.mod
5 | - any-glob-to-any-file: go.sum
6 | documentation:
7 | - changed-files:
8 | - any-glob-to-any-file: website/**/*
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | ## Related Issue
2 |
3 | Fixes #
4 |
5 | ## Description
6 |
7 | In plain English, describe your approach to addressing the issue linked above. For example, if you made a particular design decision, let us know why you chose this path instead of another solution.
8 |
9 |
10 | ## Rollback Plan
11 |
12 | - [ ] If a change needs to be reverted, we will roll out an update to the code within 7 days.
13 |
14 | ## Changes to Security Controls
15 |
16 | Are there any changes to security controls (access controls, encryption, logging) in this pull request? If so, explain.
17 |
--------------------------------------------------------------------------------
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | # This workflow builds the product for all supported platforms and uploads the resulting
2 | # binaries as Actions artifacts. The workflow also uploads a build metadata file
3 | # (metadata.json) -- and a Terraform Registry manifest file (terraform-registry-manifest.json).
4 | #
5 | # Reference: https://github.com/hashicorp/terraform-provider-crt-example/blob/main/.github/workflows/README.md
6 | #
7 |
8 | name: build
9 |
10 | # We default to running this workflow on every push to every branch.
11 | # This provides fast feedback when build issues occur, so they can be
12 | # fixed prior to being merged to the main branch.
13 | #
14 | # If you want to opt out of this, and only run the build on certain branches
15 | # please refer to the documentation on branch filtering here:
16 | #
17 | # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onpushbranchestagsbranches-ignoretags-ignore
18 | #
19 | on: [workflow_dispatch, push]
20 |
21 | env:
22 | PKG_NAME: "terraform-provider-archive"
23 |
24 | jobs:
25 | # Detects the Go toolchain version to use for product builds.
26 | #
27 | # The implementation is inspired by envconsul -- https://go.hashi.co/get-go-version-example
28 | get-go-version:
29 | name: "Detect Go toolchain version"
30 | runs-on: ubuntu-latest
31 | outputs:
32 | go-version: ${{ steps.get-go-version.outputs.go-version }}
33 | steps:
34 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
35 | - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
36 | with:
37 | go-version-file: 'go.mod'
38 | - name: Detect Go version
39 | id: get-go-version
40 | run: |
41 | version="$(go list -f {{.GoVersion}} -m)"
42 | echo "go-version=$version" >> "$GITHUB_OUTPUT"
43 |
44 | # Parses the version/VERSION file. Reference: https://github.com/hashicorp/actions-set-product-version/blob/main/README.md
45 | #
46 | # > This action should be implemented in product repo `build.yml` files. The action is intended to grab the version
47 | # > from the version file at the beginning of the build, then passes those versions (along with metadata, where
48 | # > necessary) to any workflow jobs that need version information.
49 | set-product-version:
50 | name: "Parse version file"
51 | runs-on: ubuntu-latest
52 | outputs:
53 | product-version: ${{ steps.set-product-version.outputs.product-version }}
54 | product-base-version: ${{ steps.set-product-version.outputs.base-product-version }}
55 | product-prerelease-version: ${{ steps.set-product-version.outputs.prerelease-product-version }}
56 | product-minor-version: ${{ steps.set-product-version.outputs.minor-product-version }}
57 | steps:
58 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
59 | - name: Set variables
60 | id: set-product-version
61 | uses: hashicorp/actions-set-product-version@v2
62 |
63 | # Creates metadata.json file containing build metadata for consumption by CRT workflows.
64 | #
65 | # Reference: https://github.com/hashicorp/actions-generate-metadata/blob/main/README.md
66 | generate-metadata-file:
67 | needs: set-product-version
68 | runs-on: ubuntu-latest
69 | outputs:
70 | filepath: ${{ steps.generate-metadata-file.outputs.filepath }}
71 | steps:
72 | - name: "Checkout directory"
73 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
74 | - name: Generate metadata file
75 | id: generate-metadata-file
76 | uses: hashicorp/actions-generate-metadata@v1
77 | with:
78 | version: ${{ needs.set-product-version.outputs.product-version }}
79 | product: ${{ env.PKG_NAME }}
80 | repositoryOwner: "hashicorp"
81 | - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
82 | with:
83 | name: metadata.json
84 | path: ${{ steps.generate-metadata-file.outputs.filepath }}
85 |
86 | # Uploads an Actions artifact named terraform-registry-manifest.json.zip.
87 | #
88 | # The artifact contains a single file with a filename that Terraform Registry expects
89 | # (example: terraform-provider-crt-example_2.3.6-alpha1_manifest.json). The file contents
90 | # are identical to the terraform-registry-manifest.json file in the source repository.
91 | upload-terraform-registry-manifest-artifact:
92 | needs: set-product-version
93 | runs-on: ubuntu-latest
94 | steps:
95 | - name: "Checkout directory"
96 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
97 | with:
98 | path: ${{ env.PKG_NAME }}
99 | - name: "Copy manifest from checkout directory to a file with the desired name"
100 | id: terraform-registry-manifest
101 | run: |
102 | name="${{ env.PKG_NAME }}"
103 | version="${{ needs.set-product-version.outputs.product-version }}"
104 |
105 | source="${name}/terraform-registry-manifest.json"
106 | destination="${name}_${version}_manifest.json"
107 |
108 | cp "$source" "$destination"
109 | echo "filename=$destination" >> "$GITHUB_OUTPUT"
110 | - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
111 | with:
112 | name: terraform-registry-manifest.json
113 | path: ${{ steps.terraform-registry-manifest.outputs.filename }}
114 | if-no-files-found: error
115 |
116 | # Builds the product for all platforms except macOS.
117 | #
118 | # With `reproducible: report`, this job also reports whether the build is reproducible,
119 | # but does not enforce it.
120 | #
121 | # Reference: https://github.com/hashicorp/actions-go-build/blob/main/README.md
122 | build:
123 | needs:
124 | - get-go-version
125 | - set-product-version
126 | runs-on: ubuntu-latest
127 | strategy:
128 | fail-fast: true
129 | # Verify expected Artifacts list for a workflow run.
130 | matrix:
131 | goos: [freebsd, windows, linux, darwin]
132 | goarch: ["386", "amd64", "arm", "arm64"]
133 | exclude:
134 | - goos: freebsd
135 | goarch: arm64
136 | - goos: windows
137 | goarch: arm64
138 | - goos: windows
139 | goarch: arm
140 | - goos: darwin
141 | goarch: 386
142 | - goos: darwin
143 | goarch: arm
144 |
145 | name: Go ${{ needs.get-go-version.outputs.go-version }} ${{ matrix.goos }} ${{ matrix.goarch }} build
146 | steps:
147 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
148 | - uses: hashicorp/actions-go-build@v1
149 | env:
150 | CGO_ENABLED: 0
151 | BASE_VERSION: ${{ needs.set-product-version.outputs.product-base-version }}
152 | PRERELEASE_VERSION: ${{ needs.set-product-version.outputs.product-prerelease-version}}
153 | METADATA_VERSION: ${{ env.METADATA }}
154 | with:
155 | bin_name: "${{ env.PKG_NAME }}_v${{ needs.set-product-version.outputs.product-version }}_x5"
156 | product_name: ${{ env.PKG_NAME }}
157 | product_version: ${{ needs.set-product-version.outputs.product-version }}
158 | go_version: ${{ needs.get-go-version.outputs.go-version }}
159 | os: ${{ matrix.goos }}
160 | arch: ${{ matrix.goarch }}
161 | reproducible: report
162 | instructions: |
163 | go build \
164 | -o "$BIN_PATH" \
165 | -trimpath \
166 | -buildvcs=false \
167 | -ldflags "-s -w"
168 | cp LICENSE "$TARGET_DIR/LICENSE.txt"
169 |
170 | whats-next:
171 | needs:
172 | - build
173 | - generate-metadata-file
174 | - upload-terraform-registry-manifest-artifact
175 | runs-on: ubuntu-latest
176 | name: "What's next?"
177 | steps:
178 | - name: "Write a helpful summary"
179 | run: |
180 | github_dot_com="${{ github.server_url }}"
181 | owner_with_name="${{ github.repository }}"
182 | ref="${{ github.ref }}"
183 |
184 | echo "### What's next?" >> "$GITHUB_STEP_SUMMARY"
185 | echo "#### For a release branch (see \`.release/ci.hcl\`)" >> $GITHUB_STEP_SUMMARY
186 | echo "After this \`build\` workflow run completes succesfully, you can expect the CRT \`prepare\` workflow to begin momentarily." >> "$GITHUB_STEP_SUMMARY"
187 | echo "To find the \`prepare\` workflow run, [view the checks for this commit]($github_dot_com/$owner_with_name/commits/$ref)" >> "$GITHUB_STEP_SUMMARY"
--------------------------------------------------------------------------------
/.github/workflows/ci-changie.yml:
--------------------------------------------------------------------------------
1 | # DO NOT EDIT - This GitHub Workflow is managed by automation
2 | # https://github.com/hashicorp/terraform-devex-repos
3 |
4 | # Continuous integration handling for changie
5 | name: ci-changie
6 |
7 | on:
8 | pull_request:
9 | paths:
10 | - .changes/unreleased/*.yaml
11 | - .changie.yaml
12 | - .github/workflows/ci-changie.yml
13 |
14 | permissions:
15 | contents: read
16 |
17 | jobs:
18 | check:
19 | runs-on: ubuntu-latest
20 | steps:
21 | # Ensure terraform-devex-repos is updated on version changes.
22 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
23 | # Ensure terraform-devex-repos is updated on version changes.
24 | - uses: miniscruff/changie-action@6dcc2533cac0495148ed4046c438487e4dceaa23 # v2.0.0
25 | with:
26 | version: latest
27 | args: batch patch --dry-run
28 |
--------------------------------------------------------------------------------
/.github/workflows/compliance.yml:
--------------------------------------------------------------------------------
1 | name: compliance
2 |
3 | on:
4 | pull_request:
5 |
6 | permissions:
7 | contents: read
8 |
9 | jobs:
10 | # Reference: ENGSRV-059
11 | copywrite:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
15 | - uses: hashicorp/setup-copywrite@32638da2d4e81d56a0764aa1547882fc4d209636 # v1.1.3
16 | - run: copywrite headers --plan
17 | - run: copywrite license --plan
18 |
--------------------------------------------------------------------------------
/.github/workflows/issue-comment-triage.yml:
--------------------------------------------------------------------------------
1 | # DO NOT EDIT - This GitHub Workflow is managed by automation
2 | # https://github.com/hashicorp/terraform-devex-repos
3 | name: Issue Comment Triage
4 |
5 | on:
6 | issue_comment:
7 | types: [created]
8 |
9 | jobs:
10 | issue_comment_triage:
11 | runs-on: ubuntu-latest
12 | env:
13 | # issue_comment events are triggered by comments on issues and pull requests. Checking the
14 | # value of github.event.issue.pull_request tells us whether the issue is an issue or is
15 | # actually a pull request, allowing us to dynamically set the gh subcommand:
16 | # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#issue_comment-on-issues-only-or-pull-requests-only
17 | COMMAND: ${{ github.event.issue.pull_request && 'pr' || 'issue' }}
18 | GH_TOKEN: ${{ github.token }}
19 | steps:
20 | - name: 'Remove waiting-response on comment'
21 | run: gh ${{ env.COMMAND }} edit ${{ github.event.issue.html_url }} --remove-label waiting-response
22 |
--------------------------------------------------------------------------------
/.github/workflows/issue-opened.yml:
--------------------------------------------------------------------------------
1 | name: Issue Opened Triage
2 |
3 | on:
4 | issues:
5 | types: [opened]
6 |
7 | jobs:
8 | issue_triage:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
12 | - uses: github/issue-labeler@c1b0f9f52a63158c4adc09425e858e87b32e9685 # v3.4
13 | with:
14 | repo-token: "${{ secrets.GITHUB_TOKEN }}"
15 | configuration-path: .github/labeler-issue-triage.yml
16 | enable-versioned-regex: 0
17 |
--------------------------------------------------------------------------------
/.github/workflows/lock.yml:
--------------------------------------------------------------------------------
1 | # DO NOT EDIT - This GitHub Workflow is managed by automation
2 | # https://github.com/hashicorp/terraform-devex-repos
3 | name: 'Lock Threads'
4 |
5 | on:
6 | schedule:
7 | - cron: '51 13 * * *'
8 |
9 | jobs:
10 | lock:
11 | runs-on: ubuntu-latest
12 | steps:
13 | # NOTE: When TSCCR updates the GitHub action version, update the template workflow file to avoid drift:
14 | # https://github.com/hashicorp/terraform-devex-repos/blob/main/modules/repo/workflows/lock.tftpl
15 | - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 # v5.0.1
16 | with:
17 | github-token: ${{ github.token }}
18 | issue-inactive-days: '30'
19 | issue-lock-reason: resolved
20 | pr-inactive-days: '30'
21 | pr-lock-reason: resolved
22 |
--------------------------------------------------------------------------------
/.github/workflows/pull-request.yml:
--------------------------------------------------------------------------------
1 | name: "Pull Request Triage"
2 |
3 | on: [pull_request_target]
4 |
5 | permissions:
6 | # CodelyTV/pr-size-labeler uses issues URL for labeling
7 | issues: write
8 | pull-requests: write
9 |
10 | jobs:
11 | triage:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
15 | with:
16 | configuration-path: .github/labeler-pull-request-triage.yml
17 | repo-token: "${{ secrets.GITHUB_TOKEN }}"
18 | - uses: CodelyTV/pr-size-labeler@4ec67706cd878fbc1c8db0a5dcd28b6bb412e85a # v1.10.3
19 | with:
20 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
21 | xs_label: 'size/XS'
22 | xs_max_size: '30'
23 | s_label: 'size/S'
24 | s_max_size: '60'
25 | m_label: 'size/M'
26 | m_max_size: '150'
27 | l_label: 'size/L'
28 | l_max_size: '300'
29 | xl_label: 'size/XL'
30 | message_if_xl: ''
31 | files_to_ignore: 'go.sum'
32 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Tests
2 | on:
3 | pull_request:
4 | branches: [ main ]
5 | paths-ignore:
6 | - 'README.md'
7 | - 'CHANGELOG.md'
8 | - 'website/*'
9 | push:
10 | branches: [ main ]
11 | paths-ignore:
12 | - 'README.md'
13 | - 'CHANGELOG.md'
14 | - 'website/*'
15 | jobs:
16 | build:
17 | name: Build
18 | runs-on: ubuntu-latest
19 | timeout-minutes: 5
20 | steps:
21 |
22 | - name: Check out code into the Go module directory
23 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
24 |
25 | - name: Set up Go
26 | uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
27 | with:
28 | go-version-file: 'go.mod'
29 | id: go
30 |
31 | - name: Run linters
32 | uses: golangci/golangci-lint-action@4afd733a84b1f43292c63897423277bb7f4313a9 # v8.0.0
33 | with:
34 | version: latest
35 |
36 | # We need the latest version of Terraform for our documentation generation to use
37 | - name: Set up Terraform
38 | uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # v3.1.2
39 | with:
40 | terraform_wrapper: false
41 |
42 | - name: Generate
43 | run: make generate
44 |
45 | - name: Confirm no diff
46 | run: |
47 | git diff --compact-summary --exit-code || \
48 | (echo "*** Unexpected differences after code generation. Run 'make generate' and commit."; exit 1)
49 |
50 | - name: Build
51 | run: make build
52 |
53 |
54 | # run acceptance tests in a matrix with Terraform core versions
55 | test:
56 | name: 'Acc. Tests (OS: ${{ matrix.os }} / TF: ${{ matrix.terraform }})'
57 | needs: build
58 | runs-on: ${{ matrix.os }}
59 | timeout-minutes: 15
60 | strategy:
61 | fail-fast: false
62 | matrix:
63 | os:
64 | - macos-latest
65 | - windows-latest
66 | - ubuntu-latest
67 | terraform: ${{ fromJSON(vars.TF_VERSIONS_PROTOCOL_V5) }}
68 | steps:
69 |
70 | - name: Check out code
71 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
72 |
73 | - name: Setup Go
74 | uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
75 | with:
76 | go-version-file: 'go.mod'
77 | check-latest: true
78 |
79 | - name: Setup Terraform ${{ matrix.terraform }}
80 | uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # v3.1.2
81 | with:
82 | terraform_version: ${{ matrix.terraform }}
83 | terraform_wrapper: false
84 |
85 | - name: Run acceptance test
86 | run: make testacc
87 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.dll
2 | *.exe
3 | .DS_Store
4 | example.tf
5 | terraform.tfplan
6 | terraform.tfstate
7 | bin/
8 | modules-dev/
9 | /pkg/
10 | website/.vagrant
11 | website/.bundle
12 | website/build
13 | website/node_modules
14 | .vagrant/
15 | *.backup
16 | ./*.tfstate
17 | .terraform/
18 | *.log
19 | *.bak
20 | *~
21 | .*.swp
22 | .idea
23 | *.iml
24 | *.test
25 | *.iml
26 |
27 | archive/*.zip
28 | website/vendor
29 |
30 | # Test exclusions
31 | !command/test-fixtures/**/*.tfstate
32 | !command/test-fixtures/**/.terraform/
33 |
--------------------------------------------------------------------------------
/.golangci.yml:
--------------------------------------------------------------------------------
1 | version: "2"
2 | linters:
3 | default: none
4 | enable:
5 | - copyloopvar
6 | - durationcheck
7 | - errcheck
8 | - forcetypeassert
9 | - godot
10 | - govet
11 | - ineffassign
12 | - makezero
13 | - misspell
14 | - nilerr
15 | - predeclared
16 | - staticcheck
17 | - unconvert
18 | - unparam
19 | - unused
20 | - usetesting
21 | exclusions:
22 | generated: lax
23 | presets:
24 | - comments
25 | - common-false-positives
26 | - legacy
27 | - std-error-handling
28 | paths:
29 | - third_party$
30 | - builtin$
31 | - examples$
32 | issues:
33 | max-issues-per-linter: 0
34 | max-same-issues: 0
35 | formatters:
36 | enable:
37 | - gofmt
38 | exclusions:
39 | generated: lax
40 | paths:
41 | - third_party$
42 | - builtin$
43 | - examples$
44 |
--------------------------------------------------------------------------------
/.release/ci.hcl:
--------------------------------------------------------------------------------
1 | # Reference: https://github.com/hashicorp/crt-core-helloworld/blob/main/.release/ci.hcl (private repository)
2 |
3 | schema = "2"
4 |
5 | project "terraform-provider-archive" {
6 | // team is currently unused and has no meaning
7 | // but is required to be non-empty by CRT orchestator
8 | team = "_UNUSED_"
9 |
10 | slack {
11 | notification_channel = "C02BASDVCDT" // #feed-terraform-sdk
12 | }
13 |
14 | github {
15 | organization = "hashicorp"
16 | repository = "terraform-provider-archive"
17 | release_branches = ["main", "release/**"]
18 | }
19 | }
20 |
21 | event "merge" {
22 | }
23 |
24 | event "build" {
25 | action "build" {
26 | depends = ["merge"]
27 |
28 | organization = "hashicorp"
29 | repository = "terraform-provider-archive"
30 | workflow = "build"
31 | }
32 | }
33 |
34 | event "prepare" {
35 | # `prepare` is the Common Release Tooling (CRT) artifact processing workflow.
36 | # It prepares artifacts for potential promotion to staging and production.
37 | # For example, it scans and signs artifacts.
38 |
39 | depends = ["build"]
40 |
41 | action "prepare" {
42 | organization = "hashicorp"
43 | repository = "crt-workflows-common"
44 | workflow = "prepare"
45 | depends = ["build"]
46 | }
47 |
48 | notification {
49 | on = "fail"
50 | }
51 | }
52 |
53 | event "trigger-staging" {
54 | }
55 |
56 | event "promote-staging" {
57 | action "promote-staging" {
58 | organization = "hashicorp"
59 | repository = "crt-workflows-common"
60 | workflow = "promote-staging"
61 | depends = null
62 | config = "release-metadata.hcl"
63 | }
64 |
65 | depends = ["trigger-staging"]
66 |
67 | notification {
68 | on = "always"
69 | }
70 | }
71 |
72 | event "trigger-production" {
73 | }
74 |
75 | event "promote-production" {
76 | action "promote-production" {
77 | organization = "hashicorp"
78 | repository = "crt-workflows-common"
79 | workflow = "promote-production"
80 | depends = null
81 | config = ""
82 | }
83 |
84 | depends = ["trigger-production"]
85 |
86 | notification {
87 | on = "always"
88 | }
89 | }
--------------------------------------------------------------------------------
/.release/release-metadata.hcl:
--------------------------------------------------------------------------------
1 | url_source_repository = "https://github.com/hashicorp/terraform-provider-archive"
2 | url_project_website = "https://registry.terraform.io/providers/hashicorp/archive"
3 | url_license = "https://github.com/hashicorp/terraform-provider-archive/blob/main/LICENSE"
4 | url_release_notes = "https://github.com/hashicorp/terraform-provider-archive/blob/main/CHANGELOG.md"
--------------------------------------------------------------------------------
/.release/security-scan.hcl:
--------------------------------------------------------------------------------
1 | # Reference: https://github.com/hashicorp/security-scanner/blob/main/CONFIG.md#binary (private repository)
2 |
3 | binary {
4 | secrets {
5 | all = true
6 | }
7 | go_modules = true
8 | osv = true
9 | oss_index = false
10 | nvd = false
11 | }
--------------------------------------------------------------------------------
/.release/terraform-provider-archive-artifacts.hcl:
--------------------------------------------------------------------------------
1 | schema = 1
2 | artifacts {
3 | # This should match the `matrix` in .github/workflows/build.yml
4 | zip = [
5 | "terraform-provider-archive_${version}_darwin_amd64.zip",
6 | "terraform-provider-archive_${version}_darwin_arm64.zip",
7 | "terraform-provider-archive_${version}_freebsd_386.zip",
8 | "terraform-provider-archive_${version}_freebsd_amd64.zip",
9 | "terraform-provider-archive_${version}_freebsd_arm.zip",
10 | "terraform-provider-archive_${version}_linux_386.zip",
11 | "terraform-provider-archive_${version}_linux_amd64.zip",
12 | "terraform-provider-archive_${version}_linux_arm.zip",
13 | "terraform-provider-archive_${version}_linux_arm64.zip",
14 | "terraform-provider-archive_${version}_windows_386.zip",
15 | "terraform-provider-archive_${version}_windows_amd64.zip",
16 | ]
17 | }
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## 2.7.1 (May 12, 2025)
2 |
3 | NOTES:
4 |
5 | * Update dependencies: ([#425](https://github.com/hashicorp/terraform-provider-archive/pull/425))
6 |
7 | ## 2.7.1-alpha1 (May 08, 2025)
8 |
9 | NOTES:
10 |
11 | * all: This release is being used to test new build and release actions. ([#427](https://github.com/hashicorp/terraform-provider-archive/issues/427))
12 |
13 | ## 2.7.0 (December 05, 2024)
14 |
15 | FEATURES:
16 |
17 | * resource/archive_file: Remove `deprecated` status ([#218](https://github.com/hashicorp/terraform-provider-archive/issues/218))
18 |
19 | ## 2.6.0 (September 09, 2024)
20 |
21 | FEATURES:
22 |
23 | * data-source/archive_file: Add support for creating `tar.gz` archive files. ([#277](https://github.com/hashicorp/terraform-provider-archive/issues/277))
24 | * resource/archive_file: Add support for creating `tar.gz` archive files. ([#277](https://github.com/hashicorp/terraform-provider-archive/issues/277))
25 |
26 | ## 2.5.0 (July 31, 2024)
27 |
28 | ENHANCEMENTS:
29 |
30 | * data-source/archive_file: Add glob pattern matching support to the `excludes` attribute. ([#354](https://github.com/hashicorp/terraform-provider-archive/issues/354))
31 | * resource/archive_file: Add glob pattern matching support to the `excludes` attribute. ([#354](https://github.com/hashicorp/terraform-provider-archive/issues/354))
32 |
33 | ## 2.4.2 (January 24, 2024)
34 |
35 | BUG FIXES:
36 |
37 | * data-source/archive_file: Prevent error when generating archive from source containing symbolically linked directories, and `exclude_symlink_directories` is set to true ([#298](https://github.com/hashicorp/terraform-provider-archive/issues/298))
38 | * resource/archive_file: Prevent error when generating archive from source containing symbolically linked directories, and `exclude_symlink_directories` is set to true ([#298](https://github.com/hashicorp/terraform-provider-archive/issues/298))
39 | * resource/archive_file: Return error when generated archive would be empty ([#298](https://github.com/hashicorp/terraform-provider-archive/issues/298))
40 | * data-source/archive_file: Return error when generated archive would be empty ([#298](https://github.com/hashicorp/terraform-provider-archive/issues/298))
41 |
42 | ## 2.4.1 (December 18, 2023)
43 |
44 | NOTES:
45 |
46 | * This release introduces no functional changes. It does however include dependency updates which address upstream CVEs. ([#287](https://github.com/hashicorp/terraform-provider-archive/issues/287))
47 |
48 | ## 2.4.0 (June 07, 2023)
49 |
50 | NOTES:
51 |
52 | * This Go module has been updated to Go 1.19 per the [Go support policy](https://golang.org/doc/devel/release.html#policy). Any consumers building on earlier Go versions may experience errors. ([#200](https://github.com/hashicorp/terraform-provider-archive/issues/200))
53 |
54 | ENHANCEMENTS:
55 |
56 | * data-source/archive_file: Added attribute `exclude_symlink_directories` which will exclude symbolically linked directories from the archive when set to true. Defaults to false ([#183](https://github.com/hashicorp/terraform-provider-archive/issues/183))
57 | * resource/archive_file: Added attribute `exclude_symlink_directories` which will exclude symbolically linked directories from the archive when set to true. Defaults to false ([#183](https://github.com/hashicorp/terraform-provider-archive/issues/183))
58 |
59 | BUG FIXES:
60 |
61 | * data-source/archive_file: Symbolically linked directories are included in archives by default rather than generating an error ([#183](https://github.com/hashicorp/terraform-provider-archive/issues/183))
62 | * resource/archive_file: Symbolically linked directories are included in archives by default rather than generating an error ([#183](https://github.com/hashicorp/terraform-provider-archive/issues/183))
63 | ## 2.3.0 (January 18, 2023)
64 |
65 | NOTES:
66 |
67 | * Provider has been re-written using the new [`terraform-plugin-framework`](https://www.terraform.io/plugin/framework) ([#170](https://github.com/hashicorp/terraform-provider-archive/pull/170)).
68 |
69 | ## 2.2.0 (May 04, 2021)
70 |
71 | ENHANCEMENTS:
72 |
73 | * New opt-in flag to specify the `output_file_mode` to produce more deterministic behavior across operating systems. ([#90](https://github.com/terraform-providers/terraform-provider-archive/issues/90))
74 |
75 | DEPENDENCIES:
76 |
77 | * Update `github.com/hashicorp/terraform-plugin-sdk/v2` to `v2.6.1` ([#95](https://github.com/terraform-providers/terraform-provider-archive/issues/95))
78 |
79 | NOTES:
80 |
81 | Changelogs now list all dependency updates in a separate section. These are understood to have no user-facing changes except those detailed in earlier sections.
82 |
83 | ## 2.1.0 (February 19, 2021)
84 |
85 | Binary releases of this provider now include the darwin-arm64 platform. This version contains no further changes.
86 |
87 | ## 2.0.0 (October 14, 2020)
88 |
89 | Binary releases of this provider now include the linux-arm64 platform.
90 |
91 | BREAKING CHANGES:
92 |
93 | * Upgrade to version 2 of the Terraform Plugin SDK, which drops support for Terraform 0.11. This provider will continue to work as expected for users of Terraform 0.11, which will not download the new version. ([#72](https://github.com/terraform-providers/terraform-provider-archive/issues/72))
94 |
95 | BUG FIXES:
96 |
97 | * Fixed path bug with exclusions on Windows ([#71](https://github.com/terraform-providers/terraform-provider-archive/issues/71))
98 |
99 | ## 1.3.0 (September 30, 2019)
100 |
101 | NOTES:
102 |
103 | * The provider has switched to the standalone TF SDK, there should be no noticeable impact on compatibility. ([#50](https://github.com/terraform-providers/terraform-provider-archive/issues/50))
104 |
105 | ## 1.2.2 (April 30, 2019)
106 |
107 | * This release includes another Terraform SDK upgrade intended to align with that being used for other providers as we prepare for the Core v0.12.0 release. It should have no significant changes in behavior for this provider.
108 |
109 | ## 1.2.1 (April 12, 2019)
110 |
111 | * This release includes only a Terraform SDK upgrade intended to align with that being used for other providers as we prepare for the Core v0.12.0 release. It should have no significant changes in behavior for this provider.
112 |
113 | ## 1.2.0 (March 20, 2019)
114 |
115 | IMPROVEMENTS:
116 |
117 | * The provider is now compatible with Terraform v0.12, while retaining compatibility with prior versions.
118 |
119 | ## 1.1.0 (July 30, 2018)
120 |
121 | ENHANCEMENTS:
122 |
123 | * Add `excludes` to the `archive_file` data source to exclude files when using `source_dir` ([#18](https://github.com/terraform-providers/terraform-provider-archive/issues/18))
124 |
125 | BUG FIXES:
126 |
127 | * Fix zip file path names to use forward slash on Windows ([#25](https://github.com/terraform-providers/terraform-provider-archive/issues/25))
128 | * Fix panic in `filepath.Walk` call ([#26](https://github.com/terraform-providers/terraform-provider-archive/issues/26))
129 |
130 | ## 1.0.3 (March 23, 2018)
131 |
132 | BUG FIXES:
133 |
134 | * Fix modified time affecting zip contents and causing spurious diffs ([#16](https://github.com/terraform-providers/terraform-provider-archive/issues/16))
135 |
136 | ## 1.0.2 (March 16, 2018)
137 |
138 | BUG FIXES:
139 |
140 | * Fix issue with flags not being copied on a single file and regression introduced in 1.0.1 ([#13](https://github.com/terraform-providers/terraform-provider-archive/issues/13))
141 |
142 | ## 1.0.1 (March 13, 2018)
143 |
144 | BUG FIXES:
145 |
146 | * Fix issue with flags not being copied in to archive ([#9](https://github.com/terraform-providers/terraform-provider-archive/issues/9))
147 |
148 | ## 1.0.0 (September 15, 2017)
149 |
150 | * No changes from 0.1.0; just adjusting to [the new version numbering scheme](https://www.hashicorp.com/blog/hashicorp-terraform-provider-versioning/).
151 |
152 | ## 0.1.0 (June 20, 2017)
153 |
154 | NOTES:
155 |
156 | * Same functionality as that of Terraform 0.9.8. Repacked as part of [Provider Splitout](https://www.hashicorp.com/blog/upcoming-provider-changes-in-terraform-0-10/)
157 |
--------------------------------------------------------------------------------
/DESIGN.md:
--------------------------------------------------------------------------------
1 | # Archive Provider Design
2 |
3 | The Archive Provider offers focussed functionality specifically geared towards archiving files. The provider generates
4 | zip archives of individual or multiple files.
5 |
6 | Below we have a collection of _Goals_ and _Patterns_: they represent the guiding principles applied during the
7 | development of this provider. Some are in place, others are ongoing processes, others are still just inspirational.
8 |
9 | ## Goals
10 |
11 | * [_Stability over features_](.github/CONTRIBUTING.md)
12 | * Provide a mechanism for generating zip archives.
13 |
14 | General to development:
15 |
16 | * **Avoid repetition**: the entities managed can sometimes require similar pieces of logic and/or schema to be realised.
17 | When this happens it's important to keep the code shared in communal sections, so to avoid having to modify code in
18 | multiple places when they start changing.
19 | * **Test expectations as well as bugs**: While it's typical to write tests to exercise a new functionality, it's key to
20 | also provide tests for issues that get identified and fixed, so to prove resolution as well as avoid regression.
21 | * **Automate boring tasks**: Processes that are manual, repetitive and can be automated, should be. In addition to be a
22 | time-saving practice, this ensures consistency and reduces human error (ex. static code analysis).
23 | * **Semantic versioning**: Adhering to HashiCorp's own
24 | [Versioning Specification](https://www.terraform.io/plugin/sdkv2/best-practices/versioning#versioning-specification)
25 | ensures we provide a consistent practitioner experience, and a clear process to deprecation and decommission.
26 |
--------------------------------------------------------------------------------
/GNUmakefile:
--------------------------------------------------------------------------------
1 | default: build
2 |
3 | build:
4 | go build -v ./...
5 |
6 | install: build
7 | go install -v ./...
8 |
9 | # See https://golangci-lint.run/
10 | lint:
11 | golangci-lint run
12 |
13 | # Generate docs and copywrite headers
14 | generate:
15 | cd tools; go generate ./...
16 |
17 | fmt:
18 | gofmt -s -w -e .
19 |
20 | test:
21 | go test -v -cover -timeout=120s -parallel=4 ./...
22 |
23 | testacc:
24 | TF_ACC=1 go test -v -cover -timeout 120m ./...
25 |
26 | .PHONY: build install lint generate fmt test testacc
27 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2017 HashiCorp, Inc.
2 |
3 | Mozilla Public License Version 2.0
4 | ==================================
5 |
6 | 1. Definitions
7 | --------------
8 |
9 | 1.1. "Contributor"
10 | means each individual or legal entity that creates, contributes to
11 | the creation of, or owns Covered Software.
12 |
13 | 1.2. "Contributor Version"
14 | means the combination of the Contributions of others (if any) used
15 | by a Contributor and that particular Contributor's Contribution.
16 |
17 | 1.3. "Contribution"
18 | means Covered Software of a particular Contributor.
19 |
20 | 1.4. "Covered Software"
21 | means Source Code Form to which the initial Contributor has attached
22 | the notice in Exhibit A, the Executable Form of such Source Code
23 | Form, and Modifications of such Source Code Form, in each case
24 | including portions thereof.
25 |
26 | 1.5. "Incompatible With Secondary Licenses"
27 | means
28 |
29 | (a) that the initial Contributor has attached the notice described
30 | in Exhibit B to the Covered Software; or
31 |
32 | (b) that the Covered Software was made available under the terms of
33 | version 1.1 or earlier of the License, but not also under the
34 | terms of a Secondary License.
35 |
36 | 1.6. "Executable Form"
37 | means any form of the work other than Source Code Form.
38 |
39 | 1.7. "Larger Work"
40 | means a work that combines Covered Software with other material, in
41 | a separate file or files, that is not Covered Software.
42 |
43 | 1.8. "License"
44 | means this document.
45 |
46 | 1.9. "Licensable"
47 | means having the right to grant, to the maximum extent possible,
48 | whether at the time of the initial grant or subsequently, any and
49 | all of the rights conveyed by this License.
50 |
51 | 1.10. "Modifications"
52 | means any of the following:
53 |
54 | (a) any file in Source Code Form that results from an addition to,
55 | deletion from, or modification of the contents of Covered
56 | Software; or
57 |
58 | (b) any new file in Source Code Form that contains any Covered
59 | Software.
60 |
61 | 1.11. "Patent Claims" of a Contributor
62 | means any patent claim(s), including without limitation, method,
63 | process, and apparatus claims, in any patent Licensable by such
64 | Contributor that would be infringed, but for the grant of the
65 | License, by the making, using, selling, offering for sale, having
66 | made, import, or transfer of either its Contributions or its
67 | Contributor Version.
68 |
69 | 1.12. "Secondary License"
70 | means either the GNU General Public License, Version 2.0, the GNU
71 | Lesser General Public License, Version 2.1, the GNU Affero General
72 | Public License, Version 3.0, or any later versions of those
73 | licenses.
74 |
75 | 1.13. "Source Code Form"
76 | means the form of the work preferred for making modifications.
77 |
78 | 1.14. "You" (or "Your")
79 | means an individual or a legal entity exercising rights under this
80 | License. For legal entities, "You" includes any entity that
81 | controls, is controlled by, or is under common control with You. For
82 | purposes of this definition, "control" means (a) the power, direct
83 | or indirect, to cause the direction or management of such entity,
84 | whether by contract or otherwise, or (b) ownership of more than
85 | fifty percent (50%) of the outstanding shares or beneficial
86 | ownership of such entity.
87 |
88 | 2. License Grants and Conditions
89 | --------------------------------
90 |
91 | 2.1. Grants
92 |
93 | Each Contributor hereby grants You a world-wide, royalty-free,
94 | non-exclusive license:
95 |
96 | (a) under intellectual property rights (other than patent or trademark)
97 | Licensable by such Contributor to use, reproduce, make available,
98 | modify, display, perform, distribute, and otherwise exploit its
99 | Contributions, either on an unmodified basis, with Modifications, or
100 | as part of a Larger Work; and
101 |
102 | (b) under Patent Claims of such Contributor to make, use, sell, offer
103 | for sale, have made, import, and otherwise transfer either its
104 | Contributions or its Contributor Version.
105 |
106 | 2.2. Effective Date
107 |
108 | The licenses granted in Section 2.1 with respect to any Contribution
109 | become effective for each Contribution on the date the Contributor first
110 | distributes such Contribution.
111 |
112 | 2.3. Limitations on Grant Scope
113 |
114 | The licenses granted in this Section 2 are the only rights granted under
115 | this License. No additional rights or licenses will be implied from the
116 | distribution or licensing of Covered Software under this License.
117 | Notwithstanding Section 2.1(b) above, no patent license is granted by a
118 | Contributor:
119 |
120 | (a) for any code that a Contributor has removed from Covered Software;
121 | or
122 |
123 | (b) for infringements caused by: (i) Your and any other third party's
124 | modifications of Covered Software, or (ii) the combination of its
125 | Contributions with other software (except as part of its Contributor
126 | Version); or
127 |
128 | (c) under Patent Claims infringed by Covered Software in the absence of
129 | its Contributions.
130 |
131 | This License does not grant any rights in the trademarks, service marks,
132 | or logos of any Contributor (except as may be necessary to comply with
133 | the notice requirements in Section 3.4).
134 |
135 | 2.4. Subsequent Licenses
136 |
137 | No Contributor makes additional grants as a result of Your choice to
138 | distribute the Covered Software under a subsequent version of this
139 | License (see Section 10.2) or under the terms of a Secondary License (if
140 | permitted under the terms of Section 3.3).
141 |
142 | 2.5. Representation
143 |
144 | Each Contributor represents that the Contributor believes its
145 | Contributions are its original creation(s) or it has sufficient rights
146 | to grant the rights to its Contributions conveyed by this License.
147 |
148 | 2.6. Fair Use
149 |
150 | This License is not intended to limit any rights You have under
151 | applicable copyright doctrines of fair use, fair dealing, or other
152 | equivalents.
153 |
154 | 2.7. Conditions
155 |
156 | Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
157 | in Section 2.1.
158 |
159 | 3. Responsibilities
160 | -------------------
161 |
162 | 3.1. Distribution of Source Form
163 |
164 | All distribution of Covered Software in Source Code Form, including any
165 | Modifications that You create or to which You contribute, must be under
166 | the terms of this License. You must inform recipients that the Source
167 | Code Form of the Covered Software is governed by the terms of this
168 | License, and how they can obtain a copy of this License. You may not
169 | attempt to alter or restrict the recipients' rights in the Source Code
170 | Form.
171 |
172 | 3.2. Distribution of Executable Form
173 |
174 | If You distribute Covered Software in Executable Form then:
175 |
176 | (a) such Covered Software must also be made available in Source Code
177 | Form, as described in Section 3.1, and You must inform recipients of
178 | the Executable Form how they can obtain a copy of such Source Code
179 | Form by reasonable means in a timely manner, at a charge no more
180 | than the cost of distribution to the recipient; and
181 |
182 | (b) You may distribute such Executable Form under the terms of this
183 | License, or sublicense it under different terms, provided that the
184 | license for the Executable Form does not attempt to limit or alter
185 | the recipients' rights in the Source Code Form under this License.
186 |
187 | 3.3. Distribution of a Larger Work
188 |
189 | You may create and distribute a Larger Work under terms of Your choice,
190 | provided that You also comply with the requirements of this License for
191 | the Covered Software. If the Larger Work is a combination of Covered
192 | Software with a work governed by one or more Secondary Licenses, and the
193 | Covered Software is not Incompatible With Secondary Licenses, this
194 | License permits You to additionally distribute such Covered Software
195 | under the terms of such Secondary License(s), so that the recipient of
196 | the Larger Work may, at their option, further distribute the Covered
197 | Software under the terms of either this License or such Secondary
198 | License(s).
199 |
200 | 3.4. Notices
201 |
202 | You may not remove or alter the substance of any license notices
203 | (including copyright notices, patent notices, disclaimers of warranty,
204 | or limitations of liability) contained within the Source Code Form of
205 | the Covered Software, except that You may alter any license notices to
206 | the extent required to remedy known factual inaccuracies.
207 |
208 | 3.5. Application of Additional Terms
209 |
210 | You may choose to offer, and to charge a fee for, warranty, support,
211 | indemnity or liability obligations to one or more recipients of Covered
212 | Software. However, You may do so only on Your own behalf, and not on
213 | behalf of any Contributor. You must make it absolutely clear that any
214 | such warranty, support, indemnity, or liability obligation is offered by
215 | You alone, and You hereby agree to indemnify every Contributor for any
216 | liability incurred by such Contributor as a result of warranty, support,
217 | indemnity or liability terms You offer. You may include additional
218 | disclaimers of warranty and limitations of liability specific to any
219 | jurisdiction.
220 |
221 | 4. Inability to Comply Due to Statute or Regulation
222 | ---------------------------------------------------
223 |
224 | If it is impossible for You to comply with any of the terms of this
225 | License with respect to some or all of the Covered Software due to
226 | statute, judicial order, or regulation then You must: (a) comply with
227 | the terms of this License to the maximum extent possible; and (b)
228 | describe the limitations and the code they affect. Such description must
229 | be placed in a text file included with all distributions of the Covered
230 | Software under this License. Except to the extent prohibited by statute
231 | or regulation, such description must be sufficiently detailed for a
232 | recipient of ordinary skill to be able to understand it.
233 |
234 | 5. Termination
235 | --------------
236 |
237 | 5.1. The rights granted under this License will terminate automatically
238 | if You fail to comply with any of its terms. However, if You become
239 | compliant, then the rights granted under this License from a particular
240 | Contributor are reinstated (a) provisionally, unless and until such
241 | Contributor explicitly and finally terminates Your grants, and (b) on an
242 | ongoing basis, if such Contributor fails to notify You of the
243 | non-compliance by some reasonable means prior to 60 days after You have
244 | come back into compliance. Moreover, Your grants from a particular
245 | Contributor are reinstated on an ongoing basis if such Contributor
246 | notifies You of the non-compliance by some reasonable means, this is the
247 | first time You have received notice of non-compliance with this License
248 | from such Contributor, and You become compliant prior to 30 days after
249 | Your receipt of the notice.
250 |
251 | 5.2. If You initiate litigation against any entity by asserting a patent
252 | infringement claim (excluding declaratory judgment actions,
253 | counter-claims, and cross-claims) alleging that a Contributor Version
254 | directly or indirectly infringes any patent, then the rights granted to
255 | You by any and all Contributors for the Covered Software under Section
256 | 2.1 of this License shall terminate.
257 |
258 | 5.3. In the event of termination under Sections 5.1 or 5.2 above, all
259 | end user license agreements (excluding distributors and resellers) which
260 | have been validly granted by You or Your distributors under this License
261 | prior to termination shall survive termination.
262 |
263 | ************************************************************************
264 | * *
265 | * 6. Disclaimer of Warranty *
266 | * ------------------------- *
267 | * *
268 | * Covered Software is provided under this License on an "as is" *
269 | * basis, without warranty of any kind, either expressed, implied, or *
270 | * statutory, including, without limitation, warranties that the *
271 | * Covered Software is free of defects, merchantable, fit for a *
272 | * particular purpose or non-infringing. The entire risk as to the *
273 | * quality and performance of the Covered Software is with You. *
274 | * Should any Covered Software prove defective in any respect, You *
275 | * (not any Contributor) assume the cost of any necessary servicing, *
276 | * repair, or correction. This disclaimer of warranty constitutes an *
277 | * essential part of this License. No use of any Covered Software is *
278 | * authorized under this License except under this disclaimer. *
279 | * *
280 | ************************************************************************
281 |
282 | ************************************************************************
283 | * *
284 | * 7. Limitation of Liability *
285 | * -------------------------- *
286 | * *
287 | * Under no circumstances and under no legal theory, whether tort *
288 | * (including negligence), contract, or otherwise, shall any *
289 | * Contributor, or anyone who distributes Covered Software as *
290 | * permitted above, be liable to You for any direct, indirect, *
291 | * special, incidental, or consequential damages of any character *
292 | * including, without limitation, damages for lost profits, loss of *
293 | * goodwill, work stoppage, computer failure or malfunction, or any *
294 | * and all other commercial damages or losses, even if such party *
295 | * shall have been informed of the possibility of such damages. This *
296 | * limitation of liability shall not apply to liability for death or *
297 | * personal injury resulting from such party's negligence to the *
298 | * extent applicable law prohibits such limitation. Some *
299 | * jurisdictions do not allow the exclusion or limitation of *
300 | * incidental or consequential damages, so this exclusion and *
301 | * limitation may not apply to You. *
302 | * *
303 | ************************************************************************
304 |
305 | 8. Litigation
306 | -------------
307 |
308 | Any litigation relating to this License may be brought only in the
309 | courts of a jurisdiction where the defendant maintains its principal
310 | place of business and such litigation shall be governed by laws of that
311 | jurisdiction, without reference to its conflict-of-law provisions.
312 | Nothing in this Section shall prevent a party's ability to bring
313 | cross-claims or counter-claims.
314 |
315 | 9. Miscellaneous
316 | ----------------
317 |
318 | This License represents the complete agreement concerning the subject
319 | matter hereof. If any provision of this License is held to be
320 | unenforceable, such provision shall be reformed only to the extent
321 | necessary to make it enforceable. Any law or regulation which provides
322 | that the language of a contract shall be construed against the drafter
323 | shall not be used to construe this License against a Contributor.
324 |
325 | 10. Versions of the License
326 | ---------------------------
327 |
328 | 10.1. New Versions
329 |
330 | Mozilla Foundation is the license steward. Except as provided in Section
331 | 10.3, no one other than the license steward has the right to modify or
332 | publish new versions of this License. Each version will be given a
333 | distinguishing version number.
334 |
335 | 10.2. Effect of New Versions
336 |
337 | You may distribute the Covered Software under the terms of the version
338 | of the License under which You originally received the Covered Software,
339 | or under the terms of any subsequent version published by the license
340 | steward.
341 |
342 | 10.3. Modified Versions
343 |
344 | If you create software not governed by this License, and you want to
345 | create a new license for such software, you may create and use a
346 | modified version of this License if you rename the license and remove
347 | any references to the name of the license steward (except to note that
348 | such modified license differs from this License).
349 |
350 | 10.4. Distributing Source Code Form that is Incompatible With Secondary
351 | Licenses
352 |
353 | If You choose to distribute Source Code Form that is Incompatible With
354 | Secondary Licenses under the terms of this version of the License, the
355 | notice described in Exhibit B of this License must be attached.
356 |
357 | Exhibit A - Source Code Form License Notice
358 | -------------------------------------------
359 |
360 | This Source Code Form is subject to the terms of the Mozilla Public
361 | License, v. 2.0. If a copy of the MPL was not distributed with this
362 | file, You can obtain one at http://mozilla.org/MPL/2.0/.
363 |
364 | If it is not possible or desirable to put the notice in a particular
365 | file, then You may include the notice in a location (such as a LICENSE
366 | file in a relevant directory) where a recipient would be likely to look
367 | for such a notice.
368 |
369 | You may add additional accurate notices of copyright ownership.
370 |
371 | Exhibit B - "Incompatible With Secondary Licenses" Notice
372 | ---------------------------------------------------------
373 |
374 | This Source Code Form is "Incompatible With Secondary Licenses", as
375 | defined by the Mozilla Public License, v. 2.0.
376 |
--------------------------------------------------------------------------------
/META.d/_summary.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | schema: 1.1
3 |
4 | partition: tf-ecosystem
5 |
6 | summary:
7 | owner: team-tf-core-plugins
8 | description: |
9 | Utility provider that provides a data source that can create zip archives for individual files or collections of files.
10 | visibility: public
11 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Terraform Provider: Archive
2 |
3 | The Archive provider interacts with files.
4 | It provides a data source that can create zip or tar.gz archives for individual files or
5 | collections of files.
6 |
7 | ## Documentation, questions and discussions
8 |
9 | Official documentation on how to use this provider can be found on the
10 | [Terraform Registry](https://registry.terraform.io/providers/hashicorp/archive/latest/docs).
11 | In case of specific questions or discussions, please use the
12 | HashiCorp [Terraform Providers Discuss forums](https://discuss.hashicorp.com/c/terraform-providers/31),
13 | in accordance with HashiCorp [Community Guidelines](https://www.hashicorp.com/community-guidelines).
14 |
15 | We also provide:
16 |
17 | * [Support](.github/SUPPORT.md) page for help when using the provider
18 | * [Contributing](.github/CONTRIBUTING.md) guidelines in case you want to help this project
19 | * [Design](DESIGN.md) documentation to understand the scope and maintenance decisions
20 |
21 | The remainder of this document will focus on the development aspects of the provider.
22 |
23 | ## Compatibility
24 |
25 | Compatibility table between this provider,
26 | the [Terraform Plugin Protocol](https://www.terraform.io/plugin/how-terraform-works#terraform-plugin-protocol)
27 | version it implements, and Terraform:
28 |
29 | | Archive Provider | Terraform Plugin Protocol | Terraform |
30 | |:----------------------:|:-------------------------:|:---------:|
31 | | `>= 2.x` | `5` | `>= 0.12` |
32 | | `>= 1.2.x`, `<= 1.3.x` | `4`, `5` | `>= 0.11` |
33 | | `<= 1.1.x` | `4` | `<= 0.11` |
34 |
35 | ## Requirements
36 |
37 | * [Terraform](https://www.terraform.io/downloads)
38 | * [Go](https://go.dev/doc/install) (1.23)
39 | * [GNU Make](https://www.gnu.org/software/make/)
40 | * [golangci-lint](https://golangci-lint.run/usage/install/#local-installation) (optional)
41 |
42 | ## Development
43 |
44 | ### Building
45 |
46 | 1. `git clone` this repository and `cd` into its directory
47 | 2. `make` will trigger the Golang build
48 |
49 | The provided `GNUmakefile` defines additional commands generally useful during development,
50 | like for running tests, generating documentation, code formatting and linting.
51 | Taking a look at it's content is recommended.
52 |
53 | ### Testing
54 |
55 | In order to test the provider, you can run
56 |
57 | * `make test` to run provider tests
58 | * `make testacc` to run provider acceptance tests
59 |
60 | It's important to note that acceptance tests (`testacc`) will actually spawn
61 | `terraform` and the provider. Read more about they work on the
62 | [official page](https://www.terraform.io/plugin/sdkv2/testing/acceptance-tests).
63 |
64 | ### Generating documentation
65 |
66 | This provider uses [terraform-plugin-docs](https://github.com/hashicorp/terraform-plugin-docs/)
67 | to generate documentation and store it in the `docs/` directory.
68 | Once a release is cut, the Terraform Registry will download the documentation from `docs/`
69 | and associate it with the release version. Read more about how this works on the
70 | [official page](https://www.terraform.io/registry/providers/docs).
71 |
72 | Use `make generate` to ensure the documentation is regenerated with any changes.
73 |
74 | ### Using a development build
75 |
76 | If [running tests and acceptance tests](#testing) isn't enough, it's possible to set up a local terraform configuration
77 | to use a development builds of the provider. This can be achieved by leveraging the Terraform CLI
78 | [configuration file development overrides](https://www.terraform.io/cli/config/config-file#development-overrides-for-provider-developers).
79 |
80 | First, use `make install` to place a fresh development build of the provider in your
81 | [`${GOBIN}`](https://pkg.go.dev/cmd/go#hdr-Compile_and_install_packages_and_dependencies)
82 | (defaults to `${GOPATH}/bin` or `${HOME}/go/bin` if `${GOPATH}` is not set). Repeat
83 | this every time you make changes to the provider locally.
84 |
85 | Then, setup your environment following [these instructions](https://www.terraform.io/plugin/debugging#terraform-cli-development-overrides)
86 | to make your local terraform use your local build.
87 |
88 | ### Testing GitHub Actions
89 |
90 | This project uses [GitHub Actions](https://docs.github.com/en/actions/automating-builds-and-tests) to realize its CI.
91 |
92 | Sometimes it might be helpful to locally reproduce the behaviour of those actions,
93 | and for this we use [act](https://github.com/nektos/act). Once installed, you can _simulate_ the actions executed
94 | when opening a PR with:
95 |
96 | ```shell
97 | # List of workflows for the 'pull_request' action
98 | $ act -l pull_request
99 |
100 | # Execute the workflows associated with the `pull_request' action
101 | $ act pull_request
102 | ```
103 |
104 | ## Releasing
105 |
106 | The releasable builds are generated from the [build GH workflow](./.github/workflows/build.yml) and the release/promotion process
107 | is completed via internal HashiCorp deployment tooling. Prior to release, the changelog should be updated in `main` with
108 | the changie tool, example:
109 |
110 | ```sh
111 | changie batch 2.7.2 && changie merge
112 | ```
113 |
114 | ## License
115 |
116 | [Mozilla Public License v2.0](./LICENSE)
117 |
--------------------------------------------------------------------------------
/docs/cdktf/python/data-sources/file.md:
--------------------------------------------------------------------------------
1 | ---
2 | page_title: "archive_file Data Source - terraform-provider-archive"
3 | subcategory: ""
4 | description: |-
5 | Generates an archive from content, a file, or directory of files. The archive is built during the terraform plan, so you must persist the archive through to the terraform apply. See the archive_file resource for an alternative if you cannot persist the file, such as in a multi-phase CI or build server context.
6 | ---
7 |
8 |
9 |
10 | # archive_file (Data Source)
11 |
12 | Generates an archive from content, a file, or directory of files. The archive is built during the terraform plan, so you must persist the archive through to the terraform apply. See the `archive_file` resource for an alternative if you cannot persist the file, such as in a multi-phase CI or build server context.
13 |
14 | ## Example Usage
15 |
16 | ```python
17 | # DO NOT EDIT. Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
18 | from constructs import Construct
19 | from cdktf import TerraformStack
20 | #
21 | # Provider bindings are generated by running `cdktf get`.
22 | # See https://cdk.tf/provider-generation for more details.
23 | #
24 | from imports.archive.data_archive_file import DataArchiveFile
25 | class MyConvertedCode(TerraformStack):
26 | def __init__(self, scope, name):
27 | super().__init__(scope, name)
28 | DataArchiveFile(self, "init",
29 | output_path="${path.module}/files/init.zip",
30 | source_file="${path.module}/init.tpl",
31 | type="zip"
32 | )
33 | ```
34 |
35 | ```python
36 | # DO NOT EDIT. Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
37 | from constructs import Construct
38 | from cdktf import Token, TerraformStack
39 | #
40 | # Provider bindings are generated by running `cdktf get`.
41 | # See https://cdk.tf/provider-generation for more details.
42 | #
43 | from imports.archive.data_archive_file import DataArchiveFile
44 | class MyConvertedCode(TerraformStack):
45 | def __init__(self, scope, name):
46 | super().__init__(scope, name)
47 | DataArchiveFile(self, "dotfiles",
48 | excludes=["${path.module}/unwanted.zip"],
49 | output_path="${path.module}/files/dotfiles.zip",
50 | source=[DataArchiveFileSource(
51 | content=Token.as_string(vimrc.rendered),
52 | filename=".vimrc"
53 | ), DataArchiveFileSource(
54 | content=Token.as_string(ssh_config.rendered),
55 | filename=".ssh/config"
56 | )
57 | ],
58 | type="zip"
59 | )
60 | ```
61 |
62 | ```python
63 | # DO NOT EDIT. Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
64 | from constructs import Construct
65 | from cdktf import TerraformStack
66 | #
67 | # Provider bindings are generated by running `cdktf get`.
68 | # See https://cdk.tf/provider-generation for more details.
69 | #
70 | from imports.archive.data_archive_file import DataArchiveFile
71 | class MyConvertedCode(TerraformStack):
72 | def __init__(self, scope, name):
73 | super().__init__(scope, name)
74 | DataArchiveFile(self, "lambda_my_function",
75 | output_file_mode="0666",
76 | output_path="${path.module}/files/lambda-my-function.js.zip",
77 | source_file="${path.module}/../lambda/my-function/index.js",
78 | type="zip"
79 | )
80 | ```
81 |
82 |
83 | ## Schema
84 |
85 | ### Required
86 |
87 | - `output_path` (String) The output of the archive file.
88 | - `type` (String) The type of archive to generate. NOTE: `zip` and `tar.gz` is supported.
89 |
90 | ### Optional
91 |
92 | - `exclude_symlink_directories` (Boolean) Boolean flag indicating whether symbolically linked directories should be excluded during the creation of the archive. Defaults to `false`.
93 | - `excludes` (Set of String) Specify files/directories to ignore when reading the `source_dir`. Supports glob file matching patterns including doublestar/globstar (`**`) patterns.
94 | - `output_file_mode` (String) String that specifies the octal file mode for all archived files. For example: `"0666"`. Setting this will ensure that cross platform usage of this module will not vary the modes of archived files (and ultimately checksums) resulting in more deterministic behavior.
95 | - `source` (Block Set) Specifies attributes of a single source file to include into the archive. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified. (see [below for nested schema](#nestedblock--source))
96 | - `source_content` (String) Add only this content to the archive with `source_content_filename` as the filename. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
97 | - `source_content_filename` (String) Set this as the filename when using `source_content`. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
98 | - `source_dir` (String) Package entire contents of this directory into the archive. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
99 | - `source_file` (String) Package this file into the archive. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
100 |
101 | ### Read-Only
102 |
103 | - `id` (String) The sha1 checksum hash of the output.
104 | - `output_base64sha256` (String) Base64 Encoded SHA256 checksum of output file
105 | - `output_base64sha512` (String) Base64 Encoded SHA512 checksum of output file
106 | - `output_md5` (String) MD5 of output file
107 | - `output_sha` (String) SHA1 checksum of output file
108 | - `output_sha256` (String) SHA256 checksum of output file
109 | - `output_sha512` (String) SHA512 checksum of output file
110 | - `output_size` (Number) The byte size of the output archive file.
111 |
112 |
113 | ### Nested Schema for `source`
114 |
115 | Required:
116 |
117 | - `content` (String) Add this content to the archive with `filename` as the filename.
118 | - `filename` (String) Set this as the filename when declaring a `source`.
119 |
120 |
--------------------------------------------------------------------------------
/docs/cdktf/python/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | page_title: "Provider: Archive"
3 | description: |-
4 | The Archive provider is used to manage archive files.
5 | ---
6 |
7 |
8 |
9 | # Archive Provider
10 |
11 | The archive provider exposes resources to manage archive files.
12 |
13 | This provider requires no configuration. For information on the resources
14 | it provides, see the navigation bar.
15 |
--------------------------------------------------------------------------------
/docs/cdktf/python/resources/file.md:
--------------------------------------------------------------------------------
1 | ---
2 | page_title: "archive_file Resource - terraform-provider-archive"
3 | subcategory: ""
4 | description: |-
5 | Generates an archive from content, a file, or directory of files.
6 | ---
7 |
8 |
9 |
10 | # archive_file (Resource)
11 |
12 | Generates an archive from content, a file, or directory of files.
13 |
14 | ## Example Usage
15 |
16 | ```python
17 | # DO NOT EDIT. Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
18 | from constructs import Construct
19 | from cdktf import TerraformStack
20 | #
21 | # Provider bindings are generated by running `cdktf get`.
22 | # See https://cdk.tf/provider-generation for more details.
23 | #
24 | from imports.archive.file import File
25 | class MyConvertedCode(TerraformStack):
26 | def __init__(self, scope, name):
27 | super().__init__(scope, name)
28 | File(self, "init",
29 | output_path="${path.module}/files/init.zip",
30 | source_file="${path.module}/init.tpl",
31 | type="zip"
32 | )
33 | ```
34 |
35 | ```python
36 | # DO NOT EDIT. Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
37 | from constructs import Construct
38 | from cdktf import Token, TerraformStack
39 | #
40 | # Provider bindings are generated by running `cdktf get`.
41 | # See https://cdk.tf/provider-generation for more details.
42 | #
43 | from imports.archive.file import File
44 | class MyConvertedCode(TerraformStack):
45 | def __init__(self, scope, name):
46 | super().__init__(scope, name)
47 | File(self, "dotfiles",
48 | excludes=["${path.module}/unwanted.zip"],
49 | output_path="${path.module}/files/dotfiles.zip",
50 | source=[FileSource(
51 | content=Token.as_string(vimrc.rendered),
52 | filename=".vimrc"
53 | ), FileSource(
54 | content=Token.as_string(ssh_config.rendered),
55 | filename=".ssh/config"
56 | )
57 | ],
58 | type="zip"
59 | )
60 | ```
61 |
62 | ```python
63 | # DO NOT EDIT. Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
64 | from constructs import Construct
65 | from cdktf import TerraformStack
66 | #
67 | # Provider bindings are generated by running `cdktf get`.
68 | # See https://cdk.tf/provider-generation for more details.
69 | #
70 | from imports.archive.file import File
71 | class MyConvertedCode(TerraformStack):
72 | def __init__(self, scope, name):
73 | super().__init__(scope, name)
74 | File(self, "lambda_my_function",
75 | output_file_mode="0666",
76 | output_path="${path.module}/files/lambda-my-function.js.zip",
77 | source_file="${path.module}/../lambda/my-function/index.js",
78 | type="zip"
79 | )
80 | ```
81 |
82 |
83 | ## Schema
84 |
85 | ### Required
86 |
87 | - `output_path` (String) The output of the archive file.
88 | - `type` (String) The type of archive to generate. NOTE: `zip` and `tar.gz` is supported.
89 |
90 | ### Optional
91 |
92 | - `exclude_symlink_directories` (Boolean) Boolean flag indicating whether symbolically linked directories should be excluded during the creation of the archive. Defaults to `false`.
93 | - `excludes` (Set of String) Specify files/directories to ignore when reading the `source_dir`. Supports glob file matching patterns including doublestar/globstar (`**`) patterns.
94 | - `output_file_mode` (String) String that specifies the octal file mode for all archived files. For example: `"0666"`. Setting this will ensure that cross platform usage of this module will not vary the modes of archived files (and ultimately checksums) resulting in more deterministic behavior.
95 | - `source` (Block Set) Specifies attributes of a single source file to include into the archive. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified. (see [below for nested schema](#nestedblock--source))
96 | - `source_content` (String) Add only this content to the archive with `source_content_filename` as the filename. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
97 | - `source_content_filename` (String) Set this as the filename when using `source_content`. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
98 | - `source_dir` (String) Package entire contents of this directory into the archive. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
99 | - `source_file` (String) Package this file into the archive. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
100 |
101 | ### Read-Only
102 |
103 | - `id` (String) The sha1 checksum hash of the output.
104 | - `output_base64sha256` (String) Base64 Encoded SHA256 checksum of output file
105 | - `output_base64sha512` (String) Base64 Encoded SHA512 checksum of output file
106 | - `output_md5` (String) MD5 of output file
107 | - `output_sha` (String) SHA1 checksum of output file
108 | - `output_sha256` (String) SHA256 checksum of output file
109 | - `output_sha512` (String) SHA512 checksum of output file
110 | - `output_size` (Number) The byte size of the output archive file.
111 |
112 |
113 | ### Nested Schema for `source`
114 |
115 | Required:
116 |
117 | - `content` (String) Add this content to the archive with `filename` as the filename.
118 | - `filename` (String) Set this as the filename when declaring a `source`.
119 |
120 |
--------------------------------------------------------------------------------
/docs/cdktf/typescript/data-sources/file.md:
--------------------------------------------------------------------------------
1 | ---
2 | page_title: "archive_file Data Source - terraform-provider-archive"
3 | subcategory: ""
4 | description: |-
5 | Generates an archive from content, a file, or directory of files. The archive is built during the terraform plan, so you must persist the archive through to the terraform apply. See the archive_file resource for an alternative if you cannot persist the file, such as in a multi-phase CI or build server context.
6 | ---
7 |
8 |
9 |
10 | # archive_file (Data Source)
11 |
12 | Generates an archive from content, a file, or directory of files. The archive is built during the terraform plan, so you must persist the archive through to the terraform apply. See the `archive_file` resource for an alternative if you cannot persist the file, such as in a multi-phase CI or build server context.
13 |
14 | ## Example Usage
15 |
16 | ```typescript
17 | // DO NOT EDIT. Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
18 | import { Construct } from "constructs";
19 | import { TerraformStack } from "cdktf";
20 | /*
21 | * Provider bindings are generated by running `cdktf get`.
22 | * See https://cdk.tf/provider-generation for more details.
23 | */
24 | import { DataArchiveFile } from "./.gen/providers/archive/data-archive-file";
25 | class MyConvertedCode extends TerraformStack {
26 | constructor(scope: Construct, name: string) {
27 | super(scope, name);
28 | new DataArchiveFile(this, "init", {
29 | outputPath: "${path.module}/files/init.zip",
30 | sourceFile: "${path.module}/init.tpl",
31 | type: "zip",
32 | });
33 | }
34 | }
35 |
36 | ```
37 |
38 | ```typescript
39 | // DO NOT EDIT. Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
40 | import { Construct } from "constructs";
41 | import { Token, TerraformStack } from "cdktf";
42 | /*
43 | * Provider bindings are generated by running `cdktf get`.
44 | * See https://cdk.tf/provider-generation for more details.
45 | */
46 | import { DataArchiveFile } from "./.gen/providers/archive/data-archive-file";
47 | class MyConvertedCode extends TerraformStack {
48 | constructor(scope: Construct, name: string) {
49 | super(scope, name);
50 | new DataArchiveFile(this, "dotfiles", {
51 | excludes: ["${path.module}/unwanted.zip"],
52 | outputPath: "${path.module}/files/dotfiles.zip",
53 | source: [
54 | {
55 | content: Token.asString(vimrc.rendered),
56 | filename: ".vimrc",
57 | },
58 | {
59 | content: Token.asString(sshConfig.rendered),
60 | filename: ".ssh/config",
61 | },
62 | ],
63 | type: "zip",
64 | });
65 | }
66 | }
67 |
68 | ```
69 |
70 | ```typescript
71 | // DO NOT EDIT. Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
72 | import { Construct } from "constructs";
73 | import { TerraformStack } from "cdktf";
74 | /*
75 | * Provider bindings are generated by running `cdktf get`.
76 | * See https://cdk.tf/provider-generation for more details.
77 | */
78 | import { DataArchiveFile } from "./.gen/providers/archive/data-archive-file";
79 | class MyConvertedCode extends TerraformStack {
80 | constructor(scope: Construct, name: string) {
81 | super(scope, name);
82 | new DataArchiveFile(this, "lambda_my_function", {
83 | outputFileMode: "0666",
84 | outputPath: "${path.module}/files/lambda-my-function.js.zip",
85 | sourceFile: "${path.module}/../lambda/my-function/index.js",
86 | type: "zip",
87 | });
88 | }
89 | }
90 |
91 | ```
92 |
93 |
94 | ## Schema
95 |
96 | ### Required
97 |
98 | - `outputPath` (String) The output of the archive file.
99 | - `type` (String) The type of archive to generate. NOTE: `zip` and `tar.gz` is supported.
100 |
101 | ### Optional
102 |
103 | - `excludeSymlinkDirectories` (Boolean) Boolean flag indicating whether symbolically linked directories should be excluded during the creation of the archive. Defaults to `false`.
104 | - `excludes` (Set of String) Specify files/directories to ignore when reading the `sourceDir`. Supports glob file matching patterns including doublestar/globstar (`**`) patterns.
105 | - `outputFileMode` (String) String that specifies the octal file mode for all archived files. For example: `"0666"`. Setting this will ensure that cross platform usage of this module will not vary the modes of archived files (and ultimately checksums) resulting in more deterministic behavior.
106 | - `source` (Block Set) Specifies attributes of a single source file to include into the archive. One and only one of `source`, `sourceContentFilename` (with `sourceContent`), `sourceFile`, or `sourceDir` must be specified. (see [below for nested schema](#nestedblock--source))
107 | - `sourceContent` (String) Add only this content to the archive with `sourceContentFilename` as the filename. One and only one of `source`, `sourceContentFilename` (with `sourceContent`), `sourceFile`, or `sourceDir` must be specified.
108 | - `sourceContentFilename` (String) Set this as the filename when using `sourceContent`. One and only one of `source`, `sourceContentFilename` (with `sourceContent`), `sourceFile`, or `sourceDir` must be specified.
109 | - `sourceDir` (String) Package entire contents of this directory into the archive. One and only one of `source`, `sourceContentFilename` (with `sourceContent`), `sourceFile`, or `sourceDir` must be specified.
110 | - `sourceFile` (String) Package this file into the archive. One and only one of `source`, `sourceContentFilename` (with `sourceContent`), `sourceFile`, or `sourceDir` must be specified.
111 |
112 | ### Read-Only
113 |
114 | - `id` (String) The sha1 checksum hash of the output.
115 | - `outputBase64Sha256` (String) Base64 Encoded SHA256 checksum of output file
116 | - `outputBase64Sha512` (String) Base64 Encoded SHA512 checksum of output file
117 | - `outputMd5` (String) MD5 of output file
118 | - `outputSha` (String) SHA1 checksum of output file
119 | - `outputSha256` (String) SHA256 checksum of output file
120 | - `outputSha512` (String) SHA512 checksum of output file
121 | - `outputSize` (Number) The byte size of the output archive file.
122 |
123 |
124 | ### Nested Schema for `source`
125 |
126 | Required:
127 |
128 | - `content` (String) Add this content to the archive with `filename` as the filename.
129 | - `filename` (String) Set this as the filename when declaring a `source`.
130 |
131 |
--------------------------------------------------------------------------------
/docs/cdktf/typescript/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | page_title: "Provider: Archive"
3 | description: |-
4 | The Archive provider is used to manage archive files.
5 | ---
6 |
7 |
8 |
9 | # Archive Provider
10 |
11 | The archive provider exposes resources to manage archive files.
12 |
13 | This provider requires no configuration. For information on the resources
14 | it provides, see the navigation bar.
15 |
--------------------------------------------------------------------------------
/docs/cdktf/typescript/resources/file.md:
--------------------------------------------------------------------------------
1 | ---
2 | page_title: "archive_file Resource - terraform-provider-archive"
3 | subcategory: ""
4 | description: |-
5 | Generates an archive from content, a file, or directory of files.
6 | ---
7 |
8 |
9 |
10 | # archive_file (Resource)
11 |
12 | Generates an archive from content, a file, or directory of files.
13 |
14 | ## Example Usage
15 |
16 | ```typescript
17 | // DO NOT EDIT. Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
18 | import { Construct } from "constructs";
19 | import { TerraformStack } from "cdktf";
20 | /*
21 | * Provider bindings are generated by running `cdktf get`.
22 | * See https://cdk.tf/provider-generation for more details.
23 | */
24 | import { File } from "./.gen/providers/archive/file";
25 | class MyConvertedCode extends TerraformStack {
26 | constructor(scope: Construct, name: string) {
27 | super(scope, name);
28 | new File(this, "init", {
29 | outputPath: "${path.module}/files/init.zip",
30 | sourceFile: "${path.module}/init.tpl",
31 | type: "zip",
32 | });
33 | }
34 | }
35 |
36 | ```
37 |
38 | ```typescript
39 | // DO NOT EDIT. Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
40 | import { Construct } from "constructs";
41 | import { Token, TerraformStack } from "cdktf";
42 | /*
43 | * Provider bindings are generated by running `cdktf get`.
44 | * See https://cdk.tf/provider-generation for more details.
45 | */
46 | import { File } from "./.gen/providers/archive/file";
47 | class MyConvertedCode extends TerraformStack {
48 | constructor(scope: Construct, name: string) {
49 | super(scope, name);
50 | new File(this, "dotfiles", {
51 | excludes: ["${path.module}/unwanted.zip"],
52 | outputPath: "${path.module}/files/dotfiles.zip",
53 | source: [
54 | {
55 | content: Token.asString(vimrc.rendered),
56 | filename: ".vimrc",
57 | },
58 | {
59 | content: Token.asString(sshConfig.rendered),
60 | filename: ".ssh/config",
61 | },
62 | ],
63 | type: "zip",
64 | });
65 | }
66 | }
67 |
68 | ```
69 |
70 | ```typescript
71 | // DO NOT EDIT. Code generated by 'cdktf convert' - Please report bugs at https://cdk.tf/bug
72 | import { Construct } from "constructs";
73 | import { TerraformStack } from "cdktf";
74 | /*
75 | * Provider bindings are generated by running `cdktf get`.
76 | * See https://cdk.tf/provider-generation for more details.
77 | */
78 | import { File } from "./.gen/providers/archive/file";
79 | class MyConvertedCode extends TerraformStack {
80 | constructor(scope: Construct, name: string) {
81 | super(scope, name);
82 | new File(this, "lambda_my_function", {
83 | outputFileMode: "0666",
84 | outputPath: "${path.module}/files/lambda-my-function.js.zip",
85 | sourceFile: "${path.module}/../lambda/my-function/index.js",
86 | type: "zip",
87 | });
88 | }
89 | }
90 |
91 | ```
92 |
93 |
94 | ## Schema
95 |
96 | ### Required
97 |
98 | - `outputPath` (String) The output of the archive file.
99 | - `type` (String) The type of archive to generate. NOTE: `zip` and `tar.gz` is supported.
100 |
101 | ### Optional
102 |
103 | - `excludeSymlinkDirectories` (Boolean) Boolean flag indicating whether symbolically linked directories should be excluded during the creation of the archive. Defaults to `false`.
104 | - `excludes` (Set of String) Specify files/directories to ignore when reading the `sourceDir`. Supports glob file matching patterns including doublestar/globstar (`**`) patterns.
105 | - `outputFileMode` (String) String that specifies the octal file mode for all archived files. For example: `"0666"`. Setting this will ensure that cross platform usage of this module will not vary the modes of archived files (and ultimately checksums) resulting in more deterministic behavior.
106 | - `source` (Block Set) Specifies attributes of a single source file to include into the archive. One and only one of `source`, `sourceContentFilename` (with `sourceContent`), `sourceFile`, or `sourceDir` must be specified. (see [below for nested schema](#nestedblock--source))
107 | - `sourceContent` (String) Add only this content to the archive with `sourceContentFilename` as the filename. One and only one of `source`, `sourceContentFilename` (with `sourceContent`), `sourceFile`, or `sourceDir` must be specified.
108 | - `sourceContentFilename` (String) Set this as the filename when using `sourceContent`. One and only one of `source`, `sourceContentFilename` (with `sourceContent`), `sourceFile`, or `sourceDir` must be specified.
109 | - `sourceDir` (String) Package entire contents of this directory into the archive. One and only one of `source`, `sourceContentFilename` (with `sourceContent`), `sourceFile`, or `sourceDir` must be specified.
110 | - `sourceFile` (String) Package this file into the archive. One and only one of `source`, `sourceContentFilename` (with `sourceContent`), `sourceFile`, or `sourceDir` must be specified.
111 |
112 | ### Read-Only
113 |
114 | - `id` (String) The sha1 checksum hash of the output.
115 | - `outputBase64Sha256` (String) Base64 Encoded SHA256 checksum of output file
116 | - `outputBase64Sha512` (String) Base64 Encoded SHA512 checksum of output file
117 | - `outputMd5` (String) MD5 of output file
118 | - `outputSha` (String) SHA1 checksum of output file
119 | - `outputSha256` (String) SHA256 checksum of output file
120 | - `outputSha512` (String) SHA512 checksum of output file
121 | - `outputSize` (Number) The byte size of the output archive file.
122 |
123 |
124 | ### Nested Schema for `source`
125 |
126 | Required:
127 |
128 | - `content` (String) Add this content to the archive with `filename` as the filename.
129 | - `filename` (String) Set this as the filename when declaring a `source`.
130 |
131 |
--------------------------------------------------------------------------------
/docs/data-sources/file.md:
--------------------------------------------------------------------------------
1 | ---
2 | page_title: "archive_file Data Source - terraform-provider-archive"
3 | subcategory: ""
4 | description: |-
5 | Generates an archive from content, a file, or directory of files. The archive is built during the terraform plan, so you must persist the archive through to the terraform apply. See the archive_file resource for an alternative if you cannot persist the file, such as in a multi-phase CI or build server context.
6 | ---
7 |
8 | # archive_file (Data Source)
9 |
10 | Generates an archive from content, a file, or directory of files. The archive is built during the terraform plan, so you must persist the archive through to the terraform apply. See the `archive_file` resource for an alternative if you cannot persist the file, such as in a multi-phase CI or build server context.
11 |
12 | ## Example Usage
13 |
14 | ```terraform
15 | # Archive a single file.
16 |
17 | data "archive_file" "init" {
18 | type = "zip"
19 | source_file = "${path.module}/init.tpl"
20 | output_path = "${path.module}/files/init.zip"
21 | }
22 | ```
23 |
24 | ```terraform
25 | # Archive multiple files and exclude file.
26 |
27 | data "archive_file" "dotfiles" {
28 | type = "zip"
29 | output_path = "${path.module}/files/dotfiles.zip"
30 | excludes = ["${path.module}/unwanted.zip"]
31 |
32 | source {
33 | content = data.template_file.vimrc.rendered
34 | filename = ".vimrc"
35 | }
36 |
37 | source {
38 | content = data.template_file.ssh_config.rendered
39 | filename = ".ssh/config"
40 | }
41 | }
42 | ```
43 |
44 | ```terraform
45 | # Archive a file to be used with Lambda using consistent file mode
46 |
47 | data "archive_file" "lambda_my_function" {
48 | type = "zip"
49 | source_file = "${path.module}/../lambda/my-function/index.js"
50 | output_file_mode = "0666"
51 | output_path = "${path.module}/files/lambda-my-function.js.zip"
52 | }
53 | ```
54 |
55 |
56 | ## Schema
57 |
58 | ### Required
59 |
60 | - `output_path` (String) The output of the archive file.
61 | - `type` (String) The type of archive to generate. NOTE: `zip` and `tar.gz` is supported.
62 |
63 | ### Optional
64 |
65 | - `exclude_symlink_directories` (Boolean) Boolean flag indicating whether symbolically linked directories should be excluded during the creation of the archive. Defaults to `false`.
66 | - `excludes` (Set of String) Specify files/directories to ignore when reading the `source_dir`. Supports glob file matching patterns including doublestar/globstar (`**`) patterns.
67 | - `output_file_mode` (String) String that specifies the octal file mode for all archived files. For example: `"0666"`. Setting this will ensure that cross platform usage of this module will not vary the modes of archived files (and ultimately checksums) resulting in more deterministic behavior.
68 | - `source` (Block Set) Specifies attributes of a single source file to include into the archive. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified. (see [below for nested schema](#nestedblock--source))
69 | - `source_content` (String) Add only this content to the archive with `source_content_filename` as the filename. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
70 | - `source_content_filename` (String) Set this as the filename when using `source_content`. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
71 | - `source_dir` (String) Package entire contents of this directory into the archive. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
72 | - `source_file` (String) Package this file into the archive. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
73 |
74 | ### Read-Only
75 |
76 | - `id` (String) The sha1 checksum hash of the output.
77 | - `output_base64sha256` (String) Base64 Encoded SHA256 checksum of output file
78 | - `output_base64sha512` (String) Base64 Encoded SHA512 checksum of output file
79 | - `output_md5` (String) MD5 of output file
80 | - `output_sha` (String) SHA1 checksum of output file
81 | - `output_sha256` (String) SHA256 checksum of output file
82 | - `output_sha512` (String) SHA512 checksum of output file
83 | - `output_size` (Number) The byte size of the output archive file.
84 |
85 |
86 | ### Nested Schema for `source`
87 |
88 | Required:
89 |
90 | - `content` (String) Add this content to the archive with `filename` as the filename.
91 | - `filename` (String) Set this as the filename when declaring a `source`.
92 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | page_title: "Provider: Archive"
3 | description: |-
4 | The Archive provider is used to manage archive files.
5 | ---
6 |
7 | # Archive Provider
8 |
9 | The archive provider exposes resources to manage archive files.
10 |
11 | This provider requires no configuration. For information on the resources
12 | it provides, see the navigation bar.
--------------------------------------------------------------------------------
/docs/resources/file.md:
--------------------------------------------------------------------------------
1 | ---
2 | page_title: "archive_file Resource - terraform-provider-archive"
3 | subcategory: ""
4 | description: |-
5 | Generates an archive from content, a file, or directory of files.
6 | ---
7 |
8 | # archive_file (Resource)
9 |
10 | Generates an archive from content, a file, or directory of files.
11 |
12 | ## Example Usage
13 |
14 | ```terraform
15 | # Archive a single file.
16 |
17 | resource "archive_file" "init" {
18 | type = "zip"
19 | source_file = "${path.module}/init.tpl"
20 | output_path = "${path.module}/files/init.zip"
21 | }
22 | ```
23 |
24 | ```terraform
25 | # Archive multiple files and exclude file.
26 |
27 | resource "archive_file" "dotfiles" {
28 | type = "zip"
29 | output_path = "${path.module}/files/dotfiles.zip"
30 | excludes = ["${path.module}/unwanted.zip"]
31 |
32 | source {
33 | content = data.template_file.vimrc.rendered
34 | filename = ".vimrc"
35 | }
36 |
37 | source {
38 | content = data.template_file.ssh_config.rendered
39 | filename = ".ssh/config"
40 | }
41 | }
42 | ```
43 |
44 | ```terraform
45 | # Archive a file to be used with Lambda using consistent file mode
46 |
47 | resource "archive_file" "lambda_my_function" {
48 | type = "zip"
49 | source_file = "${path.module}/../lambda/my-function/index.js"
50 | output_file_mode = "0666"
51 | output_path = "${path.module}/files/lambda-my-function.js.zip"
52 | }
53 | ```
54 |
55 |
56 | ## Schema
57 |
58 | ### Required
59 |
60 | - `output_path` (String) The output of the archive file.
61 | - `type` (String) The type of archive to generate. NOTE: `zip` and `tar.gz` is supported.
62 |
63 | ### Optional
64 |
65 | - `exclude_symlink_directories` (Boolean) Boolean flag indicating whether symbolically linked directories should be excluded during the creation of the archive. Defaults to `false`.
66 | - `excludes` (Set of String) Specify files/directories to ignore when reading the `source_dir`. Supports glob file matching patterns including doublestar/globstar (`**`) patterns.
67 | - `output_file_mode` (String) String that specifies the octal file mode for all archived files. For example: `"0666"`. Setting this will ensure that cross platform usage of this module will not vary the modes of archived files (and ultimately checksums) resulting in more deterministic behavior.
68 | - `source` (Block Set) Specifies attributes of a single source file to include into the archive. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified. (see [below for nested schema](#nestedblock--source))
69 | - `source_content` (String) Add only this content to the archive with `source_content_filename` as the filename. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
70 | - `source_content_filename` (String) Set this as the filename when using `source_content`. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
71 | - `source_dir` (String) Package entire contents of this directory into the archive. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
72 | - `source_file` (String) Package this file into the archive. One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, or `source_dir` must be specified.
73 |
74 | ### Read-Only
75 |
76 | - `id` (String) The sha1 checksum hash of the output.
77 | - `output_base64sha256` (String) Base64 Encoded SHA256 checksum of output file
78 | - `output_base64sha512` (String) Base64 Encoded SHA512 checksum of output file
79 | - `output_md5` (String) MD5 of output file
80 | - `output_sha` (String) SHA1 checksum of output file
81 | - `output_sha256` (String) SHA256 checksum of output file
82 | - `output_sha512` (String) SHA512 checksum of output file
83 | - `output_size` (Number) The byte size of the output archive file.
84 |
85 |
86 | ### Nested Schema for `source`
87 |
88 | Required:
89 |
90 | - `content` (String) Add this content to the archive with `filename` as the filename.
91 | - `filename` (String) Set this as the filename when declaring a `source`.
92 |
--------------------------------------------------------------------------------
/examples/data-sources/file/data-source.tf:
--------------------------------------------------------------------------------
1 | # Archive a single file.
2 |
3 | data "archive_file" "init" {
4 | type = "zip"
5 | source_file = "${path.module}/init.tpl"
6 | output_path = "${path.module}/files/init.zip"
7 | }
8 |
--------------------------------------------------------------------------------
/examples/data-sources/file/lambda.tf:
--------------------------------------------------------------------------------
1 | # Archive a file to be used with Lambda using consistent file mode
2 |
3 | data "archive_file" "lambda_my_function" {
4 | type = "zip"
5 | source_file = "${path.module}/../lambda/my-function/index.js"
6 | output_file_mode = "0666"
7 | output_path = "${path.module}/files/lambda-my-function.js.zip"
8 | }
9 |
--------------------------------------------------------------------------------
/examples/data-sources/file/multiple-files.tf:
--------------------------------------------------------------------------------
1 | # Archive multiple files and exclude file.
2 |
3 | data "archive_file" "dotfiles" {
4 | type = "zip"
5 | output_path = "${path.module}/files/dotfiles.zip"
6 | excludes = ["${path.module}/unwanted.zip"]
7 |
8 | source {
9 | content = data.template_file.vimrc.rendered
10 | filename = ".vimrc"
11 | }
12 |
13 | source {
14 | content = data.template_file.ssh_config.rendered
15 | filename = ".ssh/config"
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/examples/resources/file/lambda.tf:
--------------------------------------------------------------------------------
1 | # Archive a file to be used with Lambda using consistent file mode
2 |
3 | resource "archive_file" "lambda_my_function" {
4 | type = "zip"
5 | source_file = "${path.module}/../lambda/my-function/index.js"
6 | output_file_mode = "0666"
7 | output_path = "${path.module}/files/lambda-my-function.js.zip"
8 | }
9 |
--------------------------------------------------------------------------------
/examples/resources/file/multiple-files.tf:
--------------------------------------------------------------------------------
1 | # Archive multiple files and exclude file.
2 |
3 | resource "archive_file" "dotfiles" {
4 | type = "zip"
5 | output_path = "${path.module}/files/dotfiles.zip"
6 | excludes = ["${path.module}/unwanted.zip"]
7 |
8 | source {
9 | content = data.template_file.vimrc.rendered
10 | filename = ".vimrc"
11 | }
12 |
13 | source {
14 | content = data.template_file.ssh_config.rendered
15 | filename = ".ssh/config"
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/examples/resources/file/resource.tf:
--------------------------------------------------------------------------------
1 | # Archive a single file.
2 |
3 | resource "archive_file" "init" {
4 | type = "zip"
5 | source_file = "${path.module}/init.tpl"
6 | output_path = "${path.module}/files/init.zip"
7 | }
8 |
--------------------------------------------------------------------------------
/go.mod:
--------------------------------------------------------------------------------
1 | module github.com/hashicorp/terraform-provider-archive
2 |
3 | go 1.23.7
4 |
5 | require (
6 | github.com/bmatcuk/doublestar/v4 v4.8.1
7 | github.com/hashicorp/terraform-plugin-framework v1.15.0
8 | github.com/hashicorp/terraform-plugin-framework-validators v0.18.0
9 | github.com/hashicorp/terraform-plugin-go v0.28.0
10 | github.com/hashicorp/terraform-plugin-testing v1.13.1
11 | golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56
12 | )
13 |
14 | require (
15 | github.com/ProtonMail/go-crypto v1.1.6 // indirect
16 | github.com/agext/levenshtein v1.2.2 // indirect
17 | github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
18 | github.com/cloudflare/circl v1.6.0 // indirect
19 | github.com/fatih/color v1.16.0 // indirect
20 | github.com/golang/protobuf v1.5.4 // indirect
21 | github.com/google/go-cmp v0.7.0 // indirect
22 | github.com/hashicorp/errwrap v1.1.0 // indirect
23 | github.com/hashicorp/go-checkpoint v0.5.0 // indirect
24 | github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
25 | github.com/hashicorp/go-cty v1.5.0 // indirect
26 | github.com/hashicorp/go-hclog v1.6.3 // indirect
27 | github.com/hashicorp/go-multierror v1.1.1 // indirect
28 | github.com/hashicorp/go-plugin v1.6.3 // indirect
29 | github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
30 | github.com/hashicorp/go-uuid v1.0.3 // indirect
31 | github.com/hashicorp/go-version v1.7.0 // indirect
32 | github.com/hashicorp/hc-install v0.9.2 // indirect
33 | github.com/hashicorp/hcl/v2 v2.23.0 // indirect
34 | github.com/hashicorp/logutils v1.0.0 // indirect
35 | github.com/hashicorp/terraform-exec v0.23.0 // indirect
36 | github.com/hashicorp/terraform-json v0.25.0 // indirect
37 | github.com/hashicorp/terraform-plugin-log v0.9.0 // indirect
38 | github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0 // indirect
39 | github.com/hashicorp/terraform-registry-address v0.2.5 // indirect
40 | github.com/hashicorp/terraform-svchost v0.1.1 // indirect
41 | github.com/hashicorp/yamux v0.1.1 // indirect
42 | github.com/kr/pretty v0.3.0 // indirect
43 | github.com/mattn/go-colorable v0.1.13 // indirect
44 | github.com/mattn/go-isatty v0.0.20 // indirect
45 | github.com/mitchellh/copystructure v1.2.0 // indirect
46 | github.com/mitchellh/go-testing-interface v1.14.1 // indirect
47 | github.com/mitchellh/go-wordwrap v1.0.0 // indirect
48 | github.com/mitchellh/mapstructure v1.5.0 // indirect
49 | github.com/mitchellh/reflectwalk v1.0.2 // indirect
50 | github.com/oklog/run v1.0.0 // indirect
51 | github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
52 | github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
53 | github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
54 | github.com/zclconf/go-cty v1.16.2 // indirect
55 | golang.org/x/crypto v0.38.0 // indirect
56 | golang.org/x/mod v0.24.0 // indirect
57 | golang.org/x/net v0.39.0 // indirect
58 | golang.org/x/sync v0.14.0 // indirect
59 | golang.org/x/sys v0.33.0 // indirect
60 | golang.org/x/text v0.25.0 // indirect
61 | golang.org/x/tools v0.23.0 // indirect
62 | google.golang.org/appengine v1.6.8 // indirect
63 | google.golang.org/genproto/googleapis/rpc v0.0.0-20250218202821-56aae31c358a // indirect
64 | google.golang.org/grpc v1.72.1 // indirect
65 | google.golang.org/protobuf v1.36.6 // indirect
66 | )
67 |
--------------------------------------------------------------------------------
/internal/hashcode/hashcode.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | package hashcode
5 |
6 | import (
7 | "bytes"
8 | "fmt"
9 | "hash/crc32"
10 | )
11 |
12 | // String hashes a string to a unique hashcode.
13 | //
14 | // crc32 returns a uint32, but for our use we need
15 | // and non negative integer. Here we cast to an integer
16 | // and invert it if the result is negative.
17 | func String(s string) int {
18 | v := int(crc32.ChecksumIEEE([]byte(s)))
19 | if v >= 0 {
20 | return v
21 | }
22 | if -v >= 0 {
23 | return -v
24 | }
25 | // v == MinInt
26 | return 0
27 | }
28 |
29 | // Strings hashes a list of strings to a unique hashcode.
30 | func Strings(strings []string) string {
31 | var buf bytes.Buffer
32 |
33 | for _, s := range strings {
34 | buf.WriteString(fmt.Sprintf("%s-", s))
35 | }
36 |
37 | return fmt.Sprintf("%d", String(buf.String()))
38 | }
39 |
--------------------------------------------------------------------------------
/internal/hashcode/hashcode_test.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | package hashcode
5 |
6 | import (
7 | "testing"
8 | )
9 |
10 | func TestString(t *testing.T) {
11 | v := "hello, world"
12 | expected := String(v)
13 | for i := 0; i < 100; i++ {
14 | actual := String(v)
15 | if actual != expected {
16 | t.Fatalf("bad: %#v\n\t%#v", actual, expected)
17 | }
18 | }
19 | }
20 |
21 | func TestStrings(t *testing.T) {
22 | v := []string{"hello", ",", "world"}
23 | expected := Strings(v)
24 | for i := 0; i < 100; i++ {
25 | actual := Strings(v)
26 | if actual != expected {
27 | t.Fatalf("bad: %#v\n\t%#v", actual, expected)
28 | }
29 | }
30 | }
31 |
32 | func TestString_positiveIndex(t *testing.T) {
33 | // "2338615298" hashes to uint32(2147483648) which is math.MinInt32
34 | ips := []string{"192.168.1.3", "192.168.1.5", "2338615298"}
35 | for _, ip := range ips {
36 | if index := String(ip); index < 0 {
37 | t.Fatalf("Bad Index %#v for ip %s", index, ip)
38 | }
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/internal/provider/archiver.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | package archive
5 |
6 | import (
7 | "fmt"
8 | "os"
9 | )
10 |
11 | type ArchiveDirOpts struct {
12 | Excludes []string
13 | ExcludeSymlinkDirectories bool
14 | }
15 |
16 | type Archiver interface {
17 | ArchiveContent(content []byte, infilename string) error
18 | ArchiveFile(infilename string) error
19 | ArchiveDir(indirname string, opts ArchiveDirOpts) error
20 | ArchiveMultiple(content map[string][]byte) error
21 | SetOutputFileMode(outputFileMode string)
22 | }
23 |
24 | type ArchiverBuilder func(outputPath string) Archiver
25 |
26 | var archiverBuilders = map[string]ArchiverBuilder{
27 | "zip": NewZipArchiver,
28 | "tar.gz": NewTarGzArchiver,
29 | }
30 |
31 | func getArchiver(archiveType string, outputPath string) Archiver {
32 | if builder, ok := archiverBuilders[archiveType]; ok {
33 | return builder(outputPath)
34 | }
35 | return nil
36 | }
37 |
38 | func assertValidFile(infilename string) (os.FileInfo, error) {
39 | fi, err := os.Stat(infilename)
40 | if err != nil && os.IsNotExist(err) {
41 | return fi, fmt.Errorf("could not archive missing file: %s", infilename)
42 | }
43 | return fi, err
44 | }
45 |
46 | func assertValidDir(indirname string) error {
47 | fi, err := os.Stat(indirname)
48 | if err != nil {
49 | if os.IsNotExist(err) {
50 | return fmt.Errorf("could not archive missing directory: %s", indirname)
51 | }
52 | return err
53 | }
54 |
55 | if !fi.IsDir() {
56 | return fmt.Errorf("could not archive directory that is a file: %s", indirname)
57 | }
58 |
59 | return nil
60 | }
61 |
--------------------------------------------------------------------------------
/internal/provider/data_source_archive_file.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | package archive
5 |
6 | import (
7 | "context"
8 | "crypto/md5"
9 | "crypto/sha1"
10 | "crypto/sha256"
11 | "crypto/sha512"
12 | "encoding/base64"
13 | "encoding/hex"
14 | "fmt"
15 | "os"
16 | "path"
17 |
18 | "github.com/hashicorp/terraform-plugin-framework-validators/datasourcevalidator"
19 | "github.com/hashicorp/terraform-plugin-framework-validators/setvalidator"
20 | "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
21 | "github.com/hashicorp/terraform-plugin-framework/datasource"
22 | "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
23 | fwpath "github.com/hashicorp/terraform-plugin-framework/path"
24 | "github.com/hashicorp/terraform-plugin-framework/schema/validator"
25 | "github.com/hashicorp/terraform-plugin-framework/types"
26 | )
27 |
28 | var _ datasource.DataSource = (*archiveFileDataSource)(nil)
29 |
30 | func NewArchiveFileDataSource() datasource.DataSource {
31 | return &archiveFileDataSource{}
32 | }
33 |
34 | type archiveFileDataSource struct{}
35 |
36 | func (d *archiveFileDataSource) ConfigValidators(context.Context) []datasource.ConfigValidator {
37 | return []datasource.ConfigValidator{
38 | datasourcevalidator.AtLeastOneOf(
39 | fwpath.MatchRoot("source"),
40 | fwpath.MatchRoot("source_content_filename"),
41 | fwpath.MatchRoot("source_file"),
42 | fwpath.MatchRoot("source_dir"),
43 | ),
44 | }
45 | }
46 |
47 | func (d *archiveFileDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
48 | resp.Schema = schema.Schema{
49 | Description: "Generates an archive from content, a file, or directory of files. " +
50 | "The archive is built during the terraform plan, so you must persist the archive through to the terraform apply. " +
51 | "See the `archive_file` resource for an alternative if you cannot persist the file, " +
52 | "such as in a multi-phase CI or build server context.",
53 | Blocks: map[string]schema.Block{
54 | "source": schema.SetNestedBlock{
55 | Description: "Specifies attributes of a single source file to include into the archive. " +
56 | "One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, " +
57 | "or `source_dir` must be specified.",
58 | NestedObject: schema.NestedBlockObject{
59 | Attributes: map[string]schema.Attribute{
60 | "content": schema.StringAttribute{
61 | Description: "Add this content to the archive with `filename` as the filename.",
62 | Required: true,
63 | },
64 | "filename": schema.StringAttribute{
65 | Description: "Set this as the filename when declaring a `source`.",
66 | Required: true,
67 | },
68 | },
69 | },
70 | Validators: []validator.Set{
71 | setvalidator.ConflictsWith(
72 | fwpath.MatchRoot("source_file"),
73 | fwpath.MatchRoot("source_dir"),
74 | fwpath.MatchRoot("source_content"),
75 | fwpath.MatchRoot("source_content_filename"),
76 | ),
77 | },
78 | },
79 | },
80 | Attributes: map[string]schema.Attribute{
81 | "id": schema.StringAttribute{
82 | Description: "The sha1 checksum hash of the output.",
83 | Computed: true,
84 | },
85 | "type": schema.StringAttribute{
86 | Description: "The type of archive to generate. NOTE: `zip` and `tar.gz` is supported.",
87 | Required: true,
88 | },
89 | "source_content": schema.StringAttribute{
90 | Description: "Add only this content to the archive with `source_content_filename` as the filename. " +
91 | "One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, " +
92 | "or `source_dir` must be specified.",
93 | Optional: true,
94 | Validators: []validator.String{
95 | stringvalidator.ConflictsWith(
96 | fwpath.MatchRoot("source_file"),
97 | fwpath.MatchRoot("source_dir"),
98 | ),
99 | },
100 | },
101 | "source_content_filename": schema.StringAttribute{
102 | Description: "Set this as the filename when using `source_content`. " +
103 | "One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, " +
104 | "or `source_dir` must be specified.",
105 | Optional: true,
106 | Validators: []validator.String{
107 | stringvalidator.ConflictsWith(
108 | fwpath.MatchRoot("source_file"),
109 | fwpath.MatchRoot("source_dir"),
110 | ),
111 | },
112 | },
113 | "source_file": schema.StringAttribute{
114 | Description: "Package this file into the archive. " +
115 | "One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, " +
116 | "or `source_dir` must be specified.",
117 | Optional: true,
118 | Validators: []validator.String{
119 | stringvalidator.ConflictsWith(
120 | fwpath.MatchRoot("source_dir"),
121 | fwpath.MatchRoot("source_content"),
122 | fwpath.MatchRoot("source_content_filename"),
123 | ),
124 | },
125 | },
126 | "source_dir": schema.StringAttribute{
127 | Description: "Package entire contents of this directory into the archive. " +
128 | "One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, " +
129 | "or `source_dir` must be specified.",
130 | Optional: true,
131 | Validators: []validator.String{
132 | stringvalidator.ConflictsWith(
133 | fwpath.MatchRoot("source_file"),
134 | fwpath.MatchRoot("source_content"),
135 | fwpath.MatchRoot("source_content_filename"),
136 | ),
137 | },
138 | },
139 | "excludes": schema.SetAttribute{
140 | Description: "Specify files/directories to ignore when reading the `source_dir`. " +
141 | "Supports glob file matching patterns including doublestar/globstar (`**`) patterns.",
142 | ElementType: types.StringType,
143 | Optional: true,
144 | Validators: []validator.Set{
145 | setvalidator.ConflictsWith(
146 | fwpath.MatchRoot("source_file"),
147 | fwpath.MatchRoot("source_content"),
148 | fwpath.MatchRoot("source_content_filename"),
149 | ),
150 | },
151 | },
152 | "exclude_symlink_directories": schema.BoolAttribute{
153 | Optional: true,
154 | Description: "Boolean flag indicating whether symbolically linked directories should be excluded during " +
155 | "the creation of the archive. Defaults to `false`.",
156 | },
157 | "output_path": schema.StringAttribute{
158 | Description: "The output of the archive file.",
159 | Required: true,
160 | },
161 | "output_size": schema.Int64Attribute{
162 | Description: "The byte size of the output archive file.",
163 | Computed: true,
164 | },
165 | "output_file_mode": schema.StringAttribute{
166 | Description: "String that specifies the octal file mode for all archived files. For example: `\"0666\"`. " +
167 | "Setting this will ensure that cross platform usage of this module will not vary the modes of archived " +
168 | "files (and ultimately checksums) resulting in more deterministic behavior.",
169 | Optional: true,
170 | },
171 | "output_md5": schema.StringAttribute{
172 | Description: "MD5 of output file",
173 | Computed: true,
174 | },
175 | "output_sha": schema.StringAttribute{
176 | Description: "SHA1 checksum of output file",
177 | Computed: true,
178 | },
179 | "output_sha256": schema.StringAttribute{
180 | Description: "SHA256 checksum of output file",
181 | Computed: true,
182 | },
183 | "output_base64sha256": schema.StringAttribute{
184 | Description: "Base64 Encoded SHA256 checksum of output file",
185 | Computed: true,
186 | },
187 | "output_sha512": schema.StringAttribute{
188 | Description: "SHA512 checksum of output file",
189 | Computed: true,
190 | },
191 | "output_base64sha512": schema.StringAttribute{
192 | Description: "Base64 Encoded SHA512 checksum of output file",
193 | Computed: true,
194 | },
195 | },
196 | }
197 | }
198 |
199 | func archive(ctx context.Context, model fileModel) error {
200 | archiveType := model.Type.ValueString()
201 | outputPath := model.OutputPath.ValueString()
202 |
203 | archiver := getArchiver(archiveType, outputPath)
204 | if archiver == nil {
205 | return fmt.Errorf("archive type not supported: %s", archiveType)
206 | }
207 |
208 | outputFileMode := model.OutputFileMode.ValueString()
209 | if outputFileMode != "" {
210 | archiver.SetOutputFileMode(outputFileMode)
211 | }
212 |
213 | switch {
214 | case !model.SourceDir.IsNull():
215 | excludeList := make([]string, len(model.Excludes.Elements()))
216 |
217 | if !model.Excludes.IsNull() {
218 | var elements []types.String
219 | model.Excludes.ElementsAs(ctx, &elements, false)
220 |
221 | for i, elem := range elements {
222 | excludeList[i] = elem.ValueString()
223 | }
224 | }
225 |
226 | opts := ArchiveDirOpts{
227 | Excludes: excludeList,
228 | }
229 |
230 | if !model.ExcludeSymlinkDirectories.IsNull() {
231 | opts.ExcludeSymlinkDirectories = model.ExcludeSymlinkDirectories.ValueBool()
232 | }
233 |
234 | if err := archiver.ArchiveDir(model.SourceDir.ValueString(), opts); err != nil {
235 | return fmt.Errorf("error archiving directory: %s", err)
236 | }
237 | case !model.SourceFile.IsNull():
238 | if err := archiver.ArchiveFile(model.SourceFile.ValueString()); err != nil {
239 | return fmt.Errorf("error archiving file: %s", err)
240 | }
241 | case !model.SourceContentFilename.IsNull():
242 | content := model.SourceContent.ValueString()
243 |
244 | if err := archiver.ArchiveContent([]byte(content), model.SourceContentFilename.ValueString()); err != nil {
245 | return fmt.Errorf("error archiving content: %s", err)
246 | }
247 | case !model.Source.IsNull():
248 | content := make(map[string][]byte)
249 |
250 | var elements []sourceModel
251 | model.Source.ElementsAs(ctx, &elements, false)
252 |
253 | for _, elem := range elements {
254 | content[elem.Filename.ValueString()] = []byte(elem.Content.ValueString())
255 | }
256 |
257 | if err := archiver.ArchiveMultiple(content); err != nil {
258 | return fmt.Errorf("error archiving content: %s", err)
259 | }
260 | }
261 |
262 | return nil
263 | }
264 |
265 | func (d *archiveFileDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
266 | var model fileModel
267 | diags := req.Config.Get(ctx, &model)
268 | resp.Diagnostics.Append(diags...)
269 | if resp.Diagnostics.HasError() {
270 | return
271 | }
272 |
273 | outputPath := model.OutputPath.ValueString()
274 |
275 | outputDirectory := path.Dir(outputPath)
276 | if outputDirectory != "" {
277 | if _, err := os.Stat(outputDirectory); err != nil {
278 | if err := os.MkdirAll(outputDirectory, 0755); err != nil {
279 | resp.Diagnostics.AddError(
280 | "Output path error",
281 | fmt.Sprintf("error creating output path: %s", err),
282 | )
283 | return
284 | }
285 | }
286 | }
287 |
288 | if err := archive(ctx, model); err != nil {
289 | resp.Diagnostics.AddError(
290 | "Archive creation error",
291 | fmt.Sprintf("error creating archive: %s", err),
292 | )
293 | return
294 | }
295 |
296 | // Generate archived file stats
297 | fi, err := os.Stat(outputPath)
298 | if err != nil {
299 | resp.Diagnostics.AddError(
300 | "Archive output error",
301 | fmt.Sprintf("error reading output: %s", err),
302 | )
303 | return
304 | }
305 | model.OutputSize = types.Int64Value(fi.Size())
306 |
307 | checksums, err := genFileChecksums(outputPath)
308 | if err != nil {
309 | resp.Diagnostics.AddError(
310 | "Hash generation error",
311 | fmt.Sprintf("error generating checksums: %s", err),
312 | )
313 | }
314 | model.OutputMd5 = types.StringValue(checksums.md5Hex)
315 | model.OutputSha = types.StringValue(checksums.sha1Hex)
316 | model.OutputSha256 = types.StringValue(checksums.sha256Hex)
317 | model.OutputBase64Sha256 = types.StringValue(checksums.sha256Base64)
318 | model.OutputSha512 = types.StringValue(checksums.sha512Hex)
319 | model.OutputBase64Sha512 = types.StringValue(checksums.sha512Base64)
320 |
321 | model.ID = types.StringValue(checksums.sha1Hex)
322 |
323 | diags = resp.State.Set(ctx, model)
324 | resp.Diagnostics.Append(diags...)
325 | }
326 |
327 | func (d *archiveFileDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
328 | resp.TypeName = req.ProviderTypeName + "_file"
329 | }
330 |
331 | type fileModel struct {
332 | ID types.String `tfsdk:"id"`
333 | Source types.Set `tfsdk:"source"` // sourceModel
334 | Type types.String `tfsdk:"type"`
335 | SourceContent types.String `tfsdk:"source_content"`
336 | SourceContentFilename types.String `tfsdk:"source_content_filename"`
337 | SourceFile types.String `tfsdk:"source_file"`
338 | SourceDir types.String `tfsdk:"source_dir"`
339 | Excludes types.Set `tfsdk:"excludes"`
340 | ExcludeSymlinkDirectories types.Bool `tfsdk:"exclude_symlink_directories"`
341 | OutputPath types.String `tfsdk:"output_path"`
342 | OutputSize types.Int64 `tfsdk:"output_size"`
343 | OutputFileMode types.String `tfsdk:"output_file_mode"`
344 | OutputMd5 types.String `tfsdk:"output_md5"`
345 | OutputSha types.String `tfsdk:"output_sha"`
346 | OutputSha256 types.String `tfsdk:"output_sha256"`
347 | OutputBase64Sha256 types.String `tfsdk:"output_base64sha256"`
348 | OutputSha512 types.String `tfsdk:"output_sha512"`
349 | OutputBase64Sha512 types.String `tfsdk:"output_base64sha512"`
350 | }
351 |
352 | type sourceModel struct {
353 | Content types.String `tfsdk:"content"`
354 | Filename types.String `tfsdk:"filename"`
355 | }
356 |
357 | type fileChecksums struct {
358 | md5Hex string
359 | sha1Hex string
360 | sha256Hex string
361 | sha256Base64 string
362 | sha512Hex string
363 | sha512Base64 string
364 | }
365 |
366 | func genFileChecksums(filename string) (fileChecksums, error) {
367 | var checksums fileChecksums
368 |
369 | data, err := os.ReadFile(filename)
370 | if err != nil {
371 | return checksums, fmt.Errorf("could not compute file '%s' checksum: %s", filename, err)
372 | }
373 |
374 | md5Sum := md5.Sum(data)
375 | checksums.md5Hex = hex.EncodeToString(md5Sum[:])
376 |
377 | sha1Sum := sha1.Sum(data)
378 | checksums.sha1Hex = hex.EncodeToString(sha1Sum[:])
379 |
380 | sha256Sum := sha256.Sum256(data)
381 | checksums.sha256Hex = hex.EncodeToString(sha256Sum[:])
382 | checksums.sha256Base64 = base64.StdEncoding.EncodeToString(sha256Sum[:])
383 |
384 | sha512Sum := sha512.Sum512(data)
385 | checksums.sha512Hex = hex.EncodeToString(sha512Sum[:])
386 | checksums.sha512Base64 = base64.StdEncoding.EncodeToString(sha512Sum[:])
387 |
388 | return checksums, nil
389 | }
390 |
--------------------------------------------------------------------------------
/internal/provider/data_source_archive_file_test.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | package archive
5 |
6 | import (
7 | "fmt"
8 | "os"
9 | "path/filepath"
10 | "testing"
11 |
12 | r "github.com/hashicorp/terraform-plugin-testing/helper/resource"
13 | "github.com/hashicorp/terraform-plugin-testing/terraform"
14 | )
15 |
16 | func TestDataSource_UpgradeFromVersion2_2_0_ContentConfig(t *testing.T) {
17 | td := t.TempDir()
18 |
19 | f := filepath.Join(td, "zip_file_acc_test_upgrade_content_config.zip")
20 |
21 | var fileSize string
22 |
23 | r.ParallelTest(t, r.TestCase{
24 | Steps: []r.TestStep{
25 | {
26 | ExternalProviders: map[string]r.ExternalProvider{
27 | "archive": {
28 | VersionConstraint: "2.2.0",
29 | Source: "hashicorp/archive",
30 | },
31 | },
32 | Config: testAccArchiveFileContentConfig("zip", f),
33 | Check: r.ComposeTestCheckFunc(
34 | testAccArchiveFileSize(f, &fileSize),
35 | r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
36 | r.TestCheckResourceAttr(
37 | "data.archive_file.foo", "output_base64sha256", "P7VckxoEiUO411WN3nwuS/yOBL4zsbVWkQU9E1I5H6c=",
38 | ),
39 | r.TestCheckResourceAttr(
40 | "data.archive_file.foo", "output_md5", "ea35f0444ea9a3d5641d8760bc2815cc",
41 | ),
42 | r.TestCheckResourceAttr(
43 | "data.archive_file.foo", "output_sha", "019c79c4dc14dbe1edb3e467b2de6a6aad148717",
44 | ),
45 | ),
46 | },
47 | {
48 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
49 | Config: testAccArchiveFileContentConfig("zip", f),
50 | Check: r.ComposeTestCheckFunc(
51 | r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
52 | r.TestCheckResourceAttr(
53 | "data.archive_file.foo", "output_base64sha256", "P7VckxoEiUO411WN3nwuS/yOBL4zsbVWkQU9E1I5H6c=",
54 | ),
55 | r.TestCheckResourceAttr(
56 | "data.archive_file.foo", "output_md5", "ea35f0444ea9a3d5641d8760bc2815cc",
57 | ),
58 | r.TestCheckResourceAttr(
59 | "data.archive_file.foo", "output_sha", "019c79c4dc14dbe1edb3e467b2de6a6aad148717",
60 | ),
61 | ),
62 | },
63 | },
64 | })
65 | }
66 |
67 | func TestDataSource_UpgradeFromVersion2_2_0_FileConfig(t *testing.T) {
68 | td := t.TempDir()
69 |
70 | f := filepath.Join(td, "zip_file_acc_test_upgrade_file_config.zip")
71 |
72 | var fileSize string
73 |
74 | r.ParallelTest(t, r.TestCase{
75 | Steps: []r.TestStep{
76 | {
77 | ExternalProviders: map[string]r.ExternalProvider{
78 | "archive": {
79 | VersionConstraint: "2.2.0",
80 | Source: "hashicorp/archive",
81 | },
82 | },
83 | Config: testAccArchiveFileFileConfig("zip", f),
84 | Check: r.ComposeTestCheckFunc(
85 | testAccArchiveFileSize(f, &fileSize),
86 | r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
87 | r.TestCheckResourceAttr(
88 | "data.archive_file.foo", "output_base64sha256", "UTE4f5cWfaR6p0HfOrLILxgvF8UUwiJTjTRwjQTgdWs=",
89 | ),
90 | r.TestCheckResourceAttr(
91 | "data.archive_file.foo", "output_md5", "59fbc9e62af3cbc2f588f97498240dae",
92 | ),
93 | r.TestCheckResourceAttr(
94 | "data.archive_file.foo", "output_sha", "ce4ee1450ab93ac86e11446649e44cea907b6568",
95 | ),
96 | ),
97 | },
98 | {
99 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
100 | Config: testAccArchiveFileFileConfig("zip", f),
101 | Check: r.ComposeTestCheckFunc(
102 | r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
103 | r.TestCheckResourceAttr(
104 | "data.archive_file.foo", "output_base64sha256", "UTE4f5cWfaR6p0HfOrLILxgvF8UUwiJTjTRwjQTgdWs=",
105 | ),
106 | r.TestCheckResourceAttr(
107 | "data.archive_file.foo", "output_md5", "59fbc9e62af3cbc2f588f97498240dae",
108 | ),
109 | r.TestCheckResourceAttr(
110 | "data.archive_file.foo", "output_sha", "ce4ee1450ab93ac86e11446649e44cea907b6568",
111 | ),
112 | ),
113 | },
114 | },
115 | })
116 | }
117 |
118 | func TestDataSource_UpgradeFromVersion2_2_0_DirConfig(t *testing.T) {
119 | td := t.TempDir()
120 |
121 | f := filepath.Join(td, "zip_file_acc_test_upgrade_dir_config.zip")
122 |
123 | var fileSize string
124 |
125 | r.ParallelTest(t, r.TestCase{
126 | Steps: []r.TestStep{
127 | {
128 | ExternalProviders: map[string]r.ExternalProvider{
129 | "archive": {
130 | VersionConstraint: "2.2.0",
131 | Source: "hashicorp/archive",
132 | },
133 | },
134 | Config: testAccArchiveFileDirConfig("zip", f),
135 | Check: r.ComposeTestCheckFunc(
136 | testAccArchiveFileSize(f, &fileSize),
137 | r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
138 | r.TestCheckResourceAttr(
139 | "data.archive_file.foo", "output_base64sha256", "ydB8wtq8nK9vQ77VH6YTwoHmyljK46jW+uIJSwCzNpo=",
140 | ),
141 | r.TestCheckResourceAttr(
142 | "data.archive_file.foo", "output_md5", "b73f64a383716070aa4a29563b8b14d4",
143 | ),
144 | r.TestCheckResourceAttr(
145 | "data.archive_file.foo", "output_sha", "76d20a402eefd1cfbdc47886abd4e0909616c191",
146 | ),
147 | ),
148 | },
149 | {
150 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
151 | Config: testAccArchiveFileDirConfig("zip", f),
152 | Check: r.ComposeTestCheckFunc(
153 | r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
154 | r.TestCheckResourceAttr(
155 | "data.archive_file.foo", "output_base64sha256", "ydB8wtq8nK9vQ77VH6YTwoHmyljK46jW+uIJSwCzNpo=",
156 | ),
157 | r.TestCheckResourceAttr(
158 | "data.archive_file.foo", "output_md5", "b73f64a383716070aa4a29563b8b14d4",
159 | ),
160 | r.TestCheckResourceAttr(
161 | "data.archive_file.foo", "output_sha", "76d20a402eefd1cfbdc47886abd4e0909616c191",
162 | ),
163 | ),
164 | },
165 | },
166 | })
167 | }
168 |
169 | func TestDataSource_UpgradeFromVersion2_2_0_DirExcludesConfig(t *testing.T) {
170 | td := t.TempDir()
171 |
172 | f := filepath.Join(td, "zip_file_acc_test_upgrade_dir_excludes.zip")
173 |
174 | var fileSize, outputSha string
175 |
176 | r.ParallelTest(t, r.TestCase{
177 | Steps: []r.TestStep{
178 | {
179 | ExternalProviders: map[string]r.ExternalProvider{
180 | "archive": {
181 | VersionConstraint: "2.2.0",
182 | Source: "hashicorp/archive",
183 | },
184 | },
185 | Config: testAccArchiveFileDirExcludesConfig("zip", f),
186 | Check: r.ComposeTestCheckFunc(
187 | testAccArchiveFileSize(f, &fileSize),
188 | testExtractResourceAttr("data.archive_file.foo", "output_sha", &outputSha),
189 | r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
190 | ),
191 | },
192 | {
193 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
194 | Config: testAccArchiveFileDirExcludesConfig("zip", f),
195 | Check: r.ComposeTestCheckFunc(
196 | r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
197 | r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_sha", &outputSha),
198 | ),
199 | },
200 | },
201 | })
202 | }
203 |
204 | func TestDataSource_UpgradeFromVersion2_2_0_SourceConfig(t *testing.T) {
205 | td := t.TempDir()
206 |
207 | f := filepath.Join(td, "zip_file_acc_test_upgrade_source.zip")
208 |
209 | var fileSize, outputSha string
210 |
211 | r.ParallelTest(t, r.TestCase{
212 | Steps: []r.TestStep{
213 | {
214 | ExternalProviders: map[string]r.ExternalProvider{
215 | "archive": {
216 | VersionConstraint: "2.2.0",
217 | Source: "hashicorp/archive",
218 | },
219 | },
220 | Config: testAccArchiveFileMultiSourceConfig("zip", f),
221 | Check: r.ComposeTestCheckFunc(
222 | testAccArchiveFileSize(f, &fileSize),
223 | testExtractResourceAttr("data.archive_file.foo", "output_sha", &outputSha),
224 | r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
225 | ),
226 | },
227 | {
228 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
229 | Config: testAccArchiveFileMultiSourceConfig("zip", f),
230 | Check: r.ComposeTestCheckFunc(
231 | r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
232 | r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_sha", &outputSha),
233 | ),
234 | },
235 | },
236 | })
237 | }
238 |
239 | func testAccArchiveFileSize(filename string, fileSize *string) r.TestCheckFunc {
240 | return func(s *terraform.State) error {
241 | *fileSize = ""
242 | fi, err := os.Stat(filename)
243 | if err != nil {
244 | return err
245 | }
246 | *fileSize = fmt.Sprintf("%d", fi.Size())
247 | return nil
248 | }
249 | }
250 |
251 | func testAccArchiveFileContentConfig(format, outputPath string) string {
252 | return fmt.Sprintf(`
253 | data "archive_file" "foo" {
254 | type = "%s"
255 | source_content = "This is some content"
256 | source_content_filename = "content.txt"
257 | output_path = "%s"
258 | }
259 | `, format, filepath.ToSlash(outputPath))
260 | }
261 |
262 | func testAccArchiveFileFileConfig(format, outputPath string) string {
263 | return fmt.Sprintf(`
264 | data "archive_file" "foo" {
265 | type = "%s"
266 | source_file = "test-fixtures/test-dir/test-file.txt"
267 | output_path = "%s"
268 | output_file_mode = "0666"
269 | }
270 | `, format, filepath.ToSlash(outputPath))
271 | }
272 |
273 | func testAccArchiveFileDirConfig(format, outputPath string) string {
274 | return fmt.Sprintf(`
275 | data "archive_file" "foo" {
276 | type = "%s"
277 | source_dir = "test-fixtures/test-dir/test-dir1"
278 | output_path = "%s"
279 | output_file_mode = "0666"
280 | }
281 | `, format, filepath.ToSlash(outputPath))
282 | }
283 |
284 | func testAccArchiveFileDirExcludesConfig(format, outputPath string) string {
285 | return fmt.Sprintf(`
286 | data "archive_file" "foo" {
287 | type = "%s"
288 | source_dir = "test-fixtures/test-dir/test-dir1"
289 | excludes = ["test-fixtures/test-dir/test-dir1/file2.txt"]
290 | output_path = "%s"
291 | }
292 | `, format, filepath.ToSlash(outputPath))
293 | }
294 |
295 | func testAccArchiveFileDirExcludesGlobConfig(format, outputPath string) string {
296 | return fmt.Sprintf(`
297 | data "archive_file" "foo" {
298 | type = "%s"
299 | source_dir = "test-fixtures/test-dir/test-dir1"
300 | excludes = ["test-fixtures/test-dir/test-dir1/file2.txt", "**/file[2-3].txt"]
301 | output_path = "%s"
302 | }
303 | `, format, filepath.ToSlash(outputPath))
304 | }
305 |
306 | func testAccArchiveFileMultiSourceConfig(format, outputPath string) string {
307 | return fmt.Sprintf(`
308 | data "archive_file" "foo" {
309 | type = "%s"
310 | source {
311 | filename = "content_1.txt"
312 | content = "This is the content for content_1.txt"
313 | }
314 | source {
315 | filename = "content_2.txt"
316 | content = "This is the content for content_2.txt"
317 | }
318 | output_path = "%s"
319 | }
320 | `, format, filepath.ToSlash(outputPath))
321 | }
322 |
323 | func testAccArchiveSourceConfigMissing(format string) string {
324 | return fmt.Sprintf(`
325 | data "archive_file" "foo" {
326 | type = "%s"
327 | output_path = "path"
328 | }
329 | `, format)
330 | }
331 |
332 | func testAccArchiveSourceConfigConflicting(format string) string {
333 | return fmt.Sprintf(`
334 | data "archive_file" "foo" {
335 | type = "%s"
336 | source {
337 | filename = "content_1.txt"
338 | content = "This is the content for content_1.txt"
339 | }
340 | source_dir = "test-fixtures/test-dir"
341 | output_path = "path"
342 | }
343 | `, format)
344 | }
345 |
346 | //nolint:unparam
347 | func testExtractResourceAttr(resourceName string, attributeName string, attributeValue *string) r.TestCheckFunc {
348 | return func(s *terraform.State) error {
349 | rs, ok := s.RootModule().Resources[resourceName]
350 | if !ok {
351 | return fmt.Errorf("resource name %s not found in state", resourceName)
352 | }
353 |
354 | attrValue, ok := rs.Primary.Attributes[attributeName]
355 | if !ok {
356 | return fmt.Errorf("attribute %s not found in resource %s state", attributeName, resourceName)
357 | }
358 |
359 | *attributeValue = attrValue
360 |
361 | return nil
362 | }
363 | }
364 |
--------------------------------------------------------------------------------
/internal/provider/provider.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | package archive
5 |
6 | import (
7 | "context"
8 |
9 | "github.com/hashicorp/terraform-plugin-framework/datasource"
10 | "github.com/hashicorp/terraform-plugin-framework/provider"
11 | "github.com/hashicorp/terraform-plugin-framework/resource"
12 | )
13 |
14 | func New() provider.Provider {
15 | return &archiveProvider{}
16 | }
17 |
18 | var _ provider.Provider = (*archiveProvider)(nil)
19 |
20 | type archiveProvider struct{}
21 |
22 | func (p *archiveProvider) Schema(context.Context, provider.SchemaRequest, *provider.SchemaResponse) {
23 | }
24 |
25 | func (p *archiveProvider) Configure(context.Context, provider.ConfigureRequest, *provider.ConfigureResponse) {
26 | }
27 |
28 | func (p *archiveProvider) Resources(context.Context) []func() resource.Resource {
29 | return []func() resource.Resource{
30 | NewArchiveFileResource,
31 | }
32 | }
33 |
34 | func (p *archiveProvider) DataSources(context.Context) []func() datasource.DataSource {
35 | return []func() datasource.DataSource{
36 | NewArchiveFileDataSource,
37 | }
38 | }
39 |
40 | func (p *archiveProvider) Metadata(_ context.Context, _ provider.MetadataRequest, resp *provider.MetadataResponse) {
41 | resp.TypeName = "archive"
42 | }
43 |
--------------------------------------------------------------------------------
/internal/provider/provider_test.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | package archive
5 |
6 | import (
7 | "github.com/hashicorp/terraform-plugin-framework/providerserver"
8 | "github.com/hashicorp/terraform-plugin-go/tfprotov5"
9 | )
10 |
11 | //nolint:unparam
12 | func protoV5ProviderFactories() map[string]func() (tfprotov5.ProviderServer, error) {
13 | return map[string]func() (tfprotov5.ProviderServer, error){
14 | "archive": providerserver.NewProtocol5WithError(New()),
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/internal/provider/resource_archive_file.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | package archive
5 |
6 | import (
7 | "context"
8 | "fmt"
9 | "os"
10 | "path"
11 |
12 | "github.com/hashicorp/terraform-plugin-framework-validators/resourcevalidator"
13 | "github.com/hashicorp/terraform-plugin-framework-validators/setvalidator"
14 | "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
15 | "github.com/hashicorp/terraform-plugin-framework/diag"
16 | fwpath "github.com/hashicorp/terraform-plugin-framework/path"
17 | "github.com/hashicorp/terraform-plugin-framework/resource"
18 | "github.com/hashicorp/terraform-plugin-framework/resource/schema"
19 | "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
20 | "github.com/hashicorp/terraform-plugin-framework/resource/schema/setplanmodifier"
21 | "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
22 | "github.com/hashicorp/terraform-plugin-framework/schema/validator"
23 | "github.com/hashicorp/terraform-plugin-framework/types"
24 | )
25 |
26 | var _ resource.Resource = (*archiveFileResource)(nil)
27 |
28 | func NewArchiveFileResource() resource.Resource {
29 | return &archiveFileResource{}
30 | }
31 |
32 | type archiveFileResource struct{}
33 |
34 | func (d *archiveFileResource) ConfigValidators(context.Context) []resource.ConfigValidator {
35 | return []resource.ConfigValidator{
36 | resourcevalidator.AtLeastOneOf(
37 | fwpath.MatchRoot("source"),
38 | fwpath.MatchRoot("source_content_filename"),
39 | fwpath.MatchRoot("source_file"),
40 | fwpath.MatchRoot("source_dir"),
41 | ),
42 | }
43 | }
44 |
45 | func (d *archiveFileResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
46 | resp.Schema = schema.Schema{
47 | Description: "Generates an archive from content, a file, or directory of files.",
48 | Blocks: map[string]schema.Block{
49 | "source": schema.SetNestedBlock{
50 | Description: "Specifies attributes of a single source file to include into the archive. " +
51 | "One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, " +
52 | "or `source_dir` must be specified.",
53 | NestedObject: schema.NestedBlockObject{
54 | Attributes: map[string]schema.Attribute{
55 | "content": schema.StringAttribute{
56 | Description: "Add this content to the archive with `filename` as the filename.",
57 | Required: true,
58 | PlanModifiers: []planmodifier.String{
59 | stringplanmodifier.RequiresReplace(),
60 | },
61 | },
62 | "filename": schema.StringAttribute{
63 | Description: "Set this as the filename when declaring a `source`.",
64 | Required: true,
65 | PlanModifiers: []planmodifier.String{
66 | stringplanmodifier.RequiresReplace(),
67 | },
68 | },
69 | },
70 | },
71 | Validators: []validator.Set{
72 | setvalidator.ConflictsWith(
73 | fwpath.MatchRoot("source_file"),
74 | fwpath.MatchRoot("source_dir"),
75 | fwpath.MatchRoot("source_content"),
76 | fwpath.MatchRoot("source_content_filename"),
77 | ),
78 | },
79 | },
80 | },
81 | Attributes: map[string]schema.Attribute{
82 | "id": schema.StringAttribute{
83 | Description: "The sha1 checksum hash of the output.",
84 | Computed: true,
85 | },
86 | "type": schema.StringAttribute{
87 | Description: "The type of archive to generate. NOTE: `zip` and `tar.gz` is supported.",
88 | Required: true,
89 | PlanModifiers: []planmodifier.String{
90 | stringplanmodifier.RequiresReplace(),
91 | },
92 | },
93 | "source_content": schema.StringAttribute{
94 | Description: "Add only this content to the archive with `source_content_filename` as the filename. " +
95 | "One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, " +
96 | "or `source_dir` must be specified.",
97 | Optional: true,
98 | Validators: []validator.String{
99 | stringvalidator.ConflictsWith(
100 | fwpath.MatchRoot("source_file"),
101 | fwpath.MatchRoot("source_dir"),
102 | ),
103 | },
104 | PlanModifiers: []planmodifier.String{
105 | stringplanmodifier.RequiresReplace(),
106 | },
107 | },
108 | "source_content_filename": schema.StringAttribute{
109 | Description: "Set this as the filename when using `source_content`. " +
110 | "One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, " +
111 | "or `source_dir` must be specified.",
112 | Optional: true,
113 | Validators: []validator.String{
114 | stringvalidator.ConflictsWith(
115 | fwpath.MatchRoot("source_file"),
116 | fwpath.MatchRoot("source_dir"),
117 | ),
118 | },
119 | PlanModifiers: []planmodifier.String{
120 | stringplanmodifier.RequiresReplace(),
121 | },
122 | },
123 | "source_file": schema.StringAttribute{
124 | Description: "Package this file into the archive. " +
125 | "One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, " +
126 | "or `source_dir` must be specified.",
127 | Optional: true,
128 | Validators: []validator.String{
129 | stringvalidator.ConflictsWith(
130 | fwpath.MatchRoot("source_dir"),
131 | fwpath.MatchRoot("source_content"),
132 | fwpath.MatchRoot("source_content_filename"),
133 | ),
134 | },
135 | PlanModifiers: []planmodifier.String{
136 | stringplanmodifier.RequiresReplace(),
137 | },
138 | },
139 | "source_dir": schema.StringAttribute{
140 | Description: "Package entire contents of this directory into the archive. " +
141 | "One and only one of `source`, `source_content_filename` (with `source_content`), `source_file`, " +
142 | "or `source_dir` must be specified.",
143 | Optional: true,
144 | Validators: []validator.String{
145 | stringvalidator.ConflictsWith(
146 | fwpath.MatchRoot("source_file"),
147 | fwpath.MatchRoot("source_content"),
148 | fwpath.MatchRoot("source_content_filename"),
149 | ),
150 | },
151 | PlanModifiers: []planmodifier.String{
152 | stringplanmodifier.RequiresReplace(),
153 | },
154 | },
155 | "excludes": schema.SetAttribute{
156 | Description: "Specify files/directories to ignore when reading the `source_dir`. " +
157 | "Supports glob file matching patterns including doublestar/globstar (`**`) patterns.",
158 | ElementType: types.StringType,
159 | Optional: true,
160 | Validators: []validator.Set{
161 | setvalidator.ConflictsWith(
162 | fwpath.MatchRoot("source_file"),
163 | fwpath.MatchRoot("source_content"),
164 | fwpath.MatchRoot("source_content_filename"),
165 | ),
166 | },
167 | PlanModifiers: []planmodifier.Set{
168 | setplanmodifier.RequiresReplace(),
169 | },
170 | },
171 | "exclude_symlink_directories": schema.BoolAttribute{
172 | Optional: true,
173 | Description: "Boolean flag indicating whether symbolically linked directories should be excluded during " +
174 | "the creation of the archive. Defaults to `false`.",
175 | },
176 | "output_path": schema.StringAttribute{
177 | Description: "The output of the archive file.",
178 | Required: true,
179 | PlanModifiers: []planmodifier.String{
180 | stringplanmodifier.RequiresReplace(),
181 | },
182 | },
183 | "output_size": schema.Int64Attribute{
184 | Description: "The byte size of the output archive file.",
185 | Computed: true,
186 | },
187 | "output_file_mode": schema.StringAttribute{
188 | Description: "String that specifies the octal file mode for all archived files. For example: `\"0666\"`. " +
189 | "Setting this will ensure that cross platform usage of this module will not vary the modes of archived " +
190 | "files (and ultimately checksums) resulting in more deterministic behavior.",
191 | Optional: true,
192 | PlanModifiers: []planmodifier.String{
193 | stringplanmodifier.RequiresReplace(),
194 | },
195 | },
196 | "output_md5": schema.StringAttribute{
197 | Description: "MD5 of output file",
198 | Computed: true,
199 | },
200 | "output_sha": schema.StringAttribute{
201 | Description: "SHA1 checksum of output file",
202 | Computed: true,
203 | },
204 | "output_sha256": schema.StringAttribute{
205 | Description: "SHA256 checksum of output file",
206 | Computed: true,
207 | },
208 | "output_base64sha256": schema.StringAttribute{
209 | Description: "Base64 Encoded SHA256 checksum of output file",
210 | Computed: true,
211 | },
212 | "output_sha512": schema.StringAttribute{
213 | Description: "SHA512 checksum of output file",
214 | Computed: true,
215 | },
216 | "output_base64sha512": schema.StringAttribute{
217 | Description: "Base64 Encoded SHA512 checksum of output file",
218 | Computed: true,
219 | },
220 | },
221 | }
222 | }
223 |
224 | func (d *archiveFileResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
225 | var model fileModel
226 | diags := req.Plan.Get(ctx, &model)
227 | resp.Diagnostics.Append(diags...)
228 | if resp.Diagnostics.HasError() {
229 | return
230 | }
231 |
232 | resp.Diagnostics.Append(updateModel(ctx, &model)...)
233 |
234 | diags = resp.State.Set(ctx, model)
235 | resp.Diagnostics.Append(diags...)
236 | }
237 |
238 | func (d *archiveFileResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
239 | var model fileModel
240 | diags := req.State.Get(ctx, &model)
241 | resp.Diagnostics.Append(diags...)
242 | if resp.Diagnostics.HasError() {
243 | return
244 | }
245 |
246 | resp.Diagnostics.Append(updateModel(ctx, &model)...)
247 |
248 | diags = resp.State.Set(ctx, model)
249 | resp.Diagnostics.Append(diags...)
250 | }
251 |
252 | func updateModel(ctx context.Context, model *fileModel) diag.Diagnostics {
253 | var diags diag.Diagnostics
254 | outputPath := model.OutputPath.ValueString()
255 |
256 | outputDirectory := path.Dir(outputPath)
257 | if outputDirectory != "" {
258 | if _, err := os.Stat(outputDirectory); err != nil {
259 | if err := os.MkdirAll(outputDirectory, 0755); err != nil {
260 | diags.AddError(
261 | "Output path error",
262 | fmt.Sprintf("error creating output path: %s", err),
263 | )
264 | return diags
265 | }
266 | }
267 | }
268 |
269 | if err := archive(ctx, *model); err != nil {
270 | diags.AddError(
271 | "Archive creation error",
272 | fmt.Sprintf("error creating archive: %s", err),
273 | )
274 | return diags
275 | }
276 |
277 | // Generate archived file stats
278 | fi, err := os.Stat(outputPath)
279 | if err != nil {
280 | diags.AddError(
281 | "Archive output error",
282 | fmt.Sprintf("error reading output: %s", err),
283 | )
284 | return diags
285 | }
286 | model.OutputSize = types.Int64Value(fi.Size())
287 |
288 | checksums, err := genFileChecksums(outputPath)
289 | if err != nil {
290 | diags.AddError(
291 | "Hash generation error",
292 | fmt.Sprintf("error generating hashed: %s", err),
293 | )
294 | return diags
295 | }
296 | model.OutputMd5 = types.StringValue(checksums.md5Hex)
297 | model.OutputSha = types.StringValue(checksums.sha1Hex)
298 | model.OutputSha256 = types.StringValue(checksums.sha256Hex)
299 | model.OutputBase64Sha256 = types.StringValue(checksums.sha256Base64)
300 | model.OutputSha512 = types.StringValue(checksums.sha512Hex)
301 | model.OutputBase64Sha512 = types.StringValue(checksums.sha512Base64)
302 |
303 | model.ID = types.StringValue(checksums.sha1Hex)
304 |
305 | return diags
306 | }
307 |
308 | func (d *archiveFileResource) Update(_ context.Context, _ resource.UpdateRequest, _ *resource.UpdateResponse) {
309 | }
310 |
311 | func (d *archiveFileResource) Delete(_ context.Context, _ resource.DeleteRequest, _ *resource.DeleteResponse) {
312 | }
313 |
314 | func (d *archiveFileResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
315 | resp.TypeName = req.ProviderTypeName + "_file"
316 | }
317 |
--------------------------------------------------------------------------------
/internal/provider/resource_archive_file_test.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | package archive
5 |
6 | import (
7 | "fmt"
8 | "os"
9 | "path/filepath"
10 | "regexp"
11 | "testing"
12 |
13 | r "github.com/hashicorp/terraform-plugin-testing/helper/resource"
14 | )
15 |
16 | func TestResource_UpgradeFromVersion2_2_0_ContentConfig(t *testing.T) {
17 | td := t.TempDir()
18 |
19 | f := filepath.Join(td, "zip_file_acc_test_upgrade_content_config.zip")
20 |
21 | var fileSize string
22 |
23 | r.ParallelTest(t, r.TestCase{
24 | Steps: []r.TestStep{
25 | {
26 | ExternalProviders: map[string]r.ExternalProvider{
27 | "archive": {
28 | VersionConstraint: "2.2.0",
29 | Source: "hashicorp/archive",
30 | },
31 | },
32 | Config: testAccArchiveFileResourceContentConfig("zip", f),
33 | Check: r.ComposeTestCheckFunc(
34 | testAccArchiveFileSize(f, &fileSize),
35 | r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
36 | r.TestCheckResourceAttr(
37 | "archive_file.foo", "output_base64sha256", "P7VckxoEiUO411WN3nwuS/yOBL4zsbVWkQU9E1I5H6c=",
38 | ),
39 | r.TestCheckResourceAttr(
40 | "archive_file.foo", "output_md5", "ea35f0444ea9a3d5641d8760bc2815cc",
41 | ),
42 | r.TestCheckResourceAttr(
43 | "archive_file.foo", "output_sha", "019c79c4dc14dbe1edb3e467b2de6a6aad148717",
44 | ),
45 | ),
46 | },
47 | {
48 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
49 | Config: testAccArchiveFileResourceContentConfig("zip", f),
50 | PlanOnly: true,
51 | },
52 | {
53 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
54 | Config: testAccArchiveFileResourceContentConfig("zip", f),
55 | Check: r.ComposeTestCheckFunc(
56 | r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
57 | r.TestCheckResourceAttr(
58 | "archive_file.foo", "output_base64sha256", "P7VckxoEiUO411WN3nwuS/yOBL4zsbVWkQU9E1I5H6c=",
59 | ),
60 | r.TestCheckResourceAttr(
61 | "archive_file.foo", "output_md5", "ea35f0444ea9a3d5641d8760bc2815cc",
62 | ),
63 | r.TestCheckResourceAttr(
64 | "archive_file.foo", "output_sha", "019c79c4dc14dbe1edb3e467b2de6a6aad148717",
65 | ),
66 | ),
67 | },
68 | },
69 | })
70 | }
71 |
72 | func TestResource_UpgradeFromVersion2_2_0_FileConfig(t *testing.T) {
73 | td := t.TempDir()
74 |
75 | f := filepath.Join(td, "zip_file_acc_test_upgrade_file_config.zip")
76 |
77 | var fileSize string
78 |
79 | r.ParallelTest(t, r.TestCase{
80 | Steps: []r.TestStep{
81 | {
82 | ExternalProviders: map[string]r.ExternalProvider{
83 | "archive": {
84 | VersionConstraint: "2.2.0",
85 | Source: "hashicorp/archive",
86 | },
87 | },
88 | Config: testAccArchiveFileResourceFileConfig("zip", f),
89 | Check: r.ComposeTestCheckFunc(
90 | testAccArchiveFileSize(f, &fileSize),
91 | r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
92 | r.TestCheckResourceAttr(
93 | "archive_file.foo", "output_base64sha256", "UTE4f5cWfaR6p0HfOrLILxgvF8UUwiJTjTRwjQTgdWs=",
94 | ),
95 | r.TestCheckResourceAttr(
96 | "archive_file.foo", "output_md5", "59fbc9e62af3cbc2f588f97498240dae",
97 | ),
98 | r.TestCheckResourceAttr(
99 | "archive_file.foo", "output_sha", "ce4ee1450ab93ac86e11446649e44cea907b6568",
100 | ),
101 | ),
102 | },
103 | {
104 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
105 | Config: testAccArchiveFileResourceFileConfig("zip", f),
106 | PlanOnly: true,
107 | },
108 | {
109 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
110 | Config: testAccArchiveFileResourceFileConfig("zip", f),
111 | Check: r.ComposeTestCheckFunc(
112 | r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
113 | r.TestCheckResourceAttr(
114 | "archive_file.foo", "output_base64sha256", "UTE4f5cWfaR6p0HfOrLILxgvF8UUwiJTjTRwjQTgdWs=",
115 | ),
116 | r.TestCheckResourceAttr(
117 | "archive_file.foo", "output_md5", "59fbc9e62af3cbc2f588f97498240dae",
118 | ),
119 | r.TestCheckResourceAttr(
120 | "archive_file.foo", "output_sha", "ce4ee1450ab93ac86e11446649e44cea907b6568",
121 | ),
122 | ),
123 | },
124 | },
125 | })
126 | }
127 |
128 | func TestResource_UpgradeFromVersion2_2_0_DirConfig(t *testing.T) {
129 | td := t.TempDir()
130 |
131 | f := filepath.Join(td, "zip_file_acc_test_upgrade_dir_config.zip")
132 |
133 | var fileSize string
134 |
135 | r.ParallelTest(t, r.TestCase{
136 | Steps: []r.TestStep{
137 | {
138 | ExternalProviders: map[string]r.ExternalProvider{
139 | "archive": {
140 | VersionConstraint: "2.2.0",
141 | Source: "hashicorp/archive",
142 | },
143 | },
144 | Config: testAccArchiveFileResourceDirConfig("zip", f),
145 | Check: r.ComposeTestCheckFunc(
146 | testAccArchiveFileSize(f, &fileSize),
147 | r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
148 | r.TestCheckResourceAttr(
149 | "archive_file.foo", "output_base64sha256", "ydB8wtq8nK9vQ77VH6YTwoHmyljK46jW+uIJSwCzNpo=",
150 | ),
151 | r.TestCheckResourceAttr(
152 | "archive_file.foo", "output_md5", "b73f64a383716070aa4a29563b8b14d4",
153 | ),
154 | r.TestCheckResourceAttr(
155 | "archive_file.foo", "output_sha", "76d20a402eefd1cfbdc47886abd4e0909616c191",
156 | ),
157 | ),
158 | },
159 | {
160 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
161 | Config: testAccArchiveFileResourceDirConfig("zip", f),
162 | PlanOnly: true,
163 | },
164 | {
165 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
166 | Config: testAccArchiveFileResourceDirConfig("zip", f),
167 | Check: r.ComposeTestCheckFunc(
168 | r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
169 | r.TestCheckResourceAttr(
170 | "archive_file.foo", "output_base64sha256", "ydB8wtq8nK9vQ77VH6YTwoHmyljK46jW+uIJSwCzNpo=",
171 | ),
172 | r.TestCheckResourceAttr(
173 | "archive_file.foo", "output_md5", "b73f64a383716070aa4a29563b8b14d4",
174 | ),
175 | r.TestCheckResourceAttr(
176 | "archive_file.foo", "output_sha", "76d20a402eefd1cfbdc47886abd4e0909616c191",
177 | ),
178 | ),
179 | },
180 | },
181 | })
182 | }
183 |
184 | func TestResource_UpgradeFromVersion2_2_0_DirExcludesConfig(t *testing.T) {
185 | td := t.TempDir()
186 |
187 | f := filepath.Join(td, "zip_file_acc_test_upgrade_dir_excludes.zip")
188 |
189 | var fileSize, outputSha string
190 |
191 | r.ParallelTest(t, r.TestCase{
192 | Steps: []r.TestStep{
193 | {
194 | ExternalProviders: map[string]r.ExternalProvider{
195 | "archive": {
196 | VersionConstraint: "2.2.0",
197 | Source: "hashicorp/archive",
198 | },
199 | },
200 | Config: testAccArchiveFileResourceDirExcludesConfig("zip", f),
201 | Check: r.ComposeTestCheckFunc(
202 | testAccArchiveFileSize(f, &fileSize),
203 | testExtractResourceAttr("archive_file.foo", "output_sha", &outputSha),
204 | r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
205 | ),
206 | },
207 | {
208 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
209 | Config: testAccArchiveFileResourceDirExcludesConfig("zip", f),
210 | PlanOnly: true,
211 | },
212 | {
213 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
214 | Config: testAccArchiveFileResourceDirExcludesConfig("zip", f),
215 | Check: r.ComposeTestCheckFunc(
216 | r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
217 | r.TestCheckResourceAttrPtr("archive_file.foo", "output_sha", &outputSha),
218 | ),
219 | },
220 | },
221 | })
222 | }
223 |
224 | func TestResource_UpgradeFromVersion2_2_0_SourceConfig(t *testing.T) {
225 | td := t.TempDir()
226 |
227 | f := filepath.Join(td, "zip_file_acc_test_upgrade_source.zip")
228 |
229 | var fileSize, outputSha string
230 |
231 | r.ParallelTest(t, r.TestCase{
232 | Steps: []r.TestStep{
233 | {
234 | ExternalProviders: map[string]r.ExternalProvider{
235 | "archive": {
236 | VersionConstraint: "2.2.0",
237 | Source: "hashicorp/archive",
238 | },
239 | },
240 | Config: testAccArchiveFileResourceMultiSourceConfig("zip", f),
241 | Check: r.ComposeTestCheckFunc(
242 | testAccArchiveFileSize(f, &fileSize),
243 | testExtractResourceAttr("archive_file.foo", "output_sha", &outputSha),
244 | r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
245 | ),
246 | },
247 | {
248 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
249 | Config: testAccArchiveFileResourceMultiSourceConfig("zip", f),
250 | PlanOnly: true,
251 | },
252 | {
253 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
254 | Config: testAccArchiveFileResourceMultiSourceConfig("zip", f),
255 | Check: r.ComposeTestCheckFunc(
256 | r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize),
257 | r.TestCheckResourceAttrPtr("archive_file.foo", "output_sha", &outputSha),
258 | ),
259 | },
260 | },
261 | })
262 | }
263 |
264 | func TestResource_SourceConfigMissing(t *testing.T) {
265 | r.ParallelTest(t, r.TestCase{
266 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
267 | Steps: []r.TestStep{
268 | {
269 | Config: testResourceSourceConfigMissing("zip"),
270 | ExpectError: regexp.MustCompile(`.*At least one of these attributes must be configured:\n\[source,source_content_filename,source_file,source_dir]`),
271 | },
272 | },
273 | })
274 | }
275 |
276 | func TestResource_SourceConfigConflicting(t *testing.T) {
277 | r.ParallelTest(t, r.TestCase{
278 | ProtoV5ProviderFactories: protoV5ProviderFactories(),
279 | Steps: []r.TestStep{
280 | {
281 | Config: testResourceSourceConfigConflicting("zip"),
282 | ExpectError: regexp.MustCompile(`.*Attribute "source_dir" cannot be specified when "source" is specified`),
283 | },
284 | },
285 | })
286 | }
287 |
288 | func alterFileContents(content, path string) {
289 | f, err := os.Create(path)
290 | if err != nil {
291 | panic(fmt.Sprintf("error creating file: %s", err))
292 | }
293 |
294 | defer f.Close()
295 |
296 | _, err = f.Write([]byte(content))
297 | if err != nil {
298 | panic(fmt.Sprintf("error writing file: %s", err))
299 | }
300 | }
301 |
302 | func testAccArchiveFileResourceContentConfig(format, outputPath string) string {
303 | return fmt.Sprintf(`
304 | resource "archive_file" "foo" {
305 | type = "%s"
306 | source_content = "This is some content"
307 | source_content_filename = "content.txt"
308 | output_path = "%s"
309 | }
310 | `, format, filepath.ToSlash(outputPath))
311 | }
312 |
313 | func testAccArchiveFileResourceFileConfig(format, outputPath string) string {
314 | return fmt.Sprintf(`
315 | resource "archive_file" "foo" {
316 | type = "%s"
317 | source_file = "test-fixtures/test-dir/test-file.txt"
318 | output_path = "%s"
319 | output_file_mode = "0666"
320 | }
321 | `, format, filepath.ToSlash(outputPath))
322 | }
323 |
324 | func testAccArchiveFileResourceFileSourceFileConfig(format, sourceFile, outputPath string) string {
325 | return fmt.Sprintf(`
326 | resource "archive_file" "foo" {
327 | type = "%s"
328 | source_file = "%s"
329 | output_path = "%s"
330 | output_file_mode = "0666"
331 | }
332 | `, format,
333 | filepath.ToSlash(sourceFile),
334 | filepath.ToSlash(outputPath))
335 | }
336 |
337 | func testAccArchiveFileResourceDirConfig(format, outputPath string) string {
338 | return fmt.Sprintf(`
339 | resource "archive_file" "foo" {
340 | type = "%s"
341 | source_dir = "test-fixtures/test-dir/test-dir1"
342 | output_path = "%s"
343 | output_file_mode = "0666"
344 | }
345 | `, format, filepath.ToSlash(outputPath))
346 | }
347 |
348 | func testAccArchiveFileResourceDirExcludesConfig(format, outputPath string) string {
349 | return fmt.Sprintf(`
350 | resource "archive_file" "foo" {
351 | type = "%s"
352 | source_dir = "test-fixtures/test-dir"
353 | excludes = ["test-fixtures/test-dir/file2.txt"]
354 | output_path = "%s"
355 | }
356 | `, format, filepath.ToSlash(outputPath))
357 | }
358 |
359 | func testAccArchiveFileResourceDirExcludesGlobConfig(format, outputPath string) string {
360 | return fmt.Sprintf(`
361 | resource "archive_file" "foo" {
362 | type = "%s"
363 | source_dir = "test-fixtures/test-dir"
364 | excludes = ["test-fixtures/test-dir/file2.txt", "**/file[2-3].txt"]
365 | output_path = "%s"
366 | }
367 | `, format, filepath.ToSlash(outputPath))
368 | }
369 |
370 | func testAccArchiveFileResourceMultiSourceConfig(format, outputPath string) string {
371 | return fmt.Sprintf(`
372 | resource "archive_file" "foo" {
373 | type = "%s"
374 | source {
375 | filename = "content_1.txt"
376 | content = "This is the content for content_1.txt"
377 | }
378 | source {
379 | filename = "content_2.txt"
380 | content = "This is the content for content_2.txt"
381 | }
382 | output_path = "%s"
383 | }
384 | `, format, filepath.ToSlash(outputPath))
385 | }
386 |
387 | func testResourceSourceConfigMissing(format string) string {
388 | return fmt.Sprintf(`
389 | resource "archive_file" "foo" {
390 | type = "%s"
391 | output_path = "path"
392 | }
393 | `, format)
394 | }
395 |
396 | func testResourceSourceConfigConflicting(format string) string {
397 | return fmt.Sprintf(`
398 | resource "archive_file" "foo" {
399 | type = "%s"
400 | source {
401 | filename = "content_1.txt"
402 | content = "This is the content for content_1.txt"
403 | }
404 | source_dir = "test-fixtures/test-dir"
405 | output_path = "path"
406 | }
407 | `, format)
408 | }
409 |
--------------------------------------------------------------------------------
/internal/provider/tar_archiver.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | package archive
5 |
6 | import (
7 | "archive/tar"
8 | "compress/gzip"
9 | "errors"
10 | "fmt"
11 | "io"
12 | "os"
13 | "path/filepath"
14 | "sort"
15 | "strconv"
16 | "time"
17 | )
18 |
19 | type TarCompressionType int
20 |
21 | const (
22 | TarCompressionGz TarCompressionType = iota
23 | )
24 |
25 | type TarArchiver struct {
26 | compression TarCompressionType
27 | filepath string
28 | outputFileMode string // Default value "" means unset
29 | fileWriter *os.File
30 | tarWriter *tar.Writer
31 | compressionWriter io.WriteCloser
32 | }
33 |
34 | func NewTarGzArchiver(filepath string) Archiver {
35 | return NewTarArchiver(filepath, TarCompressionGz)
36 | }
37 |
38 | func NewTarArchiver(filepath string, compression TarCompressionType) Archiver {
39 | return &TarArchiver{
40 | filepath: filepath,
41 | compression: compression,
42 | }
43 | }
44 |
45 | func (a *TarArchiver) ArchiveContent(content []byte, infilename string) error {
46 | if err := a.open(); err != nil {
47 | return err
48 | }
49 | defer a.close()
50 |
51 | return a.addContent(content, &tar.Header{
52 | Name: infilename,
53 | Size: int64(len(content)),
54 | ModTime: time.Time{},
55 | })
56 | }
57 |
58 | func (a *TarArchiver) ArchiveFile(infilename string) error {
59 | fi, err := assertValidFile(infilename)
60 | if err != nil {
61 | return err
62 | }
63 |
64 | if err := a.open(); err != nil {
65 | return err
66 | }
67 | defer a.close()
68 |
69 | header := &tar.Header{
70 | Name: filepath.ToSlash(fi.Name()),
71 | Size: fi.Size(),
72 | Mode: int64(fi.Mode()),
73 | ModTime: time.Time{},
74 | }
75 |
76 | if err := a.addFile(infilename, header); err != nil {
77 | return err
78 | }
79 |
80 | return err
81 | }
82 |
83 | func (a *TarArchiver) ArchiveDir(indirname string, opts ArchiveDirOpts) error {
84 | err := assertValidDir(indirname)
85 | if err != nil {
86 | return err
87 | }
88 |
89 | // ensure exclusions are OS compatible paths
90 | for i := range opts.Excludes {
91 | opts.Excludes[i] = filepath.FromSlash(opts.Excludes[i])
92 | }
93 |
94 | // Determine whether an empty archive would be generated.
95 | isArchiveEmpty := true
96 |
97 | err = filepath.Walk(indirname, a.createWalkFunc("", indirname, opts, &isArchiveEmpty, true))
98 | if err != nil {
99 | return err
100 | }
101 |
102 | // Return an error if an empty archive would be generated.
103 | if isArchiveEmpty {
104 | return fmt.Errorf("archive has not been created as it would be empty")
105 | }
106 |
107 | if err := a.open(); err != nil {
108 | return err
109 | }
110 | defer a.close()
111 |
112 | return filepath.Walk(indirname, a.createWalkFunc("", indirname, opts, &isArchiveEmpty, false))
113 | }
114 |
115 | func (a *TarArchiver) createWalkFunc(basePath, indirname string, opts ArchiveDirOpts, isArchiveEmpty *bool, dryRun bool) func(path string, info os.FileInfo, err error) error {
116 | return func(path string, info os.FileInfo, err error) error {
117 | if err != nil {
118 | return fmt.Errorf("error encountered during file walk: %s", err)
119 | }
120 |
121 | relname, err := filepath.Rel(indirname, path)
122 | if err != nil {
123 | return fmt.Errorf("error relativizing file for archival: %s", err)
124 | }
125 |
126 | archivePath := filepath.Join(basePath, relname)
127 |
128 | isMatch, err := checkMatch(archivePath, opts.Excludes)
129 | if err != nil {
130 | return fmt.Errorf("error checking excludes matches: %w", err)
131 | }
132 |
133 | if info.IsDir() {
134 | if isMatch {
135 | return filepath.SkipDir
136 | }
137 | return nil
138 | }
139 |
140 | if isMatch {
141 | return nil
142 | }
143 |
144 | if err != nil {
145 | return err
146 | }
147 |
148 | if info.Mode()&os.ModeSymlink == os.ModeSymlink {
149 | realPath, err := filepath.EvalSymlinks(path)
150 | if err != nil {
151 | return err
152 | }
153 |
154 | realInfo, err := os.Stat(realPath)
155 | if err != nil {
156 | return err
157 | }
158 |
159 | if realInfo.IsDir() {
160 | if !opts.ExcludeSymlinkDirectories {
161 | return filepath.Walk(realPath, a.createWalkFunc(archivePath, realPath, opts, isArchiveEmpty, dryRun))
162 | } else {
163 | return filepath.SkipDir
164 | }
165 | }
166 |
167 | info = realInfo
168 | }
169 |
170 | *isArchiveEmpty = false
171 |
172 | if dryRun {
173 | return nil
174 | }
175 |
176 | header := &tar.Header{
177 | Name: filepath.ToSlash(archivePath),
178 | Size: info.Size(),
179 | Mode: int64(info.Mode()),
180 | ModTime: time.Time{},
181 | }
182 |
183 | return a.addFile(path, header)
184 | }
185 | }
186 |
187 | func (a *TarArchiver) ArchiveMultiple(content map[string][]byte) error {
188 | if err := a.open(); err != nil {
189 | return err
190 | }
191 | defer a.close()
192 |
193 | // Ensure files are processed in the same order so hashes don't change
194 | keys := make([]string, len(content))
195 | i := 0
196 | for k := range content {
197 | keys[i] = k
198 | i++
199 | }
200 | sort.Strings(keys)
201 |
202 | for _, filename := range keys {
203 | header := &tar.Header{
204 | Name: filepath.ToSlash(filename),
205 | Size: int64(len(content[filename])),
206 | ModTime: time.Time{},
207 | }
208 |
209 | if err := a.addContent(content[filename], header); err != nil {
210 | return err
211 | }
212 | }
213 | return nil
214 | }
215 |
216 | func (a *TarArchiver) SetOutputFileMode(outputFileMode string) {
217 | a.outputFileMode = outputFileMode
218 | }
219 |
220 | func (a *TarArchiver) open() error {
221 | var err error
222 |
223 | a.fileWriter, err = os.Create(filepath.ToSlash(a.filepath))
224 | if err != nil {
225 | return err
226 | }
227 |
228 | switch a.compression {
229 | case TarCompressionGz:
230 | a.compressionWriter = gzip.NewWriter(a.fileWriter)
231 | }
232 |
233 | a.tarWriter = tar.NewWriter(a.compressionWriter)
234 | return nil
235 | }
236 |
237 | func (a *TarArchiver) close() {
238 | if a.tarWriter != nil {
239 | err := a.tarWriter.Close()
240 | if err != nil {
241 | fmt.Printf("error closing tarwriter : %s\n\n", err)
242 | }
243 | a.tarWriter = nil
244 | }
245 | if a.compressionWriter != nil {
246 | err := a.compressionWriter.Close()
247 | if err != nil {
248 | fmt.Printf("error closing compressionWriter : %s\n\n", err)
249 | }
250 | a.compressionWriter = nil
251 | }
252 | if a.fileWriter != nil {
253 | err := a.fileWriter.Close()
254 | if err != nil {
255 | fmt.Printf("error closing fileWriter: %s\n\n", err)
256 | }
257 | a.fileWriter = nil
258 | }
259 | }
260 |
261 | func (a *TarArchiver) addFile(filePath string, header *tar.Header) error {
262 | if header == nil {
263 | return fmt.Errorf("tar.Header is nil")
264 | }
265 |
266 | file, err := os.Open(filePath)
267 | if err != nil {
268 | return fmt.Errorf("could not open file '%s', got error '%w'", filePath, err)
269 | }
270 | defer file.Close()
271 |
272 | if a.outputFileMode != "" {
273 | fileMode, err := strconv.ParseInt(a.outputFileMode, 0, 32)
274 | if err != nil {
275 | return fmt.Errorf("error parsing output_file_mode value: %s", a.outputFileMode)
276 | }
277 | header.Mode = fileMode
278 | }
279 |
280 | err = a.tarWriter.WriteHeader(header)
281 | if err != nil {
282 | return fmt.Errorf("could not write header for file '%s', got error '%w'", filePath, err)
283 | }
284 |
285 | _, err = io.Copy(a.tarWriter, file)
286 | if err != nil {
287 | return fmt.Errorf("error reading file for archival: %s", err)
288 | }
289 |
290 | return nil
291 | }
292 |
293 | func (a *TarArchiver) addContent(content []byte, header *tar.Header) error {
294 | if header == nil {
295 | return errors.New("tar.Header is nil")
296 | }
297 |
298 | if a.outputFileMode != "" {
299 | filemode, err := strconv.ParseInt(a.outputFileMode, 0, 32)
300 | if err != nil {
301 | return fmt.Errorf("error parsing output_file_mode value: %s", a.outputFileMode)
302 | }
303 | header.Mode = filemode
304 | }
305 |
306 | if err := a.tarWriter.WriteHeader(header); err != nil {
307 | return fmt.Errorf("could not write header, got error '%w'", err)
308 | }
309 |
310 | _, err := a.tarWriter.Write(content)
311 | if err != nil {
312 | return fmt.Errorf("could not copy data to the tarball, got error '%w'", err)
313 | }
314 |
315 | return nil
316 | }
317 |
--------------------------------------------------------------------------------
/internal/provider/test-fixtures/test-dir-with-symlink-dir/test-symlink-dir:
--------------------------------------------------------------------------------
1 | ../test-dir/test-dir2
--------------------------------------------------------------------------------
/internal/provider/test-fixtures/test-dir-with-symlink-file/test-file.txt:
--------------------------------------------------------------------------------
1 | This is test content
--------------------------------------------------------------------------------
/internal/provider/test-fixtures/test-dir-with-symlink-file/test-symlink.txt:
--------------------------------------------------------------------------------
1 | test-file.txt
--------------------------------------------------------------------------------
/internal/provider/test-fixtures/test-dir/test-dir1/file1.txt:
--------------------------------------------------------------------------------
1 | This is file 1
--------------------------------------------------------------------------------
/internal/provider/test-fixtures/test-dir/test-dir1/file2.txt:
--------------------------------------------------------------------------------
1 | This is file 2
--------------------------------------------------------------------------------
/internal/provider/test-fixtures/test-dir/test-dir1/file3.txt:
--------------------------------------------------------------------------------
1 | This is file 3
--------------------------------------------------------------------------------
/internal/provider/test-fixtures/test-dir/test-dir2/file1.txt:
--------------------------------------------------------------------------------
1 | This is file 1
--------------------------------------------------------------------------------
/internal/provider/test-fixtures/test-dir/test-dir2/file2.txt:
--------------------------------------------------------------------------------
1 | This is file 2
--------------------------------------------------------------------------------
/internal/provider/test-fixtures/test-dir/test-dir2/file3.txt:
--------------------------------------------------------------------------------
1 | This is file 3
--------------------------------------------------------------------------------
/internal/provider/test-fixtures/test-dir/test-file.txt:
--------------------------------------------------------------------------------
1 | This is test content
--------------------------------------------------------------------------------
/internal/provider/test-fixtures/test-symlink-dir:
--------------------------------------------------------------------------------
1 | test-dir/test-dir1
--------------------------------------------------------------------------------
/internal/provider/test-fixtures/test-symlink-dir-with-symlink-file:
--------------------------------------------------------------------------------
1 | test-dir-with-symlink-file
--------------------------------------------------------------------------------
/internal/provider/zip_archiver.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | package archive
5 |
6 | import (
7 | "archive/zip"
8 | "fmt"
9 | "os"
10 | "path/filepath"
11 | "sort"
12 | "strconv"
13 | "time"
14 |
15 | "github.com/bmatcuk/doublestar/v4"
16 | )
17 |
18 | type ZipArchiver struct {
19 | filepath string
20 | outputFileMode string // Default value "" means unset
21 | filewriter *os.File
22 | writer *zip.Writer
23 | }
24 |
25 | func NewZipArchiver(filepath string) Archiver {
26 | return &ZipArchiver{
27 | filepath: filepath,
28 | }
29 | }
30 |
31 | func (a *ZipArchiver) ArchiveContent(content []byte, infilename string) error {
32 | if err := a.open(); err != nil {
33 | return err
34 | }
35 | defer a.close()
36 |
37 | f, err := a.writer.Create(filepath.ToSlash(infilename))
38 | if err != nil {
39 | return err
40 | }
41 |
42 | _, err = f.Write(content)
43 | return err
44 | }
45 |
46 | func (a *ZipArchiver) ArchiveFile(infilename string) error {
47 | fi, err := assertValidFile(infilename)
48 | if err != nil {
49 | return err
50 | }
51 |
52 | content, err := os.ReadFile(infilename)
53 | if err != nil {
54 | return err
55 | }
56 |
57 | if err := a.open(); err != nil {
58 | return err
59 | }
60 | defer a.close()
61 |
62 | fh, err := zip.FileInfoHeader(fi)
63 | if err != nil {
64 | return fmt.Errorf("error creating file header: %s", err)
65 | }
66 | fh.Name = filepath.ToSlash(fi.Name())
67 | fh.Method = zip.Deflate
68 | //nolint:staticcheck // This is required as fh.SetModTime has been deprecated since Go 1.10 and using fh.Modified alone isn't enough when using a zero value
69 | fh.SetModTime(time.Time{})
70 |
71 | if a.outputFileMode != "" {
72 | filemode, err := strconv.ParseUint(a.outputFileMode, 0, 32)
73 | if err != nil {
74 | return fmt.Errorf("error parsing output_file_mode value: %s", a.outputFileMode)
75 | }
76 | fh.SetMode(os.FileMode(filemode))
77 | }
78 |
79 | f, err := a.writer.CreateHeader(fh)
80 | if err != nil {
81 | return fmt.Errorf("error creating file inside archive: %s", err)
82 | }
83 |
84 | _, err = f.Write(content)
85 | return err
86 | }
87 |
88 | func checkMatch(fileName string, excludes []string) (value bool, err error) {
89 | for _, exclude := range excludes {
90 | if exclude == "" {
91 | continue
92 | }
93 |
94 | match, err := doublestar.PathMatch(exclude, fileName)
95 | if err != nil {
96 | return false, err
97 | }
98 |
99 | if match {
100 | return true, nil
101 | }
102 | }
103 | return false, nil
104 | }
105 |
106 | func (a *ZipArchiver) ArchiveDir(indirname string, opts ArchiveDirOpts) error {
107 | err := assertValidDir(indirname)
108 | if err != nil {
109 | return err
110 | }
111 |
112 | // ensure exclusions are OS compatible paths
113 | for i := range opts.Excludes {
114 | opts.Excludes[i] = filepath.FromSlash(opts.Excludes[i])
115 | }
116 |
117 | // Determine whether an empty archive would be generated.
118 | isArchiveEmpty := true
119 |
120 | err = filepath.Walk(indirname, a.createWalkFunc("", indirname, opts, &isArchiveEmpty, true))
121 | if err != nil {
122 | return err
123 | }
124 |
125 | // Return an error if an empty archive would be generated.
126 | if isArchiveEmpty {
127 | return fmt.Errorf("archive has not been created as it would be empty")
128 | }
129 |
130 | if err := a.open(); err != nil {
131 | return err
132 | }
133 | defer a.close()
134 |
135 | return filepath.Walk(indirname, a.createWalkFunc("", indirname, opts, &isArchiveEmpty, false))
136 | }
137 |
138 | func (a *ZipArchiver) createWalkFunc(basePath, indirname string, opts ArchiveDirOpts, isArchiveEmpty *bool, dryRun bool) func(path string, info os.FileInfo, err error) error {
139 | return func(path string, info os.FileInfo, err error) error {
140 | if err != nil {
141 | return fmt.Errorf("error encountered during file walk: %s", err)
142 | }
143 |
144 | relname, err := filepath.Rel(indirname, path)
145 | if err != nil {
146 | return fmt.Errorf("error relativizing file for archival: %s", err)
147 | }
148 |
149 | archivePath := filepath.Join(basePath, relname)
150 |
151 | isMatch, err := checkMatch(archivePath, opts.Excludes)
152 | if err != nil {
153 | return fmt.Errorf("error checking excludes matches: %w", err)
154 | }
155 |
156 | if info.IsDir() {
157 | if isMatch {
158 | return filepath.SkipDir
159 | }
160 | return nil
161 | }
162 |
163 | if isMatch {
164 | return nil
165 | }
166 |
167 | if info.Mode()&os.ModeSymlink == os.ModeSymlink {
168 | realPath, err := filepath.EvalSymlinks(path)
169 | if err != nil {
170 | return err
171 | }
172 |
173 | realInfo, err := os.Stat(realPath)
174 | if err != nil {
175 | return err
176 | }
177 |
178 | if realInfo.IsDir() {
179 | if !opts.ExcludeSymlinkDirectories {
180 | return filepath.Walk(realPath, a.createWalkFunc(archivePath, realPath, opts, isArchiveEmpty, dryRun))
181 | } else {
182 | return filepath.SkipDir
183 | }
184 | }
185 |
186 | info = realInfo
187 | }
188 |
189 | *isArchiveEmpty = false
190 |
191 | if dryRun {
192 | return nil
193 | }
194 |
195 | fh, err := zip.FileInfoHeader(info)
196 | if err != nil {
197 | return fmt.Errorf("error creating file header: %s", err)
198 | }
199 | fh.Name = filepath.ToSlash(archivePath)
200 | fh.Method = zip.Deflate
201 | // fh.Modified alone isn't enough when using a zero value
202 | //nolint:staticcheck
203 | fh.SetModTime(time.Time{})
204 |
205 | if a.outputFileMode != "" {
206 | filemode, err := strconv.ParseUint(a.outputFileMode, 0, 32)
207 | if err != nil {
208 | return fmt.Errorf("error parsing output_file_mode value: %s", a.outputFileMode)
209 | }
210 | fh.SetMode(os.FileMode(filemode))
211 | }
212 |
213 | f, err := a.writer.CreateHeader(fh)
214 | if err != nil {
215 | return fmt.Errorf("error creating file inside archive: %s", err)
216 | }
217 | content, err := os.ReadFile(path)
218 | if err != nil {
219 | return fmt.Errorf("error reading file for archival: %s", err)
220 | }
221 | _, err = f.Write(content)
222 | return err
223 | }
224 | }
225 |
226 | func (a *ZipArchiver) ArchiveMultiple(content map[string][]byte) error {
227 | if err := a.open(); err != nil {
228 | return err
229 | }
230 | defer a.close()
231 |
232 | // Ensure files are processed in the same order so hashes don't change
233 | keys := make([]string, len(content))
234 | i := 0
235 | for k := range content {
236 | keys[i] = k
237 | i++
238 | }
239 | sort.Strings(keys)
240 |
241 | for _, filename := range keys {
242 | f, err := a.writer.Create(filepath.ToSlash(filename))
243 | if err != nil {
244 | return err
245 | }
246 | _, err = f.Write(content[filename])
247 | if err != nil {
248 | return err
249 | }
250 | }
251 | return nil
252 | }
253 |
254 | func (a *ZipArchiver) SetOutputFileMode(outputFileMode string) {
255 | a.outputFileMode = outputFileMode
256 | }
257 |
258 | func (a *ZipArchiver) open() error {
259 | f, err := os.Create(a.filepath)
260 | if err != nil {
261 | return err
262 | }
263 | a.filewriter = f
264 | a.writer = zip.NewWriter(f)
265 | return nil
266 | }
267 |
268 | func (a *ZipArchiver) close() {
269 | if a.writer != nil {
270 | a.writer.Close()
271 | a.writer = nil
272 | }
273 | if a.filewriter != nil {
274 | a.filewriter.Close()
275 | a.filewriter = nil
276 | }
277 | }
278 |
--------------------------------------------------------------------------------
/internal/provider/zip_archiver_test.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | package archive
5 |
6 | import (
7 | "archive/zip"
8 | "bytes"
9 | "io"
10 | "os"
11 | "path/filepath"
12 | "strconv"
13 | "testing"
14 | "time"
15 | )
16 |
17 | func TestZipArchiver_Content(t *testing.T) {
18 | zipFilePath := filepath.Join(t.TempDir(), "archive-content.zip")
19 |
20 | archiver := NewZipArchiver(zipFilePath)
21 | if err := archiver.ArchiveContent([]byte("This is some content"), "content.txt"); err != nil {
22 | t.Fatalf("unexpected error: %s", err)
23 | }
24 |
25 | ensureContents(t, zipFilePath, map[string][]byte{
26 | "content.txt": []byte("This is some content"),
27 | })
28 | }
29 |
30 | func TestZipArchiver_File(t *testing.T) {
31 | zipFilePath := filepath.Join(t.TempDir(), "archive-file.zip")
32 |
33 | archiver := NewZipArchiver(zipFilePath)
34 | if err := archiver.ArchiveFile("./test-fixtures/test-dir/test-file.txt"); err != nil {
35 | t.Fatalf("unexpected error: %s", err)
36 | }
37 |
38 | ensureContents(t, zipFilePath, map[string][]byte{
39 | "test-file.txt": []byte("This is test content"),
40 | })
41 | }
42 |
43 | //nolint:usetesting
44 | func TestZipArchiver_FileMode(t *testing.T) {
45 | file, err := os.CreateTemp("", "archive-file-mode-test.zip")
46 | if err != nil {
47 | t.Fatal(err)
48 | }
49 |
50 | var (
51 | zipFilePath = file.Name()
52 | toZipPath = filepath.FromSlash("./test-fixtures/test-dir/test-file.txt")
53 | )
54 |
55 | stringArray := [5]string{"0444", "0644", "0666", "0744", "0777"}
56 | for _, element := range stringArray {
57 | archiver := NewZipArchiver(zipFilePath)
58 | archiver.SetOutputFileMode(element)
59 | if err := archiver.ArchiveFile(toZipPath); err != nil {
60 | t.Fatalf("unexpected error: %s", err)
61 | }
62 |
63 | ensureFileMode(t, zipFilePath, element)
64 | }
65 | }
66 |
67 | func TestZipArchiver_FileModified(t *testing.T) {
68 | var (
69 | zipFilePath = filepath.Join(t.TempDir(), "archive-file-modified.zip")
70 | toZipPath = filepath.FromSlash("./test-fixtures/test-dir/test-file.txt")
71 | )
72 |
73 | var zipFunc = func() {
74 | archiver := NewZipArchiver(zipFilePath)
75 | if err := archiver.ArchiveFile(toZipPath); err != nil {
76 | t.Fatalf("unexpected error: %s", err)
77 | }
78 | }
79 |
80 | zipFunc()
81 |
82 | expectedContents, err := os.ReadFile(zipFilePath)
83 | if err != nil {
84 | t.Fatalf("unexpected error: %s", err)
85 | }
86 |
87 | //touch file modified, in the future just in case of weird race issues
88 | newTime := time.Now().Add(1 * time.Hour)
89 | if err := os.Chtimes(toZipPath, newTime, newTime); err != nil {
90 | t.Fatalf("unexpected error: %s", err)
91 | }
92 |
93 | zipFunc()
94 |
95 | actualContents, err := os.ReadFile(zipFilePath)
96 | if err != nil {
97 | t.Fatalf("unexpected error: %s", err)
98 | }
99 |
100 | if !bytes.Equal(expectedContents, actualContents) {
101 | t.Fatalf("zip contents do not match, potentially a modified time issue")
102 | }
103 | }
104 |
105 | func TestZipArchiver_Dir(t *testing.T) {
106 | zipFilePath := filepath.Join(t.TempDir(), "archive-dir.zip")
107 |
108 | archiver := NewZipArchiver(zipFilePath)
109 | if err := archiver.ArchiveDir("./test-fixtures/test-dir/test-dir1", ArchiveDirOpts{}); err != nil {
110 | t.Fatalf("unexpected error: %s", err)
111 | }
112 |
113 | ensureContents(t, zipFilePath, map[string][]byte{
114 | "file1.txt": []byte("This is file 1"),
115 | "file2.txt": []byte("This is file 2"),
116 | "file3.txt": []byte("This is file 3"),
117 | })
118 | }
119 |
120 | func TestZipArchiver_Dir_Exclude(t *testing.T) {
121 | zipFilePath := filepath.Join(t.TempDir(), "archive-dir-exclude.zip")
122 |
123 | archiver := NewZipArchiver(zipFilePath)
124 | if err := archiver.ArchiveDir("./test-fixtures/test-dir/test-dir1", ArchiveDirOpts{
125 | Excludes: []string{"file2.txt"},
126 | }); err != nil {
127 | t.Fatalf("unexpected error: %s", err)
128 | }
129 |
130 | ensureContents(t, zipFilePath, map[string][]byte{
131 | "file1.txt": []byte("This is file 1"),
132 | "file3.txt": []byte("This is file 3"),
133 | })
134 | }
135 |
136 | func TestZipArchiver_Dir_Exclude_With_Directory(t *testing.T) {
137 | zipFilePath := filepath.Join(t.TempDir(), "archive-dir-exclude-dir.zip")
138 |
139 | archiver := NewZipArchiver(zipFilePath)
140 | if err := archiver.ArchiveDir("./test-fixtures/test-dir", ArchiveDirOpts{
141 | Excludes: []string{"test-dir1", "test-dir2/file2.txt"},
142 | }); err != nil {
143 | t.Fatalf("unexpected error: %s", err)
144 | }
145 |
146 | ensureContents(t, zipFilePath, map[string][]byte{
147 | "test-dir2/file1.txt": []byte("This is file 1"),
148 | "test-dir2/file3.txt": []byte("This is file 3"),
149 | "test-file.txt": []byte("This is test content"),
150 | })
151 | }
152 |
153 | func TestZipArchiver_Multiple(t *testing.T) {
154 | zipFilePath := filepath.Join(t.TempDir(), "archive-content.zip")
155 |
156 | content := map[string][]byte{
157 | "file1.txt": []byte("This is file 1"),
158 | "file2.txt": []byte("This is file 2"),
159 | "file3.txt": []byte("This is file 3"),
160 | }
161 |
162 | archiver := NewZipArchiver(zipFilePath)
163 | if err := archiver.ArchiveMultiple(content); err != nil {
164 | t.Fatalf("unexpected error: %s", err)
165 | }
166 |
167 | ensureContents(t, zipFilePath, content)
168 | }
169 |
170 | func TestZipArchiver_Dir_With_Symlink_File(t *testing.T) {
171 | zipFilePath := filepath.Join(t.TempDir(), "archive-dir-with-symlink-file.zip")
172 |
173 | archiver := NewZipArchiver(zipFilePath)
174 | if err := archiver.ArchiveDir("./test-fixtures/test-dir-with-symlink-file", ArchiveDirOpts{}); err != nil {
175 | t.Fatalf("unexpected error: %s", err)
176 | }
177 |
178 | ensureContents(t, zipFilePath, map[string][]byte{
179 | "test-file.txt": []byte("This is test content"),
180 | "test-symlink.txt": []byte("This is test content"),
181 | })
182 | }
183 |
184 | func TestZipArchiver_Dir_DoNotExcludeSymlinkDirectories(t *testing.T) {
185 | zipFilePath := filepath.Join(t.TempDir(), "archive-dir-with-symlink-dir.zip")
186 |
187 | archiver := NewZipArchiver(zipFilePath)
188 | if err := archiver.ArchiveDir("./test-fixtures", ArchiveDirOpts{}); err != nil {
189 | t.Fatalf("unexpected error: %s", err)
190 | }
191 |
192 | ensureContents(t, zipFilePath, map[string][]byte{
193 | "test-dir/test-dir1/file1.txt": []byte("This is file 1"),
194 | "test-dir/test-dir1/file2.txt": []byte("This is file 2"),
195 | "test-dir/test-dir1/file3.txt": []byte("This is file 3"),
196 | "test-dir/test-dir2/file1.txt": []byte("This is file 1"),
197 | "test-dir/test-dir2/file2.txt": []byte("This is file 2"),
198 | "test-dir/test-dir2/file3.txt": []byte("This is file 3"),
199 | "test-dir/test-file.txt": []byte("This is test content"),
200 | "test-dir-with-symlink-dir/test-symlink-dir/file1.txt": []byte("This is file 1"),
201 | "test-dir-with-symlink-dir/test-symlink-dir/file2.txt": []byte("This is file 2"),
202 | "test-dir-with-symlink-dir/test-symlink-dir/file3.txt": []byte("This is file 3"),
203 | "test-dir-with-symlink-file/test-file.txt": []byte("This is test content"),
204 | "test-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"),
205 | "test-symlink-dir/file1.txt": []byte("This is file 1"),
206 | "test-symlink-dir/file2.txt": []byte("This is file 2"),
207 | "test-symlink-dir/file3.txt": []byte("This is file 3"),
208 | "test-symlink-dir-with-symlink-file/test-file.txt": []byte("This is test content"),
209 | "test-symlink-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"),
210 | })
211 | }
212 |
213 | func TestZipArchiver_Dir_ExcludeSymlinkDirectories(t *testing.T) {
214 | zipFilePath := filepath.Join(t.TempDir(), "archive-dir-with-symlink-dir.zip")
215 |
216 | archiver := NewZipArchiver(zipFilePath)
217 | err := archiver.ArchiveDir("./test-fixtures", ArchiveDirOpts{
218 | ExcludeSymlinkDirectories: true,
219 | })
220 |
221 | if err != nil {
222 | t.Errorf("expected no error: %s", err)
223 | }
224 | }
225 |
226 | func TestZipArchiver_Dir_Exclude_DoNotExcludeSymlinkDirectories(t *testing.T) {
227 | zipFilePath := filepath.Join(t.TempDir(), "archive-dir-with-symlink-dir.zip")
228 |
229 | archiver := NewZipArchiver(zipFilePath)
230 | if err := archiver.ArchiveDir("./test-fixtures", ArchiveDirOpts{
231 | Excludes: []string{
232 | "test-symlink-dir/file1.txt",
233 | "test-symlink-dir-with-symlink-file/test-symlink.txt",
234 | },
235 | }); err != nil {
236 | t.Fatalf("unexpected error: %s", err)
237 | }
238 |
239 | ensureContents(t, zipFilePath, map[string][]byte{
240 | "test-dir/test-dir1/file1.txt": []byte("This is file 1"),
241 | "test-dir/test-dir1/file2.txt": []byte("This is file 2"),
242 | "test-dir/test-dir1/file3.txt": []byte("This is file 3"),
243 | "test-dir/test-dir2/file1.txt": []byte("This is file 1"),
244 | "test-dir/test-dir2/file2.txt": []byte("This is file 2"),
245 | "test-dir/test-dir2/file3.txt": []byte("This is file 3"),
246 | "test-dir/test-file.txt": []byte("This is test content"),
247 | "test-dir-with-symlink-dir/test-symlink-dir/file1.txt": []byte("This is file 1"),
248 | "test-dir-with-symlink-dir/test-symlink-dir/file2.txt": []byte("This is file 2"),
249 | "test-dir-with-symlink-dir/test-symlink-dir/file3.txt": []byte("This is file 3"),
250 | "test-dir-with-symlink-file/test-file.txt": []byte("This is test content"),
251 | "test-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"),
252 | "test-symlink-dir/file2.txt": []byte("This is file 2"),
253 | "test-symlink-dir/file3.txt": []byte("This is file 3"),
254 | "test-symlink-dir-with-symlink-file/test-file.txt": []byte("This is test content"),
255 | })
256 | }
257 |
258 | func TestZipArchiver_Dir_Exclude_Glob_DoNotExcludeSymlinkDirectories(t *testing.T) {
259 | zipFilePath := filepath.Join(t.TempDir(), "archive-dir-with-symlink-dir.zip")
260 |
261 | archiver := NewZipArchiver(zipFilePath)
262 | if err := archiver.ArchiveDir("./test-fixtures", ArchiveDirOpts{
263 | Excludes: []string{
264 | "**/file1.txt",
265 | "**/file2.*",
266 | "test-dir-with-symlink-dir/test-symlink-dir",
267 | "test-symlink-dir-with-symlink-file/test-symlink.txt",
268 | },
269 | }); err != nil {
270 | t.Fatalf("unexpected error: %s", err)
271 | }
272 |
273 | ensureContents(t, zipFilePath, map[string][]byte{
274 | "test-dir/test-dir1/file3.txt": []byte("This is file 3"),
275 | "test-dir/test-dir2/file3.txt": []byte("This is file 3"),
276 | "test-dir/test-file.txt": []byte("This is test content"),
277 | "test-dir-with-symlink-file/test-file.txt": []byte("This is test content"),
278 | "test-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"),
279 | "test-symlink-dir/file3.txt": []byte("This is file 3"),
280 | "test-symlink-dir-with-symlink-file/test-file.txt": []byte("This is test content"),
281 | })
282 | }
283 |
284 | func TestZipArchiver_Dir_Exclude_ExcludeSymlinkDirectories(t *testing.T) {
285 | zipFilePath := filepath.Join(t.TempDir(), "archive-dir-with-symlink-dir.zip")
286 |
287 | archiver := NewZipArchiver(zipFilePath)
288 | err := archiver.ArchiveDir("./test-fixtures", ArchiveDirOpts{
289 | Excludes: []string{
290 | "test-dir/test-dir1/file1.txt",
291 | "test-symlink-dir-with-symlink-file/test-symlink.txt",
292 | },
293 | ExcludeSymlinkDirectories: true,
294 | })
295 |
296 | if err != nil {
297 | t.Errorf("expected no error: %s", err)
298 | }
299 |
300 | ensureContents(t, zipFilePath, map[string][]byte{
301 | "test-dir/test-dir1/file2.txt": []byte("This is file 2"),
302 | "test-dir/test-dir1/file3.txt": []byte("This is file 3"),
303 | "test-dir/test-dir2/file1.txt": []byte("This is file 1"),
304 | "test-dir/test-dir2/file2.txt": []byte("This is file 2"),
305 | "test-dir/test-dir2/file3.txt": []byte("This is file 3"),
306 | "test-dir/test-file.txt": []byte("This is test content"),
307 | "test-dir-with-symlink-file/test-file.txt": []byte("This is test content"),
308 | "test-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"),
309 | })
310 | }
311 |
312 | func TestZipArchiver_Dir_Exclude_Glob_ExcludeSymlinkDirectories(t *testing.T) {
313 | zipFilePath := filepath.Join(t.TempDir(), "archive-dir-with-symlink-dir.zip")
314 |
315 | archiver := NewZipArchiver(zipFilePath)
316 | err := archiver.ArchiveDir("./test-fixtures", ArchiveDirOpts{
317 | Excludes: []string{
318 | "test-dir/test-dir1/file1.txt",
319 | "**/file[2-3].txt",
320 | "test-dir-with-symlink-file",
321 | },
322 | ExcludeSymlinkDirectories: true,
323 | })
324 |
325 | if err != nil {
326 | t.Errorf("expected no error: %s", err)
327 | }
328 |
329 | ensureContents(t, zipFilePath, map[string][]byte{
330 | "test-dir/test-dir2/file1.txt": []byte("This is file 1"),
331 | "test-dir/test-file.txt": []byte("This is test content"),
332 | })
333 | }
334 |
335 | func ensureContents(t *testing.T, zipfilepath string, wants map[string][]byte) {
336 | t.Helper()
337 | r, err := zip.OpenReader(zipfilepath)
338 | if err != nil {
339 | t.Fatalf("could not open zip file: %s", err)
340 | }
341 | defer r.Close()
342 |
343 | if len(r.File) != len(wants) {
344 | t.Errorf("mismatched file count, got %d, want %d", len(r.File), len(wants))
345 | }
346 | for _, cf := range r.File {
347 | ensureContent(t, wants, cf)
348 | }
349 | }
350 |
351 | func ensureContent(t *testing.T, wants map[string][]byte, got *zip.File) {
352 | t.Helper()
353 | want, ok := wants[got.Name]
354 | if !ok {
355 | t.Errorf("additional file in zip: %s", got.Name)
356 | return
357 | }
358 |
359 | r, err := got.Open()
360 | if err != nil {
361 | t.Errorf("could not open file: %s", err)
362 | }
363 | defer r.Close()
364 | gotContentBytes, err := io.ReadAll(r)
365 | if err != nil {
366 | t.Errorf("could not read file: %s", err)
367 | }
368 |
369 | wantContent := string(want)
370 | gotContent := string(gotContentBytes)
371 | if gotContent != wantContent {
372 | t.Errorf("mismatched content\ngot\n%s\nwant\n%s", gotContent, wantContent)
373 | }
374 | }
375 |
376 | func ensureFileMode(t *testing.T, zipfilepath string, outputFileMode string) {
377 | t.Helper()
378 | r, err := zip.OpenReader(zipfilepath)
379 | if err != nil {
380 | t.Fatalf("could not open zip file: %s", err)
381 | }
382 | defer r.Close()
383 |
384 | filemode, err := strconv.ParseUint(outputFileMode, 0, 32)
385 | if err != nil {
386 | t.Fatalf("error parsing outputFileMode value: %s", outputFileMode)
387 | }
388 | var osfilemode = os.FileMode(filemode)
389 |
390 | for _, cf := range r.File {
391 | if cf.FileInfo().IsDir() {
392 | continue
393 | }
394 |
395 | if cf.Mode() != osfilemode {
396 | t.Fatalf("Expected filemode \"%s\" but was \"%s\"", osfilemode, cf.Mode())
397 | }
398 | }
399 | }
400 |
--------------------------------------------------------------------------------
/main.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | package main
5 |
6 | import (
7 | "context"
8 | "flag"
9 | "log"
10 |
11 | "github.com/hashicorp/terraform-plugin-framework/providerserver"
12 |
13 | p "github.com/hashicorp/terraform-provider-archive/internal/provider"
14 | )
15 |
16 | func main() {
17 | var debug bool
18 |
19 | flag.BoolVar(&debug, "debug", false, "set to true to run the provider with support for debuggers like delve")
20 | flag.Parse()
21 |
22 | err := providerserver.Serve(context.Background(), p.New, providerserver.ServeOpts{
23 | Address: "registry.terraform.io/hashicorp/archive",
24 | Debug: debug,
25 | ProtocolVersion: 5,
26 | })
27 | if err != nil {
28 | log.Fatal(err)
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/templates/data-sources/file.md.tmpl:
--------------------------------------------------------------------------------
1 | ---
2 | page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}"
3 | subcategory: ""
4 | description: |-
5 | {{ .Description | plainmarkdown | trimspace | prefixlines " " }}
6 | ---
7 |
8 | # {{.Name}} ({{.Type}})
9 |
10 | {{ .Description | trimspace }}
11 |
12 | ## Example Usage
13 |
14 | {{ tffile "examples/data-sources/file/data-source.tf" }}
15 |
16 | {{ tffile "examples/data-sources/file/multiple-files.tf" }}
17 |
18 | {{ tffile "examples/data-sources/file/lambda.tf" }}
19 |
20 | {{ .SchemaMarkdown | trimspace }}
21 |
--------------------------------------------------------------------------------
/templates/index.md.tmpl:
--------------------------------------------------------------------------------
1 | ---
2 | page_title: "Provider: Archive"
3 | description: |-
4 | The Archive provider is used to manage archive files.
5 | ---
6 |
7 | # Archive Provider
8 |
9 | The archive provider exposes resources to manage archive files.
10 |
11 | This provider requires no configuration. For information on the resources
12 | it provides, see the navigation bar.
13 |
14 | {{- /* No schema in this provider, so no need for this: .SchemaMarkdown | trimspace */ -}}
15 |
--------------------------------------------------------------------------------
/templates/resources/file.md.tmpl:
--------------------------------------------------------------------------------
1 | ---
2 | page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}"
3 | subcategory: ""
4 | description: |-
5 | {{ .Description | plainmarkdown | trimspace | prefixlines " " }}
6 | ---
7 |
8 | # {{.Name}} ({{.Type}})
9 |
10 | {{ .Description | trimspace }}
11 |
12 | ## Example Usage
13 |
14 | {{ tffile "examples/resources/file/resource.tf" }}
15 |
16 | {{ tffile "examples/resources/file/multiple-files.tf" }}
17 |
18 | {{ tffile "examples/resources/file/lambda.tf" }}
19 |
20 | {{ .SchemaMarkdown | trimspace }}
21 |
--------------------------------------------------------------------------------
/terraform-registry-manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": 1,
3 | "metadata": {
4 | "protocol_versions": ["5.0"]
5 | }
6 | }
--------------------------------------------------------------------------------
/tools/go.mod:
--------------------------------------------------------------------------------
1 | module tools
2 |
3 | go 1.23.7
4 |
5 | require (
6 | github.com/hashicorp/copywrite v0.22.0
7 | github.com/hashicorp/terraform-plugin-docs v0.21.0
8 | )
9 |
10 | require (
11 | github.com/AlecAivazis/survey/v2 v2.3.7 // indirect
12 | github.com/BurntSushi/toml v1.2.1 // indirect
13 | github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect
14 | github.com/Masterminds/goutils v1.1.1 // indirect
15 | github.com/Masterminds/semver/v3 v3.2.0 // indirect
16 | github.com/Masterminds/sprig/v3 v3.2.3 // indirect
17 | github.com/ProtonMail/go-crypto v1.1.3 // indirect
18 | github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
19 | github.com/armon/go-radix v1.0.0 // indirect
20 | github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef // indirect
21 | github.com/bgentry/speakeasy v0.1.0 // indirect
22 | github.com/bmatcuk/doublestar/v4 v4.8.1 // indirect
23 | github.com/bradleyfalzon/ghinstallation/v2 v2.5.0 // indirect
24 | github.com/cli/go-gh/v2 v2.11.2 // indirect
25 | github.com/cli/safeexec v1.0.0 // indirect
26 | github.com/cloudflare/circl v1.3.7 // indirect
27 | github.com/fatih/color v1.16.0 // indirect
28 | github.com/fsnotify/fsnotify v1.5.4 // indirect
29 | github.com/go-openapi/errors v0.20.2 // indirect
30 | github.com/go-openapi/strfmt v0.21.3 // indirect
31 | github.com/golang-jwt/jwt/v4 v4.5.2 // indirect
32 | github.com/golang/protobuf v1.5.2 // indirect
33 | github.com/google/go-github/v45 v45.2.0 // indirect
34 | github.com/google/go-github/v53 v53.0.0 // indirect
35 | github.com/google/go-querystring v1.1.0 // indirect
36 | github.com/google/uuid v1.3.0 // indirect
37 | github.com/hashicorp/cli v1.1.7 // indirect
38 | github.com/hashicorp/errwrap v1.1.0 // indirect
39 | github.com/hashicorp/go-checkpoint v0.5.0 // indirect
40 | github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
41 | github.com/hashicorp/go-hclog v1.6.3 // indirect
42 | github.com/hashicorp/go-multierror v1.1.1 // indirect
43 | github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
44 | github.com/hashicorp/go-uuid v1.0.3 // indirect
45 | github.com/hashicorp/go-version v1.7.0 // indirect
46 | github.com/hashicorp/hc-install v0.9.1 // indirect
47 | github.com/hashicorp/hcl v1.0.0 // indirect
48 | github.com/hashicorp/terraform-exec v0.22.0 // indirect
49 | github.com/hashicorp/terraform-json v0.24.0 // indirect
50 | github.com/huandu/xstrings v1.3.3 // indirect
51 | github.com/imdario/mergo v0.3.15 // indirect
52 | github.com/inconshreveable/mousetrap v1.0.1 // indirect
53 | github.com/jedib0t/go-pretty v4.3.0+incompatible // indirect
54 | github.com/jedib0t/go-pretty/v6 v6.4.6 // indirect
55 | github.com/joho/godotenv v1.3.0 // indirect
56 | github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
57 | github.com/knadh/koanf v1.5.0 // indirect
58 | github.com/mattn/go-colorable v0.1.14 // indirect
59 | github.com/mattn/go-isatty v0.0.20 // indirect
60 | github.com/mattn/go-runewidth v0.0.15 // indirect
61 | github.com/mergestat/timediff v0.0.3 // indirect
62 | github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
63 | github.com/mitchellh/copystructure v1.2.0 // indirect
64 | github.com/mitchellh/go-homedir v1.1.0 // indirect
65 | github.com/mitchellh/mapstructure v1.5.0 // indirect
66 | github.com/mitchellh/reflectwalk v1.0.2 // indirect
67 | github.com/oklog/ulid v1.3.1 // indirect
68 | github.com/posener/complete v1.2.3 // indirect
69 | github.com/rivo/uniseg v0.4.7 // indirect
70 | github.com/samber/lo v1.37.0 // indirect
71 | github.com/shopspring/decimal v1.3.1 // indirect
72 | github.com/spf13/cast v1.5.0 // indirect
73 | github.com/spf13/cobra v1.6.1 // indirect
74 | github.com/spf13/pflag v1.0.5 // indirect
75 | github.com/thanhpk/randstr v1.0.4 // indirect
76 | github.com/yuin/goldmark v1.7.7 // indirect
77 | github.com/yuin/goldmark-meta v1.1.0 // indirect
78 | github.com/zclconf/go-cty v1.16.2 // indirect
79 | go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect
80 | go.mongodb.org/mongo-driver v1.10.0 // indirect
81 | golang.org/x/crypto v0.36.0 // indirect
82 | golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df // indirect
83 | golang.org/x/mod v0.22.0 // indirect
84 | golang.org/x/net v0.38.0 // indirect
85 | golang.org/x/oauth2 v0.8.0 // indirect
86 | golang.org/x/sync v0.12.0 // indirect
87 | golang.org/x/sys v0.31.0 // indirect
88 | golang.org/x/term v0.30.0 // indirect
89 | golang.org/x/text v0.23.0 // indirect
90 | google.golang.org/appengine v1.6.7 // indirect
91 | google.golang.org/protobuf v1.33.0 // indirect
92 | gopkg.in/yaml.v2 v2.4.0 // indirect
93 | gopkg.in/yaml.v3 v3.0.1 // indirect
94 | )
95 |
--------------------------------------------------------------------------------
/tools/tools.go:
--------------------------------------------------------------------------------
1 | // Copyright (c) HashiCorp, Inc.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | //go:build generate
5 |
6 | package tools
7 |
8 | import (
9 | // document generation
10 | _ "github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs"
11 | // copywrite header generation
12 | _ "github.com/hashicorp/copywrite"
13 | )
14 |
15 | // Generate copyright headers
16 | //go:generate go run github.com/hashicorp/copywrite headers -d .. --config ../.copywrite.hcl
17 | // Format Terraform code for use in documentation.
18 | // If you do not have Terraform installed, you can remove the formatting command, but it is suggested
19 | // to ensure the documentation is formatted properly.
20 | //go:generate terraform fmt -recursive ../examples/
21 | // Generate documentation.
22 | //go:generate go run github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate --provider-dir ..
23 |
--------------------------------------------------------------------------------
/version/VERSION:
--------------------------------------------------------------------------------
1 | 2.7.1
--------------------------------------------------------------------------------