├── lib
└── fluent
│ └── plugin
│ ├── gcs
│ ├── version.rb
│ └── object_creator.rb
│ └── out_gcs.rb
├── .gitignore
├── bin
├── setup
└── console
├── Rakefile
├── Gemfile
├── .github
└── workflows
│ └── test.yaml
├── LICENSE.txt
├── test
├── helper.rb
└── plugin
│ ├── test_object_creator.rb
│ └── test_out_gcs.rb
├── fluent-plugin-gcs.gemspec
├── CHANGELOG.md
└── README.md
/lib/fluent/plugin/gcs/version.rb:
--------------------------------------------------------------------------------
module Fluent
  module GCSPlugin
    # Gem version of fluent-plugin-gcs. Bump and tag for each release.
    # Frozen so the shared constant cannot be mutated at runtime.
    VERSION = "0.4.4".freeze
  end
end
6 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /.bundle/
2 | /.yardoc
3 | /Gemfile.lock
4 | /_yardoc/
5 | /coverage/
6 | /doc/
7 | /pkg/
8 | /spec/reports/
9 | /tmp/
10 | /vendor/
11 |
--------------------------------------------------------------------------------
/bin/setup:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# Bootstrap script: installs gem dependencies for local development.
# -e: exit on first error, -u: error on unset variables, -o pipefail: a
# pipeline fails if any command in it fails.
set -euo pipefail
# Restrict word splitting to newlines and tabs (safer for paths with spaces).
IFS=$'\n\t'
# -v/-x: echo each line and expanded command as it runs, for debuggability.
set -vx

bundle install

# Do any other automated setup that you need to do here
9 |
--------------------------------------------------------------------------------
/Rakefile:
--------------------------------------------------------------------------------
require "bundler"
Bundler::GemHelper.install_tasks

require "rake/testtask"

# `rake test` runs every test file under test/plugin/ with lib/ and test/
# on the load path.
Rake::TestTask.new(:test) do |t|
  t.libs << "lib" << "test"
  t.test_files = FileList["test/plugin/test_*.rb"]
  t.verbose = true
end

# Running bare `rake` executes the test suite.
task default: [:test]
12 |
--------------------------------------------------------------------------------
/Gemfile:
--------------------------------------------------------------------------------
# frozen_string_literal: true

source "https://rubygems.org"

# Specify your gem's dependencies in fluent-plugin-gcs.gemspec
gemspec

# Development/test-only dependencies (runtime deps live in the gemspec).
gem "rake", "~> 13.0"
gem "rr", "= 1.1.2"              # mocking/stubbing library used by the tests
gem "test-unit", ">= 3.0.8"
gem "test-unit-rr", ">= 1.0.3"   # test-unit integration for rr
gem "timecop"                    # freeze/travel time in tests
gem "solargraph"                 # Ruby language server for editor tooling
14 |
--------------------------------------------------------------------------------
/bin/console:
--------------------------------------------------------------------------------
#!/usr/bin/env ruby

require "bundler/setup"
# There is no lib/fluent/plugin/gcs.rb entry point in this gem (gcs/ is a
# directory); load the output plugin, which pulls in gcs/version and
# gcs/object_creator.
require "fluent/plugin/out_gcs"

# You can add fixtures and/or initialization code here to make experimenting
# with your gem easier. You can also use a different console, if you like.

# (If you use this, don't forget to add pry to your Gemfile!)
# require "pry"
# Pry.start

require "irb"
IRB.start
15 |
--------------------------------------------------------------------------------
/.github/workflows/test.yaml:
--------------------------------------------------------------------------------
# CI: run the test suite on pushes and pull requests targeting master,
# across the supported Ruby versions.
name: Test

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false   # let every Ruby version finish even if one fails
      matrix:
        ruby-version: ['3.1', '3.2']
    steps:
      - uses: actions/checkout@v4
      - name: Set up Ruby
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: ${{ matrix.ruby-version }}
          bundler-cache: true   # runs 'bundle install' and caches gems
      - name: Run tests
        run: bundle exec rake test
25 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Copyright (c) 2016 - Daichi HIRATA
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 |
--------------------------------------------------------------------------------
/test/helper.rb:
--------------------------------------------------------------------------------
# Shared test bootstrap: loads Bundler, test-unit, the fluentd test helpers
# and the plugin under test. Required (as "helper") by every test file.
require "rubygems"
require "bundler"
begin
  Bundler.setup(:default, :development)
rescue Bundler::BundlerError => e
  $stderr.puts e.message
  $stderr.puts "Run `bundle install` to install missing gems"
  exit e.status_code
end
require "test/unit"
require "timecop"

# Make lib/ and test/ requirable without installing the gem.
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), "..", "lib"))
$LOAD_PATH.unshift(File.dirname(__FILE__))

require "fluent/test"
require "fluent/plugin/out_gcs"

# rr mocking integrated with test-unit (mock/stub helpers in test cases).
require "rr"
require "test/unit/rr"

class Test::Unit::TestCase
end
24 |
# Runs the given block with $stdout captured into a StringIO and restores
# the previous $stdout afterwards (even if the block raises). Returns the
# block's return value. Restoring the *previous* stream — rather than the
# STDOUT constant — keeps nested redirections intact.
def silenced
  previous_stdout = $stdout
  $stdout = StringIO.new

  yield
ensure
  $stdout = previous_stdout
end
32 |
--------------------------------------------------------------------------------
/fluent-plugin-gcs.gemspec:
--------------------------------------------------------------------------------
# coding: utf-8
# Make lib/ requirable so the version constant can be read below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'fluent/plugin/gcs/version'

Gem::Specification.new do |spec|
  spec.name = "fluent-plugin-gcs"
  spec.version = Fluent::GCSPlugin::VERSION
  spec.authors = ["Daichi HIRATA"]
  spec.email = ["hirata.daichi@gmail.com"]
  spec.summary = "Google Cloud Storage output plugin for Fluentd"
  spec.description = "Google Cloud Storage output plugin for Fluentd"
  spec.homepage = "https://github.com/daichirata/fluent-plugin-gcs"
  spec.license = "Apache-2.0"

  # Package every git-tracked file except tests/specs/features.
  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir = "exe"
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_runtime_dependency "fluentd", [">= 0.14.22", "< 2"]
  spec.add_runtime_dependency "google-cloud-storage", "~> 1.1"
end
26 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## [Unreleased]
2 |
3 | New features / Enhancements
4 |
5 | ## [0.4.2] - 2022/08/16
6 |
7 | Bug fixes
8 |
9 | - [Fix automatic conversion from a hash to keyword arguments](https://github.com/daichirata/fluent-plugin-gcs/pull/22)
10 |
11 | ## [0.4.1] - 2020/04/17
12 |
13 | New features
- [Support blind write to GCS](https://github.com/daichirata/fluent-plugin-gcs/pull/14)
15 |
16 | ## [0.4.0] - 2019/04/01
17 |
18 | New features / Enhancements
19 |
20 | - [Support v0.14 (by @cosmo0920)](https://github.com/daichirata/fluent-plugin-gcs/pull/6)
21 |
22 | ## [0.3.0] - 2017/02/28
23 |
24 | New features / Enhancements
25 |
26 | - [Add support for setting a File's storage_class on file creation](https://github.com/daichirata/fluent-plugin-gcs/pull/4)
27 | - see also https://cloud.google.com/storage/docs/storage-classes
28 |
29 | ## [0.2.0] - 2017/01/16
30 |
31 | Bug fixes
32 |
33 | - [Remove encryption_key_sha256 parameter.](https://github.com/daichirata/fluent-plugin-gcs/pull/2)
34 | - see also. https://github.com/GoogleCloudPlatform/google-cloud-ruby/blob/master/google-cloud-storage/CHANGELOG.md#0230--2016-12-8
35 |
36 | ## [0.1.1] - 2016/11/28
37 |
38 | New features / Enhancements
39 |
40 | - Add support for `%{hostname}` of object_key_format
41 |
[Unreleased]: https://github.com/daichirata/fluent-plugin-gcs/compare/v0.4.2...HEAD
[0.4.2]: https://github.com/daichirata/fluent-plugin-gcs/compare/v0.4.1...v0.4.2
[0.4.1]: https://github.com/daichirata/fluent-plugin-gcs/compare/v0.4.0...v0.4.1
43 | [0.4.0]: https://github.com/daichirata/fluent-plugin-gcs/compare/v0.3.0...v0.4.0
44 | [0.3.0]: https://github.com/daichirata/fluent-plugin-gcs/compare/v0.2.0...v0.3.0
45 | [0.2.0]: https://github.com/daichirata/fluent-plugin-gcs/compare/v0.1.0...v0.2.0
46 | [0.1.1]: https://github.com/daichirata/fluent-plugin-gcs/compare/v0.1.0...v0.1.1
47 |
--------------------------------------------------------------------------------
/lib/fluent/plugin/gcs/object_creator.rb:
--------------------------------------------------------------------------------
1 | require "tempfile"
2 | require "zlib"
3 |
4 | module Fluent
5 | module GCS
6 | def self.discovered_object_creator(store_as, transcoding: nil)
7 | case store_as
8 | when :gzip
9 | Fluent::GCS::GZipObjectCreator.new(transcoding)
10 | when :json
11 | Fluent::GCS::JSONObjectCreator.new
12 | when :text
13 | Fluent::GCS::TextObjectCreator.new
14 | end
15 | end
16 |
17 | class ObjectCreator
18 | def content_type
19 | raise NotImplementedError
20 | end
21 |
22 | def content_encoding
23 | nil
24 | end
25 |
26 | def file_extension
27 | raise NotImplementedError
28 | end
29 |
30 | def write(chunk, io)
31 | raise NotImplementedError
32 | end
33 |
34 | def create(chunk, &block)
35 | Tempfile.create("fluent-plugin-gcs") do |f|
36 | f.binmode
37 | f.sync = true
38 | write(chunk, f)
39 | block.call(f)
40 | end
41 | end
42 | end
43 |
44 | class GZipObjectCreator < ObjectCreator
45 | def initialize(transcoding)
46 | @transcoding = transcoding
47 | end
48 |
49 | def content_type
50 | @transcoding ? "text/plain" : "application/gzip"
51 | end
52 |
53 | def content_encoding
54 | @transcoding ? "gzip" : nil
55 | end
56 |
57 | def file_extension
58 | "gz"
59 | end
60 |
61 | def write(chunk, io)
62 | writer = Zlib::GzipWriter.new(io)
63 | chunk.write_to(writer)
64 | writer.finish
65 | end
66 | end
67 |
68 | class TextObjectCreator < ObjectCreator
69 | def content_type
70 | "text/plain"
71 | end
72 |
73 | def file_extension
74 | "txt"
75 | end
76 |
77 | def write(chunk, io)
78 | chunk.write_to(io)
79 | end
80 | end
81 |
82 | class JSONObjectCreator < TextObjectCreator
83 | def content_type
84 | "application/json"
85 | end
86 |
87 | def file_extension
88 | "json"
89 | end
90 | end
91 | end
92 | end
93 |
--------------------------------------------------------------------------------
/test/plugin/test_object_creator.rb:
--------------------------------------------------------------------------------
1 | require "helper"
2 | require "zlib"
3 |
class GCSObjectCreatorTest < Test::Unit::TestCase
  DUMMY_DATA = %[2016-01-01T12:00:00Z\ttest\t{"a":1,"tag":"test","time":"2016-01-01T12:00:00Z"}\n] +
               %[2016-01-01T12:00:00Z\ttest\t{"a":2,"tag":"test","time":"2016-01-01T12:00:00Z"}\n]

  # Minimal stand-in for a Fluentd buffer chunk: only #write_to is needed.
  class DummyChunk
    def write_to(io)
      io.write DUMMY_DATA
    end
  end

  sub_test_case "GZipObjectCreator" do
    def test_content_type_and_content_encoding
      c = Fluent::GCS::GZipObjectCreator.new(true)
      assert_equal "text/plain", c.content_type
      assert_equal "gzip", c.content_encoding

      c = Fluent::GCS::GZipObjectCreator.new(false)
      assert_equal "application/gzip", c.content_type
      # assert_nil avoids test-unit's deprecation warning for
      # assert_equal(nil, ...).
      assert_nil c.content_encoding
    end

    def test_file_extension
      c = Fluent::GCS::GZipObjectCreator.new(true)
      assert_equal "gz", c.file_extension

      c = Fluent::GCS::GZipObjectCreator.new(false)
      assert_equal "gz", c.file_extension
    end

    def test_write
      Tempfile.create("test_object_creator") do |f|
        f.binmode
        f.sync = true

        c = Fluent::GCS::GZipObjectCreator.new(true)
        c.write(DummyChunk.new, f)
        Zlib::GzipReader.open(f.path) do |gz|
          assert_equal DUMMY_DATA, gz.read
        end

        # Rewind and write again; GzipReader stops after the first gzip
        # member, so leftover trailing bytes from the first write are ignored.
        f.rewind
        c = Fluent::GCS::GZipObjectCreator.new(false)
        c.write(DummyChunk.new, f)
        Zlib::GzipReader.open(f.path) do |gz|
          assert_equal DUMMY_DATA, gz.read
        end
      end
    end
  end

  sub_test_case "TextObjectCreator" do
    def test_content_type_and_content_encoding
      c = Fluent::GCS::TextObjectCreator.new
      assert_equal "text/plain", c.content_type
      assert_nil c.content_encoding
    end

    def test_file_extension
      c = Fluent::GCS::TextObjectCreator.new
      assert_equal "txt", c.file_extension
    end

    def test_write
      Tempfile.create("test_object_creator") do |f|
        f.binmode
        f.sync = true

        c = Fluent::GCS::TextObjectCreator.new
        c.write(DummyChunk.new, f)
        f.rewind
        assert_equal DUMMY_DATA, f.read
      end
    end
  end

  sub_test_case "JSONObjectCreator" do
    def test_content_type_and_content_encoding
      c = Fluent::GCS::JSONObjectCreator.new
      assert_equal "application/json", c.content_type
      assert_nil c.content_encoding
    end

    def test_file_extension
      c = Fluent::GCS::JSONObjectCreator.new
      assert_equal "json", c.file_extension
    end

    def test_write
      Tempfile.create("test_object_creator") do |f|
        f.binmode
        f.sync = true

        c = Fluent::GCS::JSONObjectCreator.new
        c.write(DummyChunk.new, f)
        f.rewind
        assert_equal DUMMY_DATA, f.read
      end
    end
  end
end
104 |
--------------------------------------------------------------------------------
/lib/fluent/plugin/out_gcs.rb:
--------------------------------------------------------------------------------
1 | require "digest/md5"
2 | require "securerandom"
3 | require "socket"
4 |
5 | require "fluent/plugin/gcs/object_creator"
6 | require "fluent/plugin/gcs/version"
7 | require "fluent/plugin/output"
8 |
9 | module Fluent::Plugin
10 | class GCSOutput < Output
11 | Fluent::Plugin.register_output("gcs", self)
12 |
13 | helpers :compat_parameters, :formatter, :inject
14 |
    # Loads the google-cloud-storage gem lazily at plugin instantiation (not
    # at file load) and routes the Google API client's logging through the
    # Fluentd plugin logger.
    def initialize
      super
      require "google/cloud/storage"
      Google::Apis.logger = log
    end
20 |
    # --- connection / authentication ---
    config_param :project, :string, default: nil,
                 desc: "Project identifier for GCS"
    config_param :keyfile, :string, default: nil,
                 desc: "Path of GCS service account credentials JSON file"
    config_param :credentials_json, :hash, default: nil, secret: true,
                 desc: "GCS service account credentials in JSON format"
    config_param :client_retries, :integer, default: nil,
                 desc: "Number of times to retry requests on server error"
    config_param :client_timeout, :integer, default: nil,
                 desc: "Default timeout to use in requests"
    # --- object placement / naming ---
    config_param :bucket, :string,
                 desc: "Name of a GCS bucket"
    config_param :object_key_format, :string, default: "%{path}%{time_slice}_%{index}.%{file_extension}",
                 desc: "Format of GCS object keys"
    config_param :path, :string, default: "",
                 desc: "Path prefix of the files on GCS"
    config_param :store_as, :enum, list: %i(gzip json text), default: :gzip,
                 desc: "Archive format on GCS"
    config_param :transcoding, :bool, default: false,
                 desc: "Enable the decompressive form of transcoding"
    config_param :auto_create_bucket, :bool, default: true,
                 desc: "Create GCS bucket if it does not exists"
    config_param :hex_random_length, :integer, default: 4,
                 desc: "Max length of `%{hex_random}` placeholder(4-16)"
    config_param :overwrite, :bool, default: false,
                 desc: "Overwrite already existing path"
    config_param :format, :string, default: "out_file",
                 desc: "Change one line format in the GCS object"
    # --- object attributes ---
    config_param :acl, :enum, list: %i(auth_read owner_full owner_read private project_private public_read), default: nil,
                 desc: "Permission for the object in GCS"
    config_param :storage_class, :enum, list: %i(dra nearline coldline multi_regional regional standard), default: nil,
                 desc: "Storage class of the file"
    config_param :encryption_key, :string, default: nil, secret: true,
                 desc: "Customer-supplied, AES-256 encryption key"
    config_param :blind_write, :bool, default: false,
                 desc: "Whether to check if object already exists by given GCS path. Allows avoiding giving storage.object.get permission"
    # Repeatable <object_metadata> sections; uploaded as x-goog-meta-* headers.
    config_section :object_metadata, required: false do
      config_param :key, :string, default: ""
      config_param :value, :string, default: ""
    end

    DEFAULT_FORMAT_TYPE = "out_file"

    config_section :format do
      config_set_default :@type, DEFAULT_FORMAT_TYPE
    end

    # Buffer defaults: chunk by time with daily time slices.
    config_section :buffer do
      config_set_default :chunk_keys, ['time']
      config_set_default :timekey, (60 * 60 * 24)
    end

    # An MD5 hexdigest is 32 characters, so %{hex_random} cannot exceed it.
    MAX_HEX_RANDOM_LENGTH = 32
74 |
    # Validates the configuration and precomputes per-flush state: the
    # formatter, the object creator, the time-slice formatter and the
    # credentials to hand to the GCS client.
    def configure(conf)
      compat_parameters_convert(conf, :buffer, :formatter, :inject)
      super

      if @hex_random_length > MAX_HEX_RANDOM_LENGTH
        raise Fluent::ConfigError, "hex_random_length parameter should be set to #{MAX_HEX_RANDOM_LENGTH} characters or less."
      end

      # The customer-supplied, AES-256 encryption key that will be used to encrypt the file.
      @encryption_opts = {
        encryption_key: @encryption_key,
      }

      # Flatten the repeatable <object_metadata> sections into a plain hash.
      if @object_metadata
        @object_metadata_hash = @object_metadata.map {|m| [m.key, m.value] }.to_h
      end

      @formatter = formatter_create

      @object_creator = Fluent::GCS.discovered_object_creator(@store_as, transcoding: @transcoding)
      # For backward compatibility
      # TODO: Remove time_slice_format when end of support compat_parameters
      @configured_time_slice_format = conf['time_slice_format']
      @time_slice_with_tz = Fluent::Timezone.formatter(@timekey_zone, @configured_time_slice_format || timekey_to_timeformat(@buffer_config['timekey']))

      # Inline credentials_json takes precedence over the keyfile path.
      if @credentials_json
        @credentials = @credentials_json
      else
        @credentials = keyfile
      end
    end
106 |
    # Creates the GCS client and resolves the target bucket before the
    # output starts accepting chunks.
    def start
      @gcs = Google::Cloud::Storage.new(
        project: @project,
        keyfile: @credentials, # path string, or inline hash from credentials_json
        retries: @client_retries,
        timeout: @client_timeout
      )
      @gcs_bucket = @gcs.bucket(@bucket)

      ensure_bucket
      super
    end
119 |
120 | def format(tag, time, record)
121 | r = inject_values_to_record(tag, time, record)
122 | @formatter.format(tag, time, r)
123 | end
124 |
    # This plugin keeps no cross-worker state, so it can run under Fluentd's
    # multi-worker mode.
    def multi_workers_ready?
      true
    end
128 |
    # Uploads one buffer chunk to GCS: the object creator serializes the
    # chunk into a tempfile, which is then uploaded under a freshly
    # generated object key.
    def write(chunk)
      path = generate_path(chunk)

      @object_creator.create(chunk) do |obj|
        opts = {
          metadata: @object_metadata_hash,
          acl: @acl,
          storage_class: @storage_class,
          content_type: @object_creator.content_type,
          content_encoding: @object_creator.content_encoding,
        }
        # Adds encryption_key when configured (nil otherwise).
        opts.merge!(@encryption_opts)

        log.debug { "out_gcs: upload chunk:#{chunk.key} to gcs://#{@bucket}/#{path} options: #{opts}" }
        @gcs_bucket.upload_file(obj.path, path, **opts)
      end
    end
146 |
147 | private
148 |
149 | def ensure_bucket
150 | return unless @gcs_bucket.nil?
151 |
152 | if !@auto_create_bucket
153 | raise "bucket `#{@bucket}` does not exist"
154 | end
155 | log.info "creating bucket `#{@bucket}`"
156 | @gcs_bucket = @gcs.create_bucket(@bucket)
157 | end
158 |
159 | def hex_random(chunk)
160 | Digest::MD5.hexdigest(chunk.unique_id)[0...@hex_random_length]
161 | end
162 |
163 | def check_object_exists(path)
164 | if !@blind_write
165 | return @gcs_bucket.find_file(path, **@encryption_opts)
166 | else
167 | return false
168 | end
169 | end
170 |
    # Builds the GCS object key for +chunk+ by expanding the placeholders of
    # object_key_format, probing for an unoccupied key by incrementing
    # %{index}. Raises when the candidate path stops changing and overwrite
    # is disabled.
    def generate_path(chunk)
      metadata = chunk.metadata
      time_slice = if metadata.timekey.nil?
        ''.freeze
      else
        @time_slice_with_tz.call(metadata.timekey)
      end
      tags = {
        "%{file_extension}" => @object_creator.file_extension,
        "%{hex_random}" => hex_random(chunk),
        "%{hostname}" => Socket.gethostname,
        "%{path}" => @path,
        "%{time_slice}" => time_slice,
      }

      prev = nil
      i = 0

      # NOTE(review): Ruby integers do not overflow, so `until i < 0` is
      # effectively an infinite loop; termination relies on the returns and
      # the `path == prev` raise below (which fires once the expansion stops
      # producing new candidate paths).
      until i < 0 do # Until overflow
        # %{uuid_flush} and %{index} change on every probe iteration.
        tags["%{uuid_flush}"] = SecureRandom.uuid
        tags["%{index}"] = i

        path = @object_key_format.gsub(Regexp.union(tags.keys), tags)
        path = extract_placeholders(path, chunk)
        return path unless check_object_exists(path)

        if path == prev
          if @overwrite
            log.warn "object `#{path}` already exists but overwrites it"
            return path
          end
          raise "object `#{path}` already exists"
        end

        i += 1
        prev = path
      end

      raise "cannot find an unoccupied GCS path"
    end
211 |
212 | # This is stolen from Fluentd
213 | def timekey_to_timeformat(timekey)
214 | case timekey
215 | when nil then ''
216 | when 0...60 then '%Y%m%d%H%M%S' # 60 exclusive
217 | when 60...3600 then '%Y%m%d%H%M'
218 | when 3600...86400 then '%Y%m%d%H'
219 | else '%Y%m%d'
220 | end
221 | end
222 | end
223 | end
224 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # fluent-plugin-gcs
2 | [](https://badge.fury.io/rb/fluent-plugin-gcs) [](https://github.com/daichirata/fluent-plugin-gcs/actions/workflows/test.yaml) [](https://codeclimate.com/github/daichirata/fluent-plugin-gcs)
3 |
4 | Google Cloud Storage output plugin for [Fluentd](https://github.com/fluent/fluentd).
5 |
6 | ## Requirements
7 |
8 | | fluent-plugin-gcs | fluentd | ruby |
9 | |--------------------|------------|--------|
10 | | >= 0.4.0 | >= v0.14.0 | >= 2.4 |
11 | | < 0.4.0 | >= v0.12.0 | >= 1.9 |
12 |
13 | ## Installation
14 |
15 | ``` shell
16 | $ gem install fluent-plugin-gcs -v "~> 0.3" --no-document # for fluentd v0.12 or later
17 | $ gem install fluent-plugin-gcs -v "0.4.0" --no-document # for fluentd v0.14 or later
18 | ```
19 |
20 | ## Examples
21 |
22 | ### For v0.14 style
23 |
24 | ```
25 |
26 | @type gcs
27 |
28 | project YOUR_PROJECT
29 | keyfile YOUR_KEYFILE_PATH
30 | bucket YOUR_GCS_BUCKET_NAME
31 | object_key_format %{path}%{time_slice}_%{index}.%{file_extension}
32 | path logs/${tag}/%Y/%m/%d/
33 |
34 | # if you want to use ${tag} or %Y/%m/%d/ like syntax in path / object_key_format,
35 | # need to specify tag for ${tag} and time for %Y/%m/%d in argument.
36 |
37 | @type file
38 | path /var/log/fluent/gcs
39 | timekey 1h # 1 hour partition
40 | timekey_wait 10m
41 | timekey_use_utc true # use utc
42 |
43 |
44 |
45 | @type json
46 |
47 |
48 | ```
49 |
50 | ### For v0.12 style
51 |
52 | ```
53 |
54 | @type gcs
55 |
56 | project YOUR_PROJECT
57 | keyfile YOUR_KEYFILE_PATH
58 | bucket YOUR_GCS_BUCKET_NAME
59 | object_key_format %{path}%{time_slice}_%{index}.%{file_extension}
60 | path logs/
61 | buffer_path /var/log/fluent/gcs
62 |
63 | time_slice_format %Y%m%d-%H
64 | time_slice_wait 10m
65 | utc
66 |
67 | ```
68 |
69 | ## Configuration
70 |
71 | ### Authentication
72 |
73 | You can provide the project and credential information to connect to the Storage
74 | service, or if you are running on Google Compute Engine this configuration is taken care of for you.
75 |
76 | **project**
77 |
78 | Project identifier for GCS. Project are discovered in the following order:
79 | * Specify project in `project`
80 | * Discover project in environment variables `STORAGE_PROJECT`, `GOOGLE_CLOUD_PROJECT`, `GCLOUD_PROJECT`
81 | * Discover GCE credentials
82 |
83 | **keyfile**
84 |
85 | Path of GCS service account credentials JSON file. Credentials are discovered in the following order:
86 | * Specify credentials path in `keyfile`
87 | * Discover credentials path in environment variables `GOOGLE_CLOUD_KEYFILE`, `GCLOUD_KEYFILE`
88 | * Discover credentials JSON in environment variables `GOOGLE_CLOUD_KEYFILE_JSON`, `GCLOUD_KEYFILE_JSON`
89 | * Discover credentials file in the Cloud SDK's path
90 | * Discover GCE credentials
91 |
92 | **client_retries**
93 |
94 | Number of times to retry requests on server error.
95 |
96 | **client_timeout**
97 |
98 | Default timeout to use in requests.
99 |
100 | **bucket (*required)**
101 |
102 | GCS bucket name.
103 |
104 | **store_as**
105 |
Archive format on GCS. You can use several formats:
107 |
108 | * gzip (default)
109 | * json
110 | * text
111 |
112 | **path**
113 |
114 | path prefix of the files on GCS. Default is "" (no prefix).
115 |
116 | **object_key_format**
117 |
118 | The format of GCS object keys. You can use several built-in variables:
119 |
120 | * %{path}
121 | * %{time_slice}
122 | * %{index}
123 | * %{file_extension}
124 | * %{uuid_flush}
125 | * %{hex_random}
126 | * %{hostname}
127 |
128 | to decide keys dynamically.
129 |
130 | * `%{path}` is exactly the value of `path` configured in the configuration file. E.g., "logs/" in the example configuration above.
131 | * `%{time_slice}` is the time-slice in text that are formatted with `time_slice_format`.
132 | * `%{index}` is the sequential number starts from 0, increments when multiple files are uploaded to GCS in the same time slice.
* `%{file_extension}` is changed by the value of `store_as`.
134 | * gzip - gz
135 | * json - json
136 | * text - txt
* `%{uuid_flush}` is a uuid that is replaced every time the buffer is flushed
138 | * `%{hex_random}` a random hex string that is replaced for each buffer chunk, not assured to be unique. You can configure the length of string with a `hex_random_length` parameter (Default: 4).
139 | * `%{hostname}` is set to the standard host name of the system of the running server.
140 |
141 | The default format is `%{path}%{time_slice}_%{index}.%{file_extension}`.
142 |
143 | **hex_random_length**
144 |
145 | The length of `%{hex_random}` placeholder.
146 |
147 | **transcoding**
148 |
149 | Enable the decompressive form of transcoding.
150 |
151 | See also [Transcoding of gzip-compressed files](https://cloud.google.com/storage/docs/transcoding).
152 |
153 | **format**
154 |
Change one line format in the GCS object. You can use several formats:
156 |
157 | * out_file (default)
158 | * json
159 | * ltsv
160 | * single_value
161 |
162 | See also [official Formatter article](http://docs.fluentd.org/articles/formatter-plugin-overview).
163 |
164 | **auto_create_bucket**
165 |
Create GCS bucket if it does not exist. Default is true.
167 |
168 | **acl**
169 |
170 | Permission for the object in GCS. Acceptable values are:
171 |
172 | * `auth_read` - File owner gets OWNER access, and allAuthenticatedUsers get READER access.
173 | * `owner_full` - File owner gets OWNER access, and project team owners get OWNER access.
174 | * `owner_read` - File owner gets OWNER access, and project team owners get READER access.
175 | * `private` - File owner gets OWNER access.
176 | * `project_private` - File owner gets OWNER access, and project team members get access according to their roles.
177 | * `public_read` - File owner gets OWNER access, and allUsers get READER access.
178 |
179 | Default is nil (bucket default object ACL). See also [official document](https://cloud.google.com/storage/docs/access-control/lists).
180 |
181 | **storage_class**
182 |
183 | Storage class of the file. Acceptable values are:
184 |
185 | * `dra` - Durable Reduced Availability
186 | * `nearline` - Nearline Storage
187 | * `coldline` - Coldline Storage
188 | * `multi_regional` - Multi-Regional Storage
189 | * `regional` - Regional Storage
190 | * `standard` - Standard Storage
191 |
192 | Default is nil. See also [official document](https://cloud.google.com/storage/docs/storage-classes).
193 |
194 | **encryption_key**
195 |
196 | You can also choose to provide your own AES-256 key for server-side encryption. See also [Customer-supplied encryption keys](https://cloud.google.com/storage/docs/encryption#customer-supplied).
197 |
198 | `encryption_key_sha256` will be calculated using encryption_key.
199 |
200 | **overwrite**
201 |
202 | Overwrite already existing path. Default is false, which raises an error
203 | if a GCS object of the same path already exists, or increment the
204 | `%{index}` placeholder until finding an absent path.
205 |
206 | **buffer_path (*required)**
207 |
208 | path prefix of the files to buffer logs.
209 |
210 | **time_slice_format**
211 |
212 | Format of the time used as the file name. Default is '%Y%m%d'. Use
213 | '%Y%m%d%H' to split files hourly.
214 |
215 | **time_slice_wait**
216 |
The time to wait for old logs. Default is 10 minutes. Specify a larger value
if old logs may arrive late.
219 |
220 | **localtime**
221 |
222 | Use Local time instead of UTC.
223 |
224 | **utc**
225 |
226 | Use UTC instead of local time.
227 |
228 | And see [official Time Sliced Output article](http://docs.fluentd.org/articles/output-plugin-overview#time-sliced-output-parameters)
229 |
230 | **blind_write**
231 |
232 | Doesn't check if an object exists in GCS before writing. Default is false.
233 |
234 | Allows to avoid granting of `storage.objects.get` permission.
235 |
236 | Warning! If the object exists and `storage.objects.delete` permission is not
237 | granted, it will result in an unrecoverable error. Usage of `%{hex_random}` is
238 | recommended.
239 |
240 | ### ObjectMetadata
241 |
User provided web-safe keys and arbitrary string values that will be returned with requests for the file as "x-goog-meta-" response headers.
243 |
244 | ```
245 |
246 | @type gcs
247 |
248 |
249 | key KEY_DATA_1
250 | value VALUE_DATA_1
251 |
252 |
253 |
254 | key KEY_DATA_2
255 | value VALUE_DATA_2
256 |
257 |
258 | ```
259 |
--------------------------------------------------------------------------------
/test/plugin/test_out_gcs.rb:
--------------------------------------------------------------------------------
1 | require "helper"
2 | require "fluent/test/driver/output"
3 | require "fluent/test/helpers"
4 | require "google/cloud/storage"
5 |
6 | class GCSOutputTest < Test::Unit::TestCase
7 | include Fluent::Test::Helpers
8 |
  # Initializes Fluentd's test environment before each test case.
  def setup
    Fluent::Test.setup
  end
12 |
13 | CONFIG = <<-EOC
14 | project test_project
15 | keyfile test_keyfile
16 | bucket test_bucket
17 | path log/
18 |
19 | @type memory
20 | timekey_use_utc true
21 |
22 |
23 | log_level debug
24 |
25 | EOC
26 |
  # Builds an output test driver for GCSOutput, exposing a couple of
  # internals (object_creator, encryption_opts) for the assertions.
  def create_driver(conf = CONFIG)
    Fluent::Test::Driver::Output.new(Fluent::Plugin::GCSOutput) do
      attr_accessor :object_creator, :encryption_opts
    end.configure(conf)
  end

  # Joins configuration fragments into a single config string.
  def config(*args)
    args.join("\n")
  end
36 |
sub_test_case "configure" do
  # Verifies every default produced by the shared CONFIG fixture.
  def test_configure
    driver = create_driver
    assert_equal "test_project", driver.instance.project
    assert_equal "test_keyfile", driver.instance.keyfile
    assert_equal "test_bucket", driver.instance.bucket
    assert_equal "%{path}%{time_slice}_%{index}.%{file_extension}", driver.instance.object_key_format
    assert_equal "log/", driver.instance.path
    assert_equal :gzip, driver.instance.store_as
    assert_equal false, driver.instance.transcoding
    assert_equal true, driver.instance.auto_create_bucket
    assert_equal 4, driver.instance.hex_random_length
    assert_equal false, driver.instance.overwrite
    assert_equal "out_file", driver.instance.instance_variable_get(:@format)
    # assert_nil instead of assert_equal(nil, ...): the latter is deprecated
    # by test-unit and emits a warning on every run.
    assert_nil driver.instance.acl
    assert_nil driver.instance.storage_class
    assert_nil driver.instance.encryption_key
    assert_equal [], driver.instance.object_metadata
  end

  # hex_random_length values above 32 are rejected at configure time.
  def test_configure_with_hex_random_length
    assert_raise Fluent::ConfigError do
      create_driver(config(CONFIG, "hex_random_length 33"))
    end
    assert_nothing_raised do
      create_driver(config(CONFIG, "hex_random_length 32"))
    end
  end

  # store_as selects the object-creator implementation.
  # assert_kind_of matches the original is_a? semantics and gives a
  # descriptive failure message instead of "expected true but was false".
  def test_configure_with_gzip_object_creator
    driver = create_driver(config(CONFIG, "store_as gzip"))
    assert_kind_of Fluent::GCS::GZipObjectCreator, driver.instance.object_creator
  end

  def test_configure_with_text_object_creator
    driver = create_driver(config(CONFIG, "store_as text"))
    assert_kind_of Fluent::GCS::TextObjectCreator, driver.instance.object_creator
  end

  def test_configure_with_json_object_creator
    driver = create_driver(config(CONFIG, "store_as json"))
    assert_kind_of Fluent::GCS::JSONObjectCreator, driver.instance.object_creator
  end
end
81 |
# Ensures #start constructs the Google::Cloud::Storage client with exactly
# the configured project/keyfile/retries/timeout and looks up the bucket.
def test_start
  # mock! asserts that .bucket("test_bucket") is called exactly once.
  bucket = mock!.bucket("test_bucket") { stub! }

  # The client must be built with these keyword options (mapped from the
  # client_retries / client_timeout config keys below).
  mock(Google::Cloud::Storage).new(
    project: "test_project",
    keyfile: "test_keyfile",
    retries: 1,
    timeout: 2,
  ) { bucket }

  # NOTE(review): angle-bracket section tags (e.g. <buffer>...</buffer>)
  # appear to have been stripped from this heredoc in this dump -- verify
  # against the repository before relying on the exact config text.
  driver = create_driver <<-EOC
    project test_project
    keyfile test_keyfile
    bucket test_bucket
    client_retries 1
    client_timeout 2

    @type memory
    timekey_use_utc true

  EOC

  driver.instance.start
end
106 |
# Verifies bucket auto-creation on #start: when the bucket lookup returns
# nil, the plugin creates it iff auto_create_bucket is true (the default),
# and raises otherwise.
def test_ensure_bucket
  # Despite the name, this stub stands in for the storage client: it
  # responds to both .bucket (not found -> nil) and .create_bucket.
  bucket = stub!
  bucket.bucket { nil }
  bucket.create_bucket { "ok" }
  stub(Google::Cloud::Storage).new { bucket }

  # NOTE(review): section tags (e.g. <buffer>...</buffer>) appear stripped
  # from this heredoc in this dump -- verify against the repository.
  driver = create_driver <<-EOC
    bucket test_bucket

    @type memory
    timekey_use_utc true

  EOC
  driver.instance.start
  # The created bucket handle is stored in @gcs_bucket.
  assert_equal "ok", driver.instance.instance_variable_get(:@gcs_bucket)

  driver2 = create_driver <<-EOC
    bucket test_bucket
    auto_create_bucket false

    @type memory
    timekey_use_utc true

  EOC
  # With auto_create_bucket disabled, a missing bucket is an error.
  assert_raise do
    driver2.instance.start
  end
end
135 |
# Fixed typo in the sub-test-case name: "foramt" -> "format".
sub_test_case "format" do
  setup do
    # Stub out GCS entirely (object never exists, upload is a no-op) so
    # only the formatted chunk contents are under test.
    bucket = stub!
    bucket.find_file { false }
    bucket.upload_file
    storage = stub!
    storage.bucket { bucket }
    stub(Google::Cloud::Storage).new { storage }

    @time = event_time("2016-01-01 12:00:00 UTC")
  end

  # Default formatter (out_file): "time \t tag \t json-record".
  def test_format
    with_timezone("UTC") do
      driver = create_driver(CONFIG)
      driver.run(default_tag: "test") do
        driver.feed(@time, {"a"=>1})
        driver.feed(@time, {"a"=>2})
      end
      assert_equal %[2016-01-01T12:00:00+00:00\ttest\t{"a":1}\n], driver.formatted[0]
      assert_equal %[2016-01-01T12:00:00+00:00\ttest\t{"a":2}\n], driver.formatted[1]
    end
  end

  # include_tag_key / include_time_key inject "tag" and "time" fields into
  # the record itself.
  def test_format_included_tag_and_time
    with_timezone("UTC") do
      driver = create_driver(config(CONFIG, 'include_tag_key true', 'include_time_key true'))
      driver.run(default_tag: "test") do
        driver.feed(@time, {"a"=>1})
        driver.feed(@time, {"a"=>2})
      end
      assert_equal %[2016-01-01T12:00:00+00:00\ttest\t{"a":1,"tag":"test","time":"2016-01-01T12:00:00+00:00"}\n],
                   driver.formatted[0]
      assert_equal %[2016-01-01T12:00:00+00:00\ttest\t{"a":2,"tag":"test","time":"2016-01-01T12:00:00+00:00"}\n],
                   driver.formatted[1]
    end
  end

  def test_format_with_format_ltsv
    with_timezone("UTC") do
      driver = create_driver(config(CONFIG, 'format ltsv'))
      driver.run(default_tag: "test") do
        driver.feed(@time, {"a"=>1, "b"=>1})
        driver.feed(@time, {"a"=>2, "b"=>2})
      end
      assert_equal %[a:1\tb:1\n], driver.formatted[0]
      assert_equal %[a:2\tb:2\n], driver.formatted[1]
    end
  end

  # json format emits the bare record, without the time/tag prefix.
  def test_format_with_format_json
    with_timezone("UTC") do
      driver = create_driver(config(CONFIG, 'format json'))
      driver.run(default_tag: "test") do
        driver.feed(@time, {"a"=>1})
        driver.feed(@time, {"a"=>2})
      end
      assert_equal %[{"a":1}\n], driver.formatted[0]
      assert_equal %[{"a":2}\n], driver.formatted[1]
    end
  end

  def test_format_with_format_json_included_tag
    with_timezone("UTC") do
      driver = create_driver(config(CONFIG, 'format json', 'include_tag_key true'))
      driver.run(default_tag: "test") do
        driver.feed(@time, {"a"=>1})
        driver.feed(@time, {"a"=>2})
      end
      assert_equal %[{"a":1,"tag":"test"}\n], driver.formatted[0]
      assert_equal %[{"a":2,"tag":"test"}\n], driver.formatted[1]
    end
  end

  def test_format_with_format_json_included_time
    with_timezone("UTC") do
      driver = create_driver(config(CONFIG, 'format json', 'include_time_key true'))
      driver.run(default_tag: "test") do
        driver.feed(@time, {"a"=>1})
        driver.feed(@time, {"a"=>2})
      end
      assert_equal %[{"a":1,"time":"2016-01-01T12:00:00+00:00"}\n], driver.formatted[0]
      assert_equal %[{"a":2,"time":"2016-01-01T12:00:00+00:00"}\n], driver.formatted[1]
    end
  end

  def test_format_with_format_json_included_tag_and_time
    with_timezone("UTC") do
      driver = create_driver(config(CONFIG, 'format json', 'include_tag_key true', 'include_time_key true'))
      driver.run(default_tag: "test") do
        driver.feed(@time, {"a"=>1})
        driver.feed(@time, {"a"=>2})
      end
      assert_equal %[{"a":1,"tag":"test","time":"2016-01-01T12:00:00+00:00"}\n], driver.formatted[0]
      assert_equal %[{"a":2,"tag":"test","time":"2016-01-01T12:00:00+00:00"}\n], driver.formatted[1]
    end
  end
end
234 |
sub_test_case "write" do
  # Shared driver for the upload tests: stubs the GCS client, then feeds a
  # single event at 2016-01-01 15:00:00 UTC through the plugin.
  #
  # Without a block it expects exactly one find_file(path, enc_opts)
  # returning false (object absent) followed by one upload_file with the
  # given path and merged options. With a block, the caller registers its
  # own expectations on the mocked bucket.
  def check_upload(conf, path = nil, enc_opts = nil, upload_opts = nil, &block)
    bucket = mock!
    if block.nil?
      bucket.find_file(path, enc_opts) { false }
      bucket.upload_file(anything, path, upload_opts.merge(enc_opts))
    else
      block.call(bucket)
    end
    storage = stub!
    storage.bucket { bucket }
    stub(Google::Cloud::Storage).new { storage }

    driver = create_driver(conf)
    driver.run(default_tag: "test") do
      driver.feed(event_time("2016-01-01 15:00:00 UTC"), {"a"=>1})
    end
  end

  # gzip store uploads with .gz extension and application/gzip content type.
  def test_write_with_gzip
    conf = config(CONFIG, "store_as gzip")

    enc_opts = {
      encryption_key: nil,
    }

    upload_opts = {
      metadata: {},
      acl: nil,
      storage_class: nil,
      content_type: "application/gzip",
      content_encoding: nil,
      encryption_key: nil,
    }.merge(enc_opts)

    check_upload(conf, "log/20160101_0.gz", enc_opts, upload_opts)
  end

  # transcoding true: content is still gzipped (.gz) but declared as
  # text/plain with content_encoding gzip, enabling GCS decompressive
  # transcoding on download.
  def test_write_with_transcoding
    conf = config(CONFIG, "store_as gzip", "transcoding true")

    enc_opts = {
      encryption_key: nil,
    }

    upload_opts = {
      metadata: {},
      acl: nil,
      storage_class: nil,
      content_type: "text/plain",
      content_encoding: "gzip",
      encryption_key: nil,
    }.merge(enc_opts)

    check_upload(conf, "log/20160101_0.gz", enc_opts, upload_opts)
  end

  def test_write_with_text
    conf = config(CONFIG, "store_as text")

    enc_opts = {
      encryption_key: nil,
    }

    upload_opts = {
      metadata: {},
      acl: nil,
      storage_class: nil,
      content_type: "text/plain",
      content_encoding: nil,
      encryption_key: nil,
    }.merge(enc_opts)

    check_upload(conf, "log/20160101_0.txt", enc_opts, upload_opts)
  end

  def test_write_with_json
    conf = config(CONFIG, "store_as json")

    enc_opts = {
      encryption_key: nil,
    }

    upload_opts = {
      metadata: {},
      acl: nil,
      storage_class: nil,
      content_type: "application/json",
      content_encoding: nil,
      encryption_key: nil,
    }.merge(enc_opts)

    check_upload(conf, "log/20160101_0.json", enc_opts, upload_opts)
  end

  # 2016-01-02 01:00 JST == 2016-01-01 16:00 UTC, so with timekey_use_utc
  # the time slice in the object key must be 20160101, not 20160102.
  def test_write_with_utc
    conf = config(CONFIG)

    enc_opts = {
      encryption_key: nil,
    }

    upload_opts = {
      metadata: {},
      acl: nil,
      storage_class: nil,
      content_type: "application/gzip",
      content_encoding: nil,
      encryption_key: nil,
    }.merge(enc_opts)

    Timecop.freeze(Time.parse("2016-01-02 01:00:00 JST")) do
      check_upload(conf, "log/20160101_0.gz", enc_opts, upload_opts)
    end
  end

  # ${tag} in path is substituted with the chunk's tag ("test").
  def test_write_with_placeholder_in_path
    # NOTE(review): section tags (e.g. <buffer>...</buffer>) appear stripped
    # from this heredoc in this dump -- verify against the repository.
    conf = <<-CONFIG
      project test_project
      keyfile test_keyfile
      bucket test_bucket
      path log/${tag}/

      @type memory
      timekey 86400
      timekey_wait 10m
      timekey_use_utc true

    CONFIG

    enc_opts = {
      encryption_key: nil,
    }

    upload_opts = {
      metadata: {},
      acl: nil,
      storage_class: nil,
      content_type: "application/gzip",
      content_encoding: nil,
      encryption_key: nil,
    }.merge(enc_opts)

    Timecop.freeze(Time.parse("2016-01-02 01:00:00 JST")) do
      check_upload(conf, "log/test/20160101_0.gz", enc_opts, upload_opts)
    end
  end

  # The customer-supplied encryption key must be passed to both find_file
  # and upload_file.
  def test_write_with_encryption
    conf = config(CONFIG, "encryption_key aaa")

    enc_opts = {
      encryption_key: "aaa",
    }

    upload_opts = {
      metadata: {},
      acl: nil,
      storage_class: nil,
      content_type: "application/gzip",
      content_encoding: nil,
      encryption_key: "aaa",
    }.merge(enc_opts)

    check_upload(conf, "log/20160101_0.gz", enc_opts, upload_opts)
  end

  # acl config values reach upload_file as symbols.
  def test_write_with_acl
    conf = config(CONFIG, "acl auth_read")

    enc_opts = {
      encryption_key: nil,
    }

    upload_opts = {
      metadata: {},
      acl: :auth_read,
      storage_class: nil,
      content_type: "application/gzip",
      content_encoding: nil,
      encryption_key: nil,
    }.merge(enc_opts)

    check_upload(conf, "log/20160101_0.gz", enc_opts, upload_opts)
  end

  # storage_class config values reach upload_file as symbols.
  def test_write_with_storage_class
    conf = config(CONFIG, "storage_class regional")

    enc_opts = {
      encryption_key: nil,
    }

    upload_opts = {
      metadata: {},
      acl: nil,
      storage_class: :regional,
      content_type: "application/gzip",
      content_encoding: nil,
      encryption_key: nil,
    }.merge(enc_opts)

    check_upload(conf, "log/20160101_0.gz", enc_opts, upload_opts)
  end

  # object_metadata entries become the upload's metadata hash.
  def test_write_with_object_metadata
    # NOTE(review): the <object_metadata> section tags appear to have been
    # stripped from this heredoc in this dump -- verify against the
    # repository.
    conf = config(CONFIG, <<-EOM)

      key test-key-1
      value test-value-1


      key test-key-2
      value test-value-2

    EOM

    enc_opts = {
      encryption_key: nil,
    }

    upload_opts = {
      metadata: {"test-key-1" => "test-value-1", "test-key-2" => "test-value-2"},
      acl: nil,
      storage_class: nil,
      content_type: "application/gzip",
      content_encoding: nil,
      encryption_key: nil,
    }.merge(enc_opts)

    check_upload(conf, "log/20160101_0.gz", enc_opts, upload_opts)
  end

  # Exercises every object_key_format placeholder. The expected path shows
  # %{hex_random} as the first 4 hex chars of the MD5 of the chunk's
  # unique_id (hex_random_length defaults to 4).
  def test_write_with_custom_object_key_format
    conf = config(CONFIG, "object_key_format %{path}%{file_extension}/%{hex_random}/%{hostname}/%{index}/%{time_slice}/%{uuid_flush}")

    enc_opts = {
      encryption_key: nil,
    }

    upload_opts = {
      metadata: {},
      acl: nil,
      storage_class: nil,
      content_type: "application/gzip",
      content_encoding: nil,
      encryption_key: nil,
    }.merge(enc_opts)

    any_instance_of(Fluent::Plugin::Buffer::MemoryChunk) do |b|
      # Memo: Digest::MD5.hexdigest("unique_id") => "69080cee5b6d4c35a8bbf5c48335fe08"
      stub(b).unique_id { "unique_id" }
    end
    # NOTE(review): uuid is stubbed twice and the expectation below uses
    # "uuid2" -- presumably a fresh uuid is drawn after the first key
    # collides (find_file => true); confirm RR's multi-stub ordering.
    stub(SecureRandom).uuid { "uuid1" }
    stub(SecureRandom).uuid { "uuid2" }
    stub(Socket).gethostname { "test-hostname" }

    check_upload(conf) do |bucket|
      # First candidate key exists, second does not -> index bumps to 1.
      bucket.find_file(anything, enc_opts) { true }
      bucket.find_file(anything, enc_opts) { false }
      bucket.upload_file(anything, "log/gz/6908/test-hostname/1/20160101/uuid2", upload_opts.merge(enc_opts))
    end
  end

  # overwrite true: an existing object is uploaded over instead of raising.
  def test_write_with_overwrite_true
    conf = config(CONFIG, "object_key_format %{path}%{time_slice}.%{file_extension}", "overwrite true")

    enc_opts = {
      encryption_key: nil,
    }

    upload_opts = {
      metadata: {},
      acl: nil,
      storage_class: nil,
      content_type: "application/gzip",
      content_encoding: nil,
      encryption_key: nil,
    }.merge(enc_opts)

    check_upload(conf) do |bucket|
      bucket.find_file(anything, enc_opts) { true }
      bucket.find_file(anything, enc_opts) { true }
      bucket.upload_file(anything, "log/20160101.gz", upload_opts.merge(enc_opts))
    end
  end

  # overwrite false (the default) with a key format that cannot vary
  # (no %{index}/%{uuid_flush}): a persistent collision must raise.
  def test_write_with_overwrite_false
    conf = config(CONFIG, "object_key_format %{path}%{time_slice}.%{file_extension}", "overwrite false")

    enc_opts = {
      encryption_key: nil,
    }

    assert_raise do
      silenced do
        check_upload(conf) do |bucket|
          bucket.find_file(anything, enc_opts) { true }
          bucket.find_file(anything, enc_opts) { true }
        end
      end
    end
  end
end
539 | end
540 |
--------------------------------------------------------------------------------