├── .circleci
│   └── config.yml
├── .gitattributes
├── .github
│   └── rebase.yml
├── .gitignore
├── README.md
├── certs
│   └── cacert.pem
└── src
    ├── Serilog.Sinks.Kafka.TestApi
    │   ├── Program.cs
    │   ├── Properties
    │   │   └── launchSettings.json
    │   ├── Serilog.Sinks.Kafka.TestApi.csproj
    │   ├── Startup.cs
    │   ├── appsettings.Development.json
    │   └── appsettings.json
    ├── Serilog.Sinks.Kafka.TestApp
    │   ├── Program.cs
    │   ├── Serilog.Sinks.Kafka.TestApp.csproj
    │   └── appsettings.json
    ├── Serilog.Sinks.Kafka.sln
    └── Serilog.Sinks.Kafka
        ├── KafkaSink.cs
        ├── LoggerConfigurationExtensions.cs
        ├── ProducerConfigExtensions.cs
        └── Serilog.Sinks.Kafka.csproj
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | jobs:
4 | build:
5 | docker:
6 | - image: "mcr.microsoft.com/dotnet/core/sdk:3.1"
7 | steps:
8 | - checkout
9 | - run:
10 | name: Build
11 | command: dotnet build ./src
12 | package:
13 | docker:
14 | - image: "mcr.microsoft.com/dotnet/core/sdk:3.1"
15 | steps:
16 | - checkout
17 | - run:
18 | name: Setup SEM_VERSION Environment Variable
19 | command: echo 'SEM_VERSION=$(echo $CIRCLE_TAG | sed -n "s/^v\(.*\)/\1/p")' >> $BASH_ENV
20 | - run:
21 | name: Build
22 | command: dotnet build ./src
23 | - run:
24 | name: Package
25 | command: dotnet pack ./src/Serilog.Sinks.Kafka/Serilog.Sinks.Kafka.csproj --no-build -p:PackageVersion=$SEM_VERSION --output ./nupkgs
26 | - run:
27 | name: Push Package to NuGet
28 | command: dotnet nuget push ./nupkgs/Serilog.Sinks.Confluent.Kafka.$SEM_VERSION.nupkg -k $NUGET_API_KEY -s https://api.nuget.org/v3/index.json
29 |
30 | workflows:
31 | version: 2
32 | pr_workflow:
33 | jobs:
34 | - build:
35 | filters:
36 | branches:
37 | ignore: master
38 | merge_workflow:
39 | jobs:
40 | - build:
41 | filters:
42 | branches:
43 | only: master
44 | tag_workflow:
45 | jobs:
46 | - package:
47 | filters:
48 | tags:
49 | only: /^v(.*)/
50 | branches:
51 | ignore: /.*/
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | * text=auto
2 | .circleci/**/*.* text eol=LF
3 | .helm/**/*.* text eol=LF
4 | scripts/**/*.* text eol=LF
5 | docker/**/*.* text eol=LF
--------------------------------------------------------------------------------
/.github/rebase.yml:
--------------------------------------------------------------------------------
1 | name: Automatic Rebase
2 | on:
3 | issue_comment:
4 | types: [created]
5 | jobs:
6 | rebase:
7 | name: Rebase
8 | if: github.event.issue.pull_request != '' && contains(github.event.comment.body, '/rebase')
9 | runs-on: ubuntu-latest
10 | steps:
11 | - name: Checkout the latest code
12 | uses: actions/checkout@v2
13 | with:
14 | token: ${{ secrets.AUTOREBASE_PAT }}
15 | fetch-depth: 0 # otherwise, you will fail to push refs to dest repo
16 | - name: Automatic Rebase
17 | uses: cirrus-actions/rebase@1.4
18 | env:
19 | GITHUB_TOKEN: ${{ secrets.AUTOREBASE_PAT }}
20 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | # Created by https://www.gitignore.io/api/visualstudio
3 |
4 | ### VisualStudio ###
5 | ## Ignore Visual Studio temporary files, build results, and
6 | ## files generated by popular Visual Studio add-ons.
7 | ##
8 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
9 |
10 | # User-specific files
11 | *.suo
12 | *.user
13 | *.userosscache
14 | *.sln.docstates
15 |
16 | # User-specific files (MonoDevelop/Xamarin Studio)
17 | *.userprefs
18 |
19 | # Build results
20 | [Dd]ebug/
21 | [Dd]ebugPublic/
22 | [Rr]elease/
23 | [Rr]eleases/
24 | x64/
25 | x86/
26 | bld/
27 | [Bb]in/
28 | [Oo]bj/
29 | [Ll]og/
30 | [Ll]ib/
31 |
32 | # Visual Studio 2015/2017 cache/options directory
33 | .vs/
34 | .vscode/
35 | # Uncomment if you have tasks that create the project's static files in wwwroot
36 | #wwwroot/
37 |
38 | # Visual Studio 2017 auto generated files
39 | Generated\ Files/
40 |
41 | # MSTest test Results
42 | [Tt]est[Rr]esult*/
43 | [Bb]uild[Ll]og.*
44 |
45 | # NUNIT
46 | *.VisualState.xml
47 | TestResult.xml
48 |
49 | # Build Results of an ATL Project
50 | [Dd]ebugPS/
51 | [Rr]eleasePS/
52 | dlldata.c
53 |
54 | # Benchmark Results
55 | BenchmarkDotNet.Artifacts/
56 |
57 | # .NET Core
58 | project.lock.json
59 | project.fragment.lock.json
60 | artifacts/
61 |
62 | # StyleCop
63 | StyleCopReport.xml
64 |
65 | # Files built by Visual Studio
66 | *_i.c
67 | *_p.c
68 | *_i.h
69 | *.ilk
70 | *.meta
71 | *.obj
72 | *.iobj
73 | *.pch
74 | *.pdb
75 | *.ipdb
76 | *.pgc
77 | *.pgd
78 | *.rsp
79 | *.sbr
80 | *.tlb
81 | *.tli
82 | *.tlh
83 | *.tmp
84 | *.tmp_proj
85 | *.log
86 | *.vspscc
87 | *.vssscc
88 | .builds
89 | *.pidb
90 | *.svclog
91 | *.scc
92 |
93 | # Chutzpah Test files
94 | _Chutzpah*
95 |
96 | # Visual C++ cache files
97 | ipch/
98 | *.aps
99 | *.ncb
100 | *.opendb
101 | *.opensdf
102 | *.sdf
103 | *.cachefile
104 | *.VC.db
105 | *.VC.VC.opendb
106 |
107 | # Visual Studio profiler
108 | *.psess
109 | *.vsp
110 | *.vspx
111 | *.sap
112 |
113 | # Visual Studio Trace Files
114 | *.e2e
115 |
116 | # TFS 2012 Local Workspace
117 | $tf/
118 |
119 | # Guidance Automation Toolkit
120 | *.gpState
121 |
122 | # ReSharper is a .NET coding add-in
123 | _ReSharper*/
124 | *.[Rr]e[Ss]harper
125 | *.DotSettings.user
126 |
127 | # JustCode is a .NET coding add-in
128 | .JustCode
129 |
130 | # TeamCity is a build add-in
131 | _TeamCity*
132 |
133 | # DotCover is a Code Coverage Tool
134 | *.dotCover
135 |
136 | # AxoCover is a Code Coverage Tool
137 | .axoCover/*
138 | !.axoCover/settings.json
139 |
140 | # Visual Studio code coverage results
141 | *.coverage
142 | *.coveragexml
143 |
144 | # NCrunch
145 | _NCrunch_*
146 | .*crunch*.local.xml
147 | nCrunchTemp_*
148 |
149 | # MightyMoose
150 | *.mm.*
151 | AutoTest.Net/
152 |
153 | # Web workbench (sass)
154 | .sass-cache/
155 |
156 | # Installshield output folder
157 | [Ee]xpress/
158 |
159 | # DocProject is a documentation generator add-in
160 | DocProject/buildhelp/
161 | DocProject/Help/*.HxT
162 | DocProject/Help/*.HxC
163 | DocProject/Help/*.hhc
164 | DocProject/Help/*.hhk
165 | DocProject/Help/*.hhp
166 | DocProject/Help/Html2
167 | DocProject/Help/html
168 |
169 | # Click-Once directory
170 | publish/
171 |
172 | # Publish Web Output
173 | *.[Pp]ublish.xml
174 | *.azurePubxml
175 | # Note: Comment the next line if you want to checkin your web deploy settings,
176 | # but database connection strings (with potential passwords) will be unencrypted
177 | *.pubxml
178 | *.publishproj
179 |
180 | # Microsoft Azure Web App publish settings. Comment the next line if you want to
181 | # checkin your Azure Web App publish settings, but sensitive information contained
182 | # in these scripts will be unencrypted
183 | PublishScripts/
184 |
185 | # NuGet Packages
186 | *.nupkg
187 | # The packages folder can be ignored because of Package Restore
188 | **/[Pp]ackages/*
189 | # except build/, which is used as an MSBuild target.
190 | !**/[Pp]ackages/build/
191 | # Uncomment if necessary however generally it will be regenerated when needed
192 | #!**/[Pp]ackages/repositories.config
193 | # NuGet v3's project.json files produces more ignorable files
194 | *.nuget.props
195 | *.nuget.targets
196 |
197 | # Microsoft Azure Build Output
198 | csx/
199 | *.build.csdef
200 |
201 | # Microsoft Azure Emulator
202 | ecf/
203 | rcf/
204 |
205 | # Windows Store app package directories and files
206 | AppPackages/
207 | BundleArtifacts/
208 | Package.StoreAssociation.xml
209 | _pkginfo.txt
210 | *.appx
211 |
212 | # Visual Studio cache files
213 | # files ending in .cache can be ignored
214 | *.[Cc]ache
215 | # but keep track of directories ending in .cache
216 | !*.[Cc]ache/
217 |
218 | # Others
219 | ClientBin/
220 | ~$*
221 | *~
222 | *.dbmdl
223 | *.dbproj.schemaview
224 | *.jfm
225 | *.pfx
226 | *.publishsettings
227 | orleans.codegen.cs
228 |
229 | # Including strong name files can present a security risk
230 | # (https://github.com/github/gitignore/pull/2483#issue-259490424)
231 | #*.snk
232 |
233 | # Since there are multiple workflows, uncomment next line to ignore bower_components
234 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
235 | #bower_components/
236 |
237 | # RIA/Silverlight projects
238 | Generated_Code/
239 |
240 | # Backup & report files from converting an old project file
241 | # to a newer Visual Studio version. Backup files are not needed,
242 | # because we have git ;-)
243 | _UpgradeReport_Files/
244 | Backup*/
245 | UpgradeLog*.XML
246 | UpgradeLog*.htm
247 | ServiceFabricBackup/
248 | *.rptproj.bak
249 |
250 | # SQL Server files
251 | *.mdf
252 | *.ldf
253 | *.ndf
254 |
255 | # Business Intelligence projects
256 | *.rdl.data
257 | *.bim.layout
258 | *.bim_*.settings
259 | *.rptproj.rsuser
260 |
261 | # Microsoft Fakes
262 | FakesAssemblies/
263 |
264 | # GhostDoc plugin setting file
265 | *.GhostDoc.xml
266 |
267 | # Node.js Tools for Visual Studio
268 | .ntvs_analysis.dat
269 | node_modules/
270 |
271 | # Visual Studio 6 build log
272 | *.plg
273 |
274 | # Visual Studio 6 workspace options file
275 | *.opt
276 |
277 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
278 | *.vbw
279 |
280 | # Visual Studio LightSwitch build output
281 | **/*.HTMLClient/GeneratedArtifacts
282 | **/*.DesktopClient/GeneratedArtifacts
283 | **/*.DesktopClient/ModelManifest.xml
284 | **/*.Server/GeneratedArtifacts
285 | **/*.Server/ModelManifest.xml
286 | _Pvt_Extensions
287 |
288 | # Paket dependency manager
289 | .paket/paket.exe
290 | paket-files/
291 |
292 | # FAKE - F# Make
293 | .fake/
294 |
295 | # JetBrains Rider
296 | .idea/
297 | *.sln.iml
298 |
299 | # CodeRush
300 | .cr/
301 |
302 | # Python Tools for Visual Studio (PTVS)
303 | __pycache__/
304 | *.pyc
305 |
306 | # Cake - Uncomment if you are using it
307 | # tools/**
308 | # !tools/packages.config
309 |
310 | # Tabs Studio
311 | *.tss
312 |
313 | # Telerik's JustMock configuration file
314 | *.jmconfig
315 |
316 | # BizTalk build output
317 | *.btp.cs
318 | *.btm.cs
319 | *.odx.cs
320 | *.xsd.cs
321 |
322 | # OpenCover UI analysis results
323 | OpenCover/
324 |
325 | # Azure Stream Analytics local run output
326 | ASALocalRun/
327 |
328 | # MSBuild Binary and Structured Log
329 | *.binlog
330 |
331 | # NVidia Nsight GPU debugger configuration file
332 | *.nvuser
333 |
334 | # MFractors (Xamarin productivity tool) working folder
335 | .mfractor/
336 |
337 | ### VisualStudio Patch ###
338 | # By default, sensitive information, such as encrypted password
339 | # should be stored in the .pubxml.user file.
340 | *.pubxml.user
341 |
342 |
343 | # End of https://www.gitignore.io/api/visualstudio
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # serilog-sinks-kafka - no longer maintained
2 |
3 | > ⚠️ **WARNING** ⚠️ - As we no longer use this project internally at Imburse, we no longer maintain it. This repository has been archived but will remain public for people to fork if desired.
4 |
5 | ---
6 |
7 | [Build Status](https://circleci.com/gh/imburseag/serilog-sinks-kafka/tree/master) [NuGet](https://www.nuget.org/packages/Serilog.Sinks.Confluent.Kafka/) [NuGet Downloads](https://www.nuget.org/packages/Serilog.Sinks.Confluent.Kafka/)
8 |
9 | A Serilog sink that writes events to Kafka Endpoints (Including Azure Event Hubs).
10 |
11 | ## Dependencies
12 |
13 | This sink works with the following packages:
14 |
15 | * Serilog >v2.9.0
16 | * Serilog.Sinks.PeriodicBatching >v2.3.0
17 | * Confluent.Kafka >v1.6.3
18 |
19 | ## Usage
20 |
21 | ```csharp
22 | Log.Logger = new LoggerConfiguration()
23 | .WriteTo.Kafka()
24 | .CreateLogger();
25 | ```
26 |
27 | ### Parameters
28 | * **bootstrapServers** - Comma-separated list of Kafka Bootstrap Servers. Defaults to "localhost:9092"
29 | * **batchSizeLimit** - Maximum number of logs to batch. Defaults to 50
30 | * **period** - The period in seconds to send batches of logs. Defaults to 5 seconds
31 | * **securityProtocol** - The SecurityProtocol to use. Defaults to SecurityProtocol.Plaintext
32 | * **saslMechanism** - The SASL Mechanism. Defaults to SaslMechanism.Plain
33 | * **topic** - Name of the Kafka topic. Defaults to "logs"
34 | * **topicDecider** - Alternative to a static/constant "topic" value. Function that can be used to determine the topic to be written to at runtime (example below)
35 | * **saslUsername** - (Optional) Username for SASL. This is required for Azure Event Hubs and should be set to `$ConnectionString`
36 | * **saslPassword** - (Optional) Password for SASL. This is required for Azure Event Hubs and is your entire Connection String.
37 | * **sslCaLocation** - (Optional) Location of the SSL CA Certificates. This is required for Azure Event Hubs and should be set to `./cacert.pem`, as this package includes the Azure cacert.pem file, which is copied into your binary output directory.
38 | * **formatter** - (Optional) An `ITextFormatter` you can specify to format log entries. Defaults to the standard `JsonFormatter` with `renderMessage` set to `true`.
39 |
40 |
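For example, a local setup that sets a few of these parameters explicitly (the values shown here are simply the documented defaults):

```csharp
Log.Logger = new LoggerConfiguration()
    .WriteTo.Kafka(
        bootstrapServers: "localhost:9092", // comma-separated list for multiple brokers
        topic: "logs",
        batchSizeLimit: 50,
        period: 5)
    .CreateLogger();
```
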
41 | ## Configuration for a local Kafka instance using appsettings
42 | ```json
43 | {
44 | "Serilog": {
45 | "MinimumLevel": {
46 | "Default": "Debug",
47 | "Override": {
48 | "Microsoft": "Warning",
49 | "System": "Warning"
50 | }
51 | },
52 | "WriteTo": [
53 | {
54 | "Name": "Kafka",
55 | "Args": {
56 | "batchSizeLimit": "50",
57 | "period": "5",
58 | "bootstrapServers": "localhost:9092",
59 | "topic": "logs"
60 | }
61 | }
62 | ]
63 | }
64 | }
65 |
66 | ```
67 |
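If you use appsettings like this, the sink is wired up by reading the configuration at startup. A minimal sketch, assuming the Serilog.Settings.Configuration package is referenced (as in the TestApp project in this repository):

```csharp
using Microsoft.Extensions.Configuration;
using Serilog;

var configuration = new ConfigurationBuilder()
    .AddJsonFile("appsettings.json")
    .AddEnvironmentVariables()
    .Build();

Log.Logger = new LoggerConfiguration()
    .ReadFrom.Configuration(configuration) // picks up the "Serilog" section above
    .CreateLogger();
```
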
68 | ## Configuration with a `topicDecider` and a custom formatter
69 |
70 | ```csharp
71 | Log.Logger = new LoggerConfiguration()
72 | .WriteTo.Kafka(GetTopicName, "localhost:9092",
73 |         formatter: new CustomElasticsearchFormatter("LogEntry"))
74 | .CreateLogger();
75 | ```
76 |
77 | The code above specifies `GetTopicName` as the `topicDecider`. Here is a sample implementation:
78 |
79 | ```csharp
80 | private static string GetTopicName(LogEvent logEntry)
81 | {
82 | var logInfo = logEntry.Properties["LogEntry"] as StructureValue;
83 |     var lookup = logInfo?.Properties.FirstOrDefault(a => a.Name == "some_property_name")?.Value as ScalarValue;
84 |
85 |     return string.Equals(lookup?.Value as string, "valueForTopicA")
86 |         ? "topicA"
87 |         : "topicB";
88 | }
89 | ```
90 |
91 | The above code also references a `CustomElasticsearchFormatter` that uses the whole `LogEntry` as the input to the formatter. It is a custom formatter that inherits from `ElasticsearchJsonFormatter` in the `Serilog.Sinks.Elasticsearch` NuGet package, but it can be any `ITextFormatter` that you want to use when sending the log entry to Kafka. Note that if you omit the formatter parameter (which is fine), the standard `JsonFormatter` will be used (with the `renderMessage` parameter set to `true`).
92 |
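As a sketch of what such a formatter can look like, here is a hypothetical `ITextFormatter` (the class name and output shape are illustrative only, not part of this package):

```csharp
using System.IO;
using Serilog.Events;
using Serilog.Formatting;

// Hypothetical example: writes one plain-text line per event instead of JSON.
public class PlainMessageFormatter : ITextFormatter
{
    public void Format(LogEvent logEvent, TextWriter output)
    {
        output.Write(logEvent.Timestamp.ToString("O"));
        output.Write(" [");
        output.Write(logEvent.Level);
        output.Write("] ");
        output.WriteLine(logEvent.RenderMessage());
    }
}
```

It would be passed to the sink in the same way: `.WriteTo.Kafka(formatter: new PlainMessageFormatter())`.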
93 |
94 | ## Configuration for Azure Event Hubs instance
95 |
96 | You will need to ensure you have a copy of the Azure CA Certificates and define the location of this cert in the `sslCaLocation`.
97 |
98 | You can download a copy of the Azure CA Certificate [here](./certs/cacert.pem).
99 |
100 | Place this in your project's root directory and ensure it is copied to the build output in your csproj, for example:
101 |
102 | ```
103 | <ItemGroup>
104 |   <None Update="cacert.pem">
105 |     <CopyToOutputDirectory>Always</CopyToOutputDirectory>
106 |   </None>
107 | </ItemGroup>
108 | ```
109 |
110 | ### Configuration for Azure Event Hubs.
111 | ```csharp
112 | Log.Logger = new LoggerConfiguration()
113 | .WriteTo.Kafka(
114 | batchSizeLimit: 50,
115 | period: 5,
116 | bootstrapServers: "my-event-hub-instance.servicebus.windows.net:9093",
117 | saslUsername: "$ConnectionString",
118 | saslPassword: "my-event-hub-instance-connection-string",
119 | topic: "logs",
120 | sslCaLocation: "./cacert.pem",
121 | saslMechanism: SaslMechanism.Plain,
122 | securityProtocol: SecurityProtocol.SaslSsl)
123 | .CreateLogger();
124 | ```
125 |
126 | ### Or using appsettings...
127 | ```json
128 | {
129 | "Serilog": {
130 | "MinimumLevel": {
131 | "Default": "Debug",
132 | "Override": {
133 | "Microsoft": "Warning",
134 | "System": "Warning"
135 | }
136 | },
137 | "WriteTo": [
138 | {
139 | "Name": "Kafka",
140 | "Args": {
141 | "batchSizeLimit": "50",
142 | "period": "5",
143 | "bootstrapServers": "my-event-hub-instance.servicebus.windows.net:9093",
144 | "saslUsername": "$ConnectionString",
145 | "saslPassword": "my-event-hub-instance-connection-string",
146 | "topic": "logs",
147 | "sslCaLocation": "./cacert.pem",
148 | "saslMechanism": "Plain",
149 | "securityProtocol": "SaslSsl"
150 | }
151 | }
152 | ]
153 | }
154 | }
155 |
156 | ```
157 |
158 | ## Extra Configuration using Environment Variables
159 |
160 | You can also specify `ProducerConfig` configuration using environment variables.
161 | These settings can be specified with environment variable names of either form:
162 |
163 | `SERILOG__KAFKA__ProducerConfigPropertyName`
164 |
165 | or
166 |
167 | `SERILOG__KAFKA__PRODUCER_CONFIG_PROPERTY_NAME`.
168 |
169 | The `SERILOG__KAFKA__` prefix is first stripped from the environment variable name, then any remaining underscores are removed and the result is lower-cased, so it can be matched case-insensitively against the `ProducerConfig` property names.
170 |
171 | The `ProducerConfig` is first loaded from any specified environment variables. Any configuration passed into the KafkaSink constructor then overrides the environment variables.
172 | This is to ensure backwards compatibility for now, but passing this configuration into the KafkaSink constructor will be removed in the future.
173 |
174 | You can check which properties are supported in the Confluent.Kafka source: https://github.com/confluentinc/confluent-kafka-dotnet/blob/6128bdf65fa79fbb14210d73970fbd4f7940d4b7/src/Confluent.Kafka/Config_gen.cs#L830
175 |
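As an illustration of that mapping (using `MessageTimeoutMs` purely as an example `ProducerConfig` property), both of the following resolve to the same setting, because the prefix is stripped, underscores are removed and the comparison is case-insensitive:

```csharp
// Equivalent ways to set ProducerConfig.MessageTimeoutMs for the sink:
Environment.SetEnvironmentVariable("SERILOG__KAFKA__MessageTimeoutMs", "3000");
Environment.SetEnvironmentVariable("SERILOG__KAFKA__MESSAGE_TIMEOUT_MS", "3000");
```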
176 |
177 | ## Azure Event Hubs Recommended Configuration
178 | If you are running against an Azure Event Hub, the following configuration is recommended.
179 |
180 | https://github.com/Azure/azure-event-hubs-for-kafka/blob/master/CONFIGURATION.md
181 |
182 | These environment variables can be set:
183 |
184 | ```
185 | SERILOG__KAFKA__SocketKeepaliveEnable=true
186 | SERILOG__KAFKA__MetadataMaxAgeMs=180000
187 | SERILOG__KAFKA__RequestTimeoutMs=30000
188 | SERILOG__KAFKA__Partitioner=ConsistentRandom
189 | SERILOG__KAFKA__EnableIdempotence=false
190 | SERILOG__KAFKA__CompressionType=None
191 | ```
192 |
--------------------------------------------------------------------------------
/src/Serilog.Sinks.Kafka.TestApi/Program.cs:
--------------------------------------------------------------------------------
1 | using Microsoft.AspNetCore;
2 | using Microsoft.AspNetCore.Hosting;
3 | using Serilog;
4 |
5 | namespace Serilog.Sinks.Kafka.TestApi
6 | {
7 | public class Program
8 | {
9 | public static void Main(string[] args)
10 | {
11 | CreateWebHostBuilder(args)
12 | .Build()
13 | .Run();
14 | }
15 |
16 | public static IWebHostBuilder CreateWebHostBuilder(string[] args) => WebHost
17 | .CreateDefaultBuilder(args)
18 | .UseSerilog((context, config) => config.ReadFrom.Configuration(context.Configuration))
19 |             .UseStartup<Startup>();
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/src/Serilog.Sinks.Kafka.TestApi/Properties/launchSettings.json:
--------------------------------------------------------------------------------
1 | {
2 | "profiles": {
3 |     "Serilog.Sinks.Kafka.TestApi": {
4 | "commandName": "Project",
5 | "launchBrowser": true,
6 | "applicationUrl": "http://localhost:5050",
7 | "environmentVariables": {
8 | "ASPNETCORE_ENVIRONMENT": "Development",
9 | "SERILOG__KAFKA__BOOTSTRAP_SERVERS": "localhost:9092",
10 | "SERILOG__KAFKA__SocketKeepaliveEnable": "true",
11 | "SERILOG__KAFKA__MetadataMaxAgeMs": "180000",
12 | "SERILOG__KAFKA__RequestTimeoutMs": "30000",
13 | "SERILOG__KAFKA__Partitioner": "ConsistentRandom",
14 | "SERILOG__KAFKA__EnableIdempotence": "false",
15 | "SERILOG__KAFKA__CompressionType": "None"
16 | }
17 | }
18 | }
19 | }
--------------------------------------------------------------------------------
/src/Serilog.Sinks.Kafka.TestApi/Serilog.Sinks.Kafka.TestApi.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | netcoreapp3.1
5 | false
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 | Always
25 |
26 |
27 | Always
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/src/Serilog.Sinks.Kafka.TestApi/Startup.cs:
--------------------------------------------------------------------------------
1 | using Microsoft.AspNetCore.Builder;
2 | using Microsoft.AspNetCore.Hosting;
3 | using Microsoft.AspNetCore.Http;
4 | using Microsoft.Extensions.DependencyInjection;
5 | using Serilog;
6 | using Serilog.Debugging;
7 | using System;
8 |
9 | namespace Serilog.Sinks.Kafka.TestApi
10 | {
11 | public class Startup
12 | {
13 | // This method gets called by the runtime. Use this method to add services to the container.
14 | // For more information on how to configure your application, visit https://go.microsoft.com/fwlink/?LinkID=398940
15 | public void ConfigureServices(IServiceCollection services)
16 | {
17 | }
18 |
19 | // This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
20 | public void Configure(IApplicationBuilder app, IHostingEnvironment env)
21 | {
22 | SelfLog.Enable(Console.Error);
23 |
24 | if (env.IsDevelopment())
25 | {
26 | app.UseDeveloperExceptionPage();
27 | }
28 |
29 | app.Run(async (context) =>
30 | {
31 | Log.Information("Api Application Test!");
32 | await context.Response.WriteAsync("Hello World!");
33 | });
34 | }
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/src/Serilog.Sinks.Kafka.TestApi/appsettings.Development.json:
--------------------------------------------------------------------------------
1 | {
2 | "Logging": {
3 | "LogLevel": {
4 | "Default": "Debug",
5 | "System": "Information",
6 | "Microsoft": "Information"
7 | }
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/src/Serilog.Sinks.Kafka.TestApi/appsettings.json:
--------------------------------------------------------------------------------
1 | {
2 | "Serilog": {
3 | "MinimumLevel": {
4 | "Default": "Debug",
5 | "Override": {
6 | "Microsoft": "Warning",
7 | "System": "Warning"
8 | }
9 | },
10 | "WriteTo": [
11 | {
12 | "Name": "Console"
13 | },
14 | {
15 | "Name": "Kafka",
16 | "Args": {
17 | "batchSizeLimit": "50",
18 | "period": "5",
19 | "bootstrapServers": "localhost:9092",
20 | "topic": "logs"
21 | }
22 | }
23 | ]
24 | },
25 | "AllowedHosts": "*"
26 | }
27 |
--------------------------------------------------------------------------------
/src/Serilog.Sinks.Kafka.TestApp/Program.cs:
--------------------------------------------------------------------------------
1 | using Microsoft.Extensions.Configuration;
2 | using System;
3 | using System.IO;
4 |
5 | namespace Serilog.Sinks.Kafka.TestApp
6 | {
7 | class Program
8 | {
9 | static void Main(string[] args)
10 | {
11 | Environment.SetEnvironmentVariable("SERILOG__KAFKA__BOOTSTRAP_SERVERS", "localhost:9092");
12 |
13 | Environment.SetEnvironmentVariable("SERILOG__KAFKA__SocketKeepaliveEnable", "true");
14 | Environment.SetEnvironmentVariable("SERILOG__KAFKA__MetadataMaxAgeMs", "180000");
15 | Environment.SetEnvironmentVariable("SERILOG__KAFKA__RequestTimeoutMs", "30000");
16 | Environment.SetEnvironmentVariable("SERILOG__KAFKA__Partitioner", "ConsistentRandom");
17 | Environment.SetEnvironmentVariable("SERILOG__KAFKA__EnableIdempotence", "false");
18 | Environment.SetEnvironmentVariable("SERILOG__KAFKA__CompressionType", "None");
19 |
20 | var config = new ConfigurationBuilder()
21 | .SetBasePath(Directory.GetCurrentDirectory())
22 | .AddJsonFile("appsettings.json")
23 | .AddEnvironmentVariables()
24 | .Build();
25 |
26 | Log.Logger = new LoggerConfiguration()
27 | .ReadFrom.Configuration(config)
28 | .CreateLogger();
29 |
30 | Log.Information("Console Application Test!");
31 |
32 | Log.CloseAndFlush();
33 | }
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/src/Serilog.Sinks.Kafka.TestApp/Serilog.Sinks.Kafka.TestApp.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | netcoreapp3.1
6 | false
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 | Always
28 |
29 |
30 | Always
31 |
32 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/src/Serilog.Sinks.Kafka.TestApp/appsettings.json:
--------------------------------------------------------------------------------
1 | {
2 | "Serilog": {
3 | "MinimumLevel": {
4 | "Default": "Debug",
5 | "Override": {
6 | "Microsoft": "Warning",
7 | "System": "Warning"
8 | }
9 | },
10 | "WriteTo": [
11 | {
12 | "Name": "Console"
13 | },
14 | {
15 | "Name": "Kafka",
16 | "Args": {
17 | "batchSizeLimit": "50",
18 | "period": "5",
19 | "bootstrapServers": "localhost:9092",
20 | "topic": "logs"
21 | }
22 | }
23 | ]
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/src/Serilog.Sinks.Kafka.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio Version 16
4 | VisualStudioVersion = 16.0.29123.88
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Serilog.Sinks.Kafka", "Serilog.Sinks.Kafka\Serilog.Sinks.Kafka.csproj", "{81F39D8A-9730-4D5A-BC92-04EC93526652}"
7 | EndProject
8 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Serilog.Sinks.Kafka.TestApp", "Serilog.Sinks.Kafka.TestApp\Serilog.Sinks.Kafka.TestApp.csproj", "{A96B550B-C1BD-4227-BC20-12D7A02277BC}"
9 | EndProject
10 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Serilog.Sinks.Kafka.TestApi", "Serilog.Sinks.Kafka.TestApi\Serilog.Sinks.Kafka.TestApi.csproj", "{EB02A379-8501-43FD-B8F9-730B0AFBDA44}"
11 | EndProject
12 | Global
13 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
14 | Debug|Any CPU = Debug|Any CPU
15 | Release|Any CPU = Release|Any CPU
16 | EndGlobalSection
17 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
18 | {81F39D8A-9730-4D5A-BC92-04EC93526652}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
19 | {81F39D8A-9730-4D5A-BC92-04EC93526652}.Debug|Any CPU.Build.0 = Debug|Any CPU
20 | {81F39D8A-9730-4D5A-BC92-04EC93526652}.Release|Any CPU.ActiveCfg = Release|Any CPU
21 | {81F39D8A-9730-4D5A-BC92-04EC93526652}.Release|Any CPU.Build.0 = Release|Any CPU
22 | {A96B550B-C1BD-4227-BC20-12D7A02277BC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
23 | {A96B550B-C1BD-4227-BC20-12D7A02277BC}.Debug|Any CPU.Build.0 = Debug|Any CPU
24 | {A96B550B-C1BD-4227-BC20-12D7A02277BC}.Release|Any CPU.ActiveCfg = Release|Any CPU
25 | {A96B550B-C1BD-4227-BC20-12D7A02277BC}.Release|Any CPU.Build.0 = Release|Any CPU
26 | {EB02A379-8501-43FD-B8F9-730B0AFBDA44}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
27 | {EB02A379-8501-43FD-B8F9-730B0AFBDA44}.Debug|Any CPU.Build.0 = Debug|Any CPU
28 | {EB02A379-8501-43FD-B8F9-730B0AFBDA44}.Release|Any CPU.ActiveCfg = Release|Any CPU
29 | {EB02A379-8501-43FD-B8F9-730B0AFBDA44}.Release|Any CPU.Build.0 = Release|Any CPU
30 | EndGlobalSection
31 | GlobalSection(SolutionProperties) = preSolution
32 | HideSolutionNode = FALSE
33 | EndGlobalSection
34 | GlobalSection(ExtensibilityGlobals) = postSolution
35 | SolutionGuid = {98913C3F-3465-4182-9110-36412E94E18D}
36 | EndGlobalSection
37 | EndGlobal
38 |
--------------------------------------------------------------------------------
/src/Serilog.Sinks.Kafka/KafkaSink.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Globalization;
4 | using System.IO;
5 | using System.Reflection;
6 | using System.Text;
7 | using System.Threading.Tasks;
8 | using Confluent.Kafka;
9 | using Serilog.Events;
10 | using Serilog.Formatting;
11 | using Serilog.Sinks.PeriodicBatching;
12 |
13 | namespace Serilog.Sinks.Kafka
14 | {
15 | public class KafkaSink : IBatchedLogEventSink
16 | {
17 | private const int FlushTimeoutSecs = 10;
18 |
19 | private readonly TopicPartition _globalTopicPartition;
20 | private readonly ITextFormatter _formatter;
21 |         private readonly Func<LogEvent, string> _topicDecider;
22 |         private IProducer<Null, byte[]> _producer;
23 |
24 | public KafkaSink(
25 | string bootstrapServers,
26 | SecurityProtocol securityProtocol,
27 | SaslMechanism saslMechanism,
28 | string saslUsername,
29 | string saslPassword,
30 | string sslCaLocation,
31 | string topic = null,
32 |             Func<LogEvent, string> topicDecider = null,
33 | ITextFormatter formatter = null)
34 | {
35 | ConfigureKafkaConnection(
36 | bootstrapServers,
37 | securityProtocol,
38 | saslMechanism,
39 | saslUsername,
40 | saslPassword,
41 | sslCaLocation);
42 |
43 | _formatter = formatter ?? new Formatting.Json.JsonFormatter(renderMessage: true);
44 |
45 | if (topic != null)
46 | _globalTopicPartition = new TopicPartition(topic, Partition.Any);
47 |
48 | if (topicDecider != null)
49 | _topicDecider = topicDecider;
50 | }
51 |
52 | public Task OnEmptyBatchAsync() => Task.CompletedTask;
53 |
54 |         public Task EmitBatchAsync(IEnumerable<LogEvent> batch)
55 | {
56 | foreach (var logEvent in batch)
57 | {
58 |                 Message<Null, byte[]> message;
59 |
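                // Use the fixed topic when one was configured; otherwise ask the
                // topicDecider for a topic for each individual event.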
60 | var topicPartition = _topicDecider == null
61 | ? _globalTopicPartition
62 | : new TopicPartition(_topicDecider(logEvent), Partition.Any);
63 |
64 | using (var render = new StringWriter(CultureInfo.InvariantCulture))
65 | {
66 | _formatter.Format(logEvent, render);
67 |
68 |                     message = new Message<Null, byte[]>
69 | {
70 | Value = Encoding.UTF8.GetBytes(render.ToString())
71 | };
72 | }
73 |
74 | _producer.Produce(topicPartition, message);
75 | }
76 |
77 | _producer.Flush(TimeSpan.FromSeconds(FlushTimeoutSecs));
78 |
79 | return Task.CompletedTask;
80 | }
81 |
82 | private void ConfigureKafkaConnection(
83 | string bootstrapServers,
84 | SecurityProtocol securityProtocol,
85 | SaslMechanism saslMechanism,
86 | string saslUsername,
87 | string saslPassword,
88 | string sslCaLocation)
89 | {
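            // Defaults are applied first, then any SERILOG__KAFKA__* environment variables,
            // and finally the explicit constructor arguments, which override the environment.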
90 | var config = new ProducerConfig()
91 | .SetValue("ApiVersionFallbackMs", 0)
92 | .SetValue("EnableDeliveryReports", false)
93 | .LoadFromEnvironmentVariables()
94 | .SetValue("BootstrapServers", bootstrapServers)
95 | .SetValue("SecurityProtocol", securityProtocol)
96 | .SetValue("SaslMechanism", saslMechanism)
97 | .SetValue("SslCaLocation",
98 | string.IsNullOrEmpty(sslCaLocation)
99 | ? null
100 | : Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), sslCaLocation))
101 | .SetValue("SaslUsername", saslUsername)
102 | .SetValue("SaslPassword", saslPassword);
103 |
104 |             _producer = new ProducerBuilder<Null, byte[]>(config)
105 | .Build();
106 | }
107 | }
108 | }
--------------------------------------------------------------------------------
/src/Serilog.Sinks.Kafka/LoggerConfigurationExtensions.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using Confluent.Kafka;
3 | using Serilog.Configuration;
4 | using Serilog.Events;
5 | using Serilog.Formatting;
6 | using Serilog.Sinks.PeriodicBatching;
7 |
8 | namespace Serilog.Sinks.Kafka
9 | {
10 | public static class LoggerConfigurationExtensions
11 | {
12 |         /// <summary>
13 |         /// Adds a sink that writes log events to a Kafka topic in the broker endpoints.
14 |         /// </summary>
15 |         /// <param name="loggerConfiguration">The logger configuration.</param>
16 |         /// <param name="batchSizeLimit">The maximum number of events to include in a single batch.</param>
17 |         /// <param name="period">The time in seconds to wait between checking for event batches.</param>
18 |         /// <param name="bootstrapServers">The list of bootstrapServers separated by comma.</param>
19 |         /// <param name="topic">The topic name.</param>
20 |         /// <returns></returns>
21 | public static LoggerConfiguration Kafka(
22 | this LoggerSinkConfiguration loggerConfiguration,
23 | string bootstrapServers = "localhost:9092",
24 | int batchSizeLimit = 50,
25 | int period = 5,
26 | SecurityProtocol securityProtocol = SecurityProtocol.Plaintext,
27 | SaslMechanism saslMechanism = SaslMechanism.Plain,
28 | string topic = "logs",
29 | string saslUsername = null,
30 | string saslPassword = null,
31 | string sslCaLocation = null,
32 | ITextFormatter formatter = null)
33 | {
34 | return loggerConfiguration.Kafka(
35 | bootstrapServers,
36 | batchSizeLimit,
37 | period,
38 | securityProtocol,
39 | saslMechanism,
40 | saslUsername,
41 | saslPassword,
42 | sslCaLocation,
43 | topic,
44 | topicDecider: null,
45 | formatter);
46 | }
47 |
48 | public static LoggerConfiguration Kafka(
49 | this LoggerSinkConfiguration loggerConfiguration,
50 |             Func<LogEvent, string> topicDecider,
51 | string bootstrapServers = "localhost:9092",
52 | int batchSizeLimit = 50,
53 | int period = 5,
54 | SecurityProtocol securityProtocol = SecurityProtocol.Plaintext,
55 | SaslMechanism saslMechanism = SaslMechanism.Plain,
56 | string saslUsername = null,
57 | string saslPassword = null,
58 | string sslCaLocation = null,
59 | ITextFormatter formatter = null)
60 | {
61 | return loggerConfiguration.Kafka(
62 | bootstrapServers,
63 | batchSizeLimit,
64 | period,
65 | securityProtocol,
66 | saslMechanism,
67 | saslUsername,
68 | saslPassword,
69 | sslCaLocation,
70 | topic: null,
71 | topicDecider,
72 | formatter);
73 | }
74 |
75 | private static LoggerConfiguration Kafka(
76 | this LoggerSinkConfiguration loggerConfiguration,
77 | string bootstrapServers,
78 | int batchSizeLimit,
79 | int period,
80 | SecurityProtocol securityProtocol,
81 | SaslMechanism saslMechanism,
82 | string saslUsername,
83 | string saslPassword,
84 | string sslCaLocation,
85 | string topic,
86 |             Func<LogEvent, string> topicDecider,
87 | ITextFormatter formatter)
88 | {
89 | var kafkaSink = new KafkaSink(
90 | bootstrapServers,
91 | securityProtocol,
92 | saslMechanism,
93 | saslUsername,
94 | saslPassword,
95 | sslCaLocation,
96 | topic,
97 | topicDecider,
98 | formatter);
99 |
100 | var batchingOptions = new PeriodicBatchingSinkOptions
101 | {
102 | BatchSizeLimit = batchSizeLimit,
103 | Period = TimeSpan.FromSeconds(period)
104 | };
105 |
106 | var batchingSink = new PeriodicBatchingSink(
107 | kafkaSink,
108 | batchingOptions);
109 |
110 | return loggerConfiguration
111 | .Sink(batchingSink);
112 | }
113 | }
114 | }
115 |
--------------------------------------------------------------------------------
/src/Serilog.Sinks.Kafka/ProducerConfigExtensions.cs:
--------------------------------------------------------------------------------
1 | using Confluent.Kafka;
2 | using System;
3 | using System.Collections;
4 | using System.Collections.Generic;
5 | using System.Linq;
6 |
7 | namespace Serilog.Sinks.Kafka
8 | {
9 | public static class ProducerConfigExtensions
10 | {
11 | private const string SerilogEnvVar = "SERILOG__KAFKA__";
12 |
13 | public static ProducerConfig LoadFromEnvironmentVariables(this ProducerConfig config)
14 | {
15 | var envVars = Environment.GetEnvironmentVariables();
16 |
17 | foreach (DictionaryEntry envVar in envVars)
18 | {
19 | var key = envVar.Key.ToString();
20 | var value = envVar.Value.ToString();
21 |
22 | if (key.StartsWith(SerilogEnvVar))
23 | {
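                    // Normalise the name: strip the SERILOG__KAFKA__ prefix, drop underscores
                    // and lower-case it so it matches ProducerConfig property names.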
24 | var configItem = key
25 | .Replace(SerilogEnvVar, string.Empty)
26 | .Replace("_", "")
27 | .ToLower();
28 |
29 | config.SetValue(configItem, value);
30 | }
31 | }
32 |
33 | return config;
34 | }
35 |
36 | public static ProducerConfig SetValue(this ProducerConfig config, string key, object value)
37 | {
38 | SetValues(config, key, value?.ToString());
39 |
40 | return config;
41 | }
42 |
43 | private static void SetValues(object obj, string propertyName, string stringValue)
44 | {
45 | if (string.IsNullOrEmpty(stringValue))
46 | return;
47 |
48 | var propertyInfo = obj.GetType().
49 | GetProperties()
50 | .SingleOrDefault(x => x.Name.ToLower() == propertyName.ToLower());
51 |
52 | object objValue = null;
53 |
54 | if (propertyInfo == null)
55 |                 throw new ArgumentException($"A property ({propertyName}) could not be found in Confluent.Kafka");
56 |
57 |             var convertValue = new Dictionary<Type, Action>
58 | {
59 | { typeof(string), () => objValue = stringValue },
60 | { typeof(int?), () => objValue = int.Parse(stringValue) },
61 | { typeof(bool?), () => objValue = bool.Parse(stringValue) },
62 | { typeof(Partitioner?), () => objValue = Enum.Parse(typeof(Partitioner), stringValue) },
63 | { typeof(CompressionType?), () => objValue = Enum.Parse(typeof(CompressionType), stringValue) },
64 | { typeof(SecurityProtocol?), () => objValue = Enum.Parse(typeof(SecurityProtocol), stringValue) },
65 | { typeof(SaslMechanism?), () => objValue = Enum.Parse(typeof(SaslMechanism), stringValue) },
66 | { typeof(BrokerAddressFamily?), () => objValue = Enum.Parse(typeof(BrokerAddressFamily), stringValue) },
67 | { typeof(Acks?), () => objValue = Enum.Parse(typeof(Acks), stringValue) },
68 | { typeof(SslEndpointIdentificationAlgorithm?), () => objValue = Enum.Parse(typeof(SslEndpointIdentificationAlgorithm), stringValue) }
69 | };
70 |
71 | convertValue[propertyInfo.PropertyType]();
72 |
73 | if (objValue == null)
74 | throw new InvalidCastException($"{stringValue} could not be assigned to {propertyName} ({propertyInfo.PropertyType.Name})");
75 |
76 | propertyInfo.SetValue(obj, objValue);
77 | }
78 | }
79 | }
80 |
--------------------------------------------------------------------------------
/src/Serilog.Sinks.Kafka/Serilog.Sinks.Kafka.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | netstandard2.0
5 | true
6 | Serilog.Sinks.Confluent.Kafka
7 | Serilog.Sinks.Confluent.Kafka
8 | Imburse AG
9 | Serilog event sink that writes to Kafka endpoints, using Confluent.Kafka, including Azure Event Hubs. This sink works with Serilog Version >2.8.0
10 | Serilog event sink that writes to Kafka endpoints, using Confluent.Kafka, including Azure Event Hubs. This sink works with Serilog Version >2.8.0
11 | https://github.com/imburseag/serilog-sinks-kafka
12 | http://serilog.net/images/serilog-sink-nuget.png
13 |     serilog logging azure kafka
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------