├── .github
├── release-drafter.yml
└── workflows
│ ├── ci.yml
│ ├── release-drafter.yml
│ └── scala-steward.yml
├── .gitignore
├── .jvmopts
├── .mergify.yml
├── LICENSE
├── README.md
├── benchmark.sh
├── benchmarks
└── src
│ └── main
│ ├── resources
│ ├── application.conf
│ ├── log4j2.xml
│ └── logback.xml
│ └── scala
│ └── scribe
│ └── benchmark
│ ├── LoggingSpeedBenchmark.scala
│ ├── LoggingStressTest.scala
│ ├── LoggingVisualizations.scala
│ ├── PerformanceBenchmark.scala
│ └── tester
│ ├── Log4CatsLoggingTester.scala
│ ├── Log4JLoggingTester.scala
│ ├── Log4JTraceLoggingTester.scala
│ ├── Log4SLoggingTester.scala
│ ├── LogbackLoggingTester.scala
│ ├── LoggingTester.scala
│ ├── ScalaLoggingLoggingTester.scala
│ ├── ScribeAsyncLoggingTester.scala
│ ├── ScribeEffectLoggingTester.scala
│ ├── ScribeEffectParallelLoggingTester.scala
│ ├── ScribeLoggingTester.scala
│ ├── Testers.scala
│ └── TinyLogLoggingTester.scala
├── build.sbt
├── cats
└── shared
│ └── src
│ ├── main
│ └── scala
│ │ └── scribe
│ │ ├── LoggerWrapper.scala
│ │ ├── Scribe.scala
│ │ ├── ScribeImpl.scala
│ │ └── cats.scala
│ └── test
│ └── scala
│ └── spec
│ └── ScribeSpec.scala
├── config
└── src
│ ├── main
│ ├── resources
│ │ └── moduload.list
│ └── scala
│ │ └── scribe
│ │ └── ScribeConfig.scala
│ └── test
│ └── scala
│ └── spec
│ └── ConfigSpec.scala
├── core
├── js-jvm
│ └── src
│ │ └── test
│ │ └── scala
│ │ └── specs
│ │ └── LoggingSpec.scala
├── js
│ └── src
│ │ └── main
│ │ └── scala
│ │ └── scribe
│ │ ├── JavaScriptConsole.scala
│ │ ├── Platform.scala
│ │ ├── output
│ │ └── format
│ │ │ └── RichBrowserOutputFormat.scala
│ │ └── writer
│ │ └── BrowserConsoleWriter.scala
├── jvm
│ └── src
│ │ ├── main
│ │ └── scala
│ │ │ └── scribe
│ │ │ ├── Platform.scala
│ │ │ └── handler
│ │ │ ├── AsynchronousLogHandle.scala
│ │ │ ├── AtomicLongExtras.scala
│ │ │ ├── CachingLogHandler.scala
│ │ │ ├── Overflow.scala
│ │ │ └── package.scala
│ │ └── test
│ │ └── scala
│ │ └── specs
│ │ ├── AsynchronousLoggingSpec.scala
│ │ └── MDCThreadSpec.scala
├── native
│ └── src
│ │ └── main
│ │ └── scala
│ │ └── scribe
│ │ └── Platform.scala
└── shared
│ └── src
│ ├── main
│ └── scala
│ │ └── scribe
│ │ ├── ANSI.scala
│ │ ├── Execution.scala
│ │ ├── Level.scala
│ │ ├── LogFeature.scala
│ │ ├── LogRecord.scala
│ │ ├── LogRecordCreator.scala
│ │ ├── Loggable.scala
│ │ ├── Logger.scala
│ │ ├── LoggerId.scala
│ │ ├── LoggerSupport.scala
│ │ ├── Logging.scala
│ │ ├── LoggingOutputStream.scala
│ │ ├── MinimumLevel.scala
│ │ ├── PlatformImplementation.scala
│ │ ├── Priority.scala
│ │ ├── filter
│ │ ├── AndFilters.scala
│ │ ├── ClassNameFilter.scala
│ │ ├── Filter.scala
│ │ ├── FilterBuilder.scala
│ │ ├── FilterMatcher.scala
│ │ ├── OrFilters.scala
│ │ ├── PackageNameFilter.scala
│ │ └── package.scala
│ │ ├── format
│ │ ├── AbbreviateBlock.scala
│ │ ├── CachingFormatBlock.scala
│ │ ├── FormatBlock.scala
│ │ ├── FormatBlocksFormatter.scala
│ │ ├── Formatter.scala
│ │ ├── RightPaddingBlock.scala
│ │ └── package.scala
│ │ ├── handler
│ │ ├── FunctionalLogHandler.scala
│ │ ├── LogHandle.scala
│ │ ├── LogHandler.scala
│ │ ├── LogHandlerBuilder.scala
│ │ ├── LogOverflowException.scala
│ │ └── SynchronousLogHandle.scala
│ │ ├── jul
│ │ └── JULHandler.scala
│ │ ├── mdc
│ │ ├── MDC.scala
│ │ ├── MDCManager.scala
│ │ ├── MDCMap.scala
│ │ ├── MDCThreadLocal.scala
│ │ ├── MDCValue.scala
│ │ └── package.scala
│ │ ├── message
│ │ ├── EmptyMessage.scala
│ │ ├── LazyMessage.scala
│ │ ├── LoggableMessage.scala
│ │ ├── Message.scala
│ │ └── StaticMessage.scala
│ │ ├── modify
│ │ ├── LevelFilter.scala
│ │ ├── LogBooster.scala
│ │ └── LogModifier.scala
│ │ ├── output
│ │ ├── LogOutput.scala
│ │ ├── format
│ │ │ ├── ANSIOutputFormat.scala
│ │ │ ├── ASCIIOutputFormat.scala
│ │ │ ├── HTMLOutputFormat.scala
│ │ │ └── OutputFormat.scala
│ │ └── package.scala
│ │ ├── package.scala
│ │ ├── throwable
│ │ ├── Trace.scala
│ │ └── TraceElement.scala
│ │ ├── util
│ │ ├── Abbreviator.scala
│ │ └── Time.scala
│ │ └── writer
│ │ ├── CacheWriter.scala
│ │ ├── ConsoleWriter.scala
│ │ ├── NullWriter.scala
│ │ ├── SystemErrWriter.scala
│ │ ├── SystemOutWriter.scala
│ │ ├── SystemWriter.scala
│ │ └── Writer.scala
│ └── test
│ └── scala
│ └── specs
│ ├── AbbreviatorSpec.scala
│ ├── ImplicitLoggingSpec.scala
│ ├── ImplicitLoggingTestObject.scala
│ ├── LogFeatureSpec.scala
│ ├── LoggingLevelFilteringSpec.scala
│ ├── LoggingTestObject.scala
│ └── MDCSpec.scala
├── docs
└── README.md
├── fileModule
├── jvm
│ └── src
│ │ ├── main
│ │ └── scala
│ │ │ └── scribe
│ │ │ └── file
│ │ │ └── Platform.scala
│ │ └── test
│ │ └── scala
│ │ └── spec
│ │ ├── FileLoggingSpec.scala
│ │ └── StressTestFileLogging.scala
├── native
│ └── src
│ │ └── main
│ │ └── scala
│ │ └── scribe
│ │ └── file
│ │ └── Platform.scala
└── shared
│ └── src
│ └── main
│ └── scala
│ └── scribe
│ └── file
│ ├── FileWriter.scala
│ ├── FlushMode.scala
│ ├── LogFile.scala
│ ├── LogFileStatus.scala
│ ├── PathBuilder.scala
│ ├── package.scala
│ ├── path
│ ├── FileNamePart.scala
│ ├── MaxLogs.scala
│ ├── MaxSize.scala
│ ├── PathPart.scala
│ └── Rolling.scala
│ └── writer
│ ├── IOLogFileWriter.scala
│ ├── LogFileWriter.scala
│ └── NIOLogFileWriter.scala
├── jitpack.yml
├── json
└── shared
│ └── src
│ └── main
│ └── scala
│ └── scribe
│ └── json
│ └── ScribeJsonSupport.scala
├── jsonCirce
└── shared
│ └── src
│ ├── main
│ └── scala
│ │ └── scribe
│ │ └── json
│ │ └── ScribeCirceJsonSupport.scala
│ └── test
│ └── scala
│ └── spec
│ └── JsonWriterSpec.scala
├── jsonFabric
├── jvm
│ └── src
│ │ └── test
│ │ └── scala
│ │ └── spec
│ │ └── JsonWriterSpec.scala
└── shared
│ └── src
│ └── main
│ └── scala
│ └── scribe
│ └── json
│ └── ScribeFabricJsonSupport.scala
├── local.sh
├── log4j
└── src
│ └── main
│ ├── resources
│ └── META-INF
│ │ └── services
│ │ └── org.apache.logging.log4j.spi.Provider
│ └── scala
│ └── scribe
│ ├── ScribeLoggerContext.scala
│ ├── ScribeLoggerContextFactory.scala
│ └── ScribeProvider.scala
├── logstash
└── src
│ └── main
│ └── scala
│ └── scribe
│ └── logstash
│ ├── LogstashRecord.scala
│ └── LogstashWriter.scala
├── migration
└── src
│ ├── main
│ ├── resources
│ │ └── moduload.list
│ └── scala
│ │ └── scribe
│ │ └── Log4JMigration.scala
│ └── test
│ ├── resources
│ ├── log4j.properties
│ └── logback.xml
│ └── scala
│ └── spec
│ └── Log4JMigrationSpec.scala
├── project
├── build.properties
└── plugins.sbt
├── publish.sh
├── scalastyle-config.xml
├── slack
└── src
│ └── main
│ └── scala
│ └── scribe
│ └── slack
│ ├── Slack.scala
│ ├── SlackMessage.scala
│ └── SlackWriter.scala
├── slf4j
└── src
│ ├── main
│ ├── java
│ │ └── org
│ │ │ └── slf4j
│ │ │ ├── ScribeLoggerAdapter.java
│ │ │ └── impl
│ │ │ └── StaticMDCBinder.java
│ └── scala
│ │ ├── org
│ │ └── slf4j
│ │ │ └── impl
│ │ │ └── StaticLoggerBinder.scala
│ │ └── scribe
│ │ └── slf4j
│ │ ├── SLF4JHelper.scala
│ │ ├── ScribeLoggerFactory.scala
│ │ └── ScribeMDCAdapter.scala
│ └── test
│ └── scala
│ └── spec
│ └── SLF4JSpec.scala
├── slf4j2
└── src
│ ├── main
│ ├── java
│ │ └── org
│ │ │ └── slf4j
│ │ │ └── ScribeLoggerAdapter.java
│ ├── resources
│ │ └── META-INF
│ │ │ └── services
│ │ │ └── org.slf4j.spi.SLF4JServiceProvider
│ └── scala
│ │ └── scribe
│ │ └── slf4j
│ │ ├── SLF4JHelper.scala
│ │ ├── ScribeLoggerFactory.scala
│ │ ├── ScribeMDCAdapter.scala
│ │ └── ScribeServiceProvider.scala
│ └── test
│ └── scala
│ └── spec
│ └── SLF4JSpec.scala
├── test.sh
└── work
├── benchmark
├── 2018.01.31.benchmarks.json
├── 2018.01.31.benchmarks.txt
├── 2018.04.24.benchmarks.json
├── 2018.04.24.benchmarks.txt
├── 2018.05.17.benchmarks.json
├── 2018.05.17.benchmarks.txt
├── 2018.05.29.benchmarks.json
├── 2018.08.27.benchmarks.json
├── 2018.08.27.benchmarks.txt
├── 2018.12.02.benchmarks.json
├── 2018.12.02.benchmarks.txt
├── 2019.03.28.benchmarks.json
├── 2019.03.28.benchmarks.txt
├── 2020.10.01.benchmarks.json
├── 2020.10.01.benchmarks.txt
├── 2021.10.08.benchmarks.txt
├── 2021.12.29.benchmarks.json
├── 2021.12.29.benchmarks.txt
├── 2022.02.06.benchmarks.json
├── 2022.02.06.benchmarks.txt
├── 2022.07.08.benchmarks.json
└── 2022.07.08.benchmarks.txt
└── images
├── 2018.01.31.benchmark-all-lines.png
├── 2018.01.31.benchmark-all.png
├── 2018.01.31.benchmark-log4j-lines.png
├── 2018.01.31.benchmark-log4j.png
├── 2018.01.31.benchmark-scala-logging-lines.png
├── 2018.01.31.benchmark-scala-logging.png
├── 2018.01.31.benchmark-trace-lines.png
├── 2018.01.31.benchmark-trace.png
├── 2018.08.28.benchmark-all-lines.png
├── 2018.08.28.benchmark-all.png
├── 2018.08.28.benchmark-async-lines.png
├── 2018.08.28.benchmark-async.png
├── 2018.08.28.benchmark-log4j-lines.png
├── 2018.08.28.benchmark-log4j-trace-lines.png
├── 2018.08.28.benchmark-log4j-trace.png
├── 2018.08.28.benchmark-log4j.png
├── 2018.08.28.benchmark-scala-logging-lines.png
├── 2018.08.28.benchmark-scala-logging.png
├── 2022.06.30.mdc-output.png
├── color-logs.png
└── output-colors.png
/.github/release-drafter.yml:
--------------------------------------------------------------------------------
1 | template: |
2 |   ## What’s Changed
3 |
4 |   $CHANGES
5 | categories:
6 |   - title: "🔧 Dependency updates"
7 |     labels:
8 |       - "dependencies"
9 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 | on:
3 |   pull_request:
4 |   push:
5 |   workflow_dispatch:
6 | jobs:
7 |   test:
8 |     runs-on: ubuntu-latest
9 |     steps:
10 |       - name: Checkout
11 |         uses: actions/checkout@v4
12 |       - name: Setup JDK
13 |         uses: actions/setup-java@v4
14 |         with:
15 |           distribution: 'zulu'
16 |           java-version: 23
17 |       - name: Setup sbt launcher
18 |         uses: sbt/setup-sbt@v1
19 |         with:
20 |           sbt-runner-version: '1.10.7'
21 |       - name: Coursier cache
22 |         uses: coursier/cache-action@v6
23 |       - name: Build and test
24 |         run: |
25 |           sbt -v -Dfile.encoding=UTF-8 +test
26 |           sbt -v -Dfile.encoding=UTF-8 clean coverage test coverageAggregate
27 |           rm -rf "$HOME/.ivy2/local" || true
28 |           find $HOME/Library/Caches/Coursier/v1 -name "ivydata-*.properties" -delete || true
29 |           find $HOME/.ivy2/cache -name "ivydata-*.properties" -delete || true
30 |           find $HOME/.cache/coursier/v1 -name "ivydata-*.properties" -delete || true
31 |           find $HOME/.sbt -name "*.lock" -delete || true
32 |       - name: Run codacy-coverage-reporter
33 |         uses: codacy/codacy-coverage-reporter-action@v1
34 |         with:
35 |           project-token: ${{ secrets.CODACY_PROJECT_TOKEN }}
36 |           coverage-reports: target/scala-2.13/coverage-report/cobertura.xml
--------------------------------------------------------------------------------
/.github/workflows/release-drafter.yml:
--------------------------------------------------------------------------------
1 | name: Release Drafter
2 |
3 | on:
4 |   push:
5 |     branches:
6 |       - master
7 |
8 | jobs:
9 |   update_release_draft:
10 |     runs-on: ubuntu-latest
11 |     steps:
12 |       - uses: release-drafter/release-drafter@v5
13 |         env:
14 |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
15 |
--------------------------------------------------------------------------------
/.github/workflows/scala-steward.yml:
--------------------------------------------------------------------------------
1 | # This workflow will launch at 00:00 every day
2 | name: Scala Steward
3 | on:
4 |   schedule:
5 |     - cron: '0 0 * * *'
6 |   workflow_dispatch:
7 | jobs:
8 |   scala-steward:
9 |     runs-on: ubuntu-latest
10 |     name: Launch Scala Steward
11 |     steps:
12 |       - name: Launch Scala Steward
13 |         uses: scala-steward-org/scala-steward-action@v2
14 |         with:
15 |           github-token: ${{ secrets.ADMIN_GITHUB_TOKEN }}
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | target/
2 | .idea/
3 | _site/
4 | .bsp/
5 | logs/
6 | out/
7 | benchmarks.json
8 | lowered.hnir
9 | *.iml
10 | .bloop
11 | project/project
12 | project/metals.sbt
13 | .metals
14 |
--------------------------------------------------------------------------------
/.jvmopts:
--------------------------------------------------------------------------------
1 | -Xms1024M
2 | -Xmx4096M
3 | -Xss256M
4 | -XX:MaxMetaspaceSize=4096M
5 | -XX:ReservedCodeCacheSize=500M
6 | -XX:+TieredCompilation
7 | -XX:-UseGCOverheadLimit
--------------------------------------------------------------------------------
/.mergify.yml:
--------------------------------------------------------------------------------
1 | pull_request_rules:
2 |   - name: assign and label scala-steward's PRs
3 |     conditions:
4 |       - author=scala-steward
5 |     actions:
6 |       assign:
7 |         users: [fthomas]
8 |       label:
9 |         add: [dependency-update]
10 |   - name: merge scala-steward's PRs
11 |     conditions:
12 |       - author=scala-steward
13 |       - status-success=Build and Test (ubuntu-latest, 2.12.15, adopt-hotspot@8)
14 |       - status-success=Build and Test (ubuntu-latest, 2.12.15, adopt-hotspot@11)
15 |       - status-success=Build and Test (ubuntu-latest, 2.13.8, adopt-hotspot@8)
16 |       - status-success=Build and Test (ubuntu-latest, 2.13.8, adopt-hotspot@11)
17 |     actions:
18 |       merge:
19 |         method: squash
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2016 OUTR Technologies, LLC
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/benchmark.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | rm -f benchmarks/results.txt
3 | sbt "benchmarks/runMain scribe.benchmark.LoggingVisualizations scribe" "benchmarks/runMain scribe.benchmark.LoggingVisualizations scribeAsync" "benchmarks/runMain scribe.benchmark.LoggingVisualizations scribeEffect" "benchmarks/runMain scribe.benchmark.LoggingVisualizations scribeEffectParallel" "benchmarks/runMain scribe.benchmark.LoggingVisualizations log4cats" "benchmarks/runMain scribe.benchmark.LoggingVisualizations log4s" "benchmarks/runMain scribe.benchmark.LoggingVisualizations scalaLogging" "benchmarks/runMain scribe.benchmark.LoggingVisualizations log4j" "benchmarks/runMain scribe.benchmark.LoggingVisualizations log4jTrace" "benchmarks/runMain scribe.benchmark.LoggingVisualizations logback" "benchmarks/runMain scribe.benchmark.LoggingVisualizations tinyLog"
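# An equivalent loop form, sketched for reference (tester names copied from the invocation above;
# this variant starts a separate sbt session per tester, so it is slower but runs the same benchmarks):
# for t in scribe scribeAsync scribeEffect scribeEffectParallel log4cats log4s scalaLogging log4j log4jTrace logback tinyLog; do
#   sbt "benchmarks/runMain scribe.benchmark.LoggingVisualizations $t"
# done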
--------------------------------------------------------------------------------
/benchmarks/src/main/resources/application.conf:
--------------------------------------------------------------------------------
1 | akka.log-dead-letters = 0
2 | akka.log-dead-letters-during-shutdown = off
--------------------------------------------------------------------------------
/benchmarks/src/main/resources/log4j2.xml:
--------------------------------------------------------------------------------
1 | [log4j2 configuration: the XML markup did not survive extraction; the only recoverable content is the pattern layout "%d %p %c{1.} [%t] %m%n"]
/benchmarks/src/main/resources/logback.xml:
--------------------------------------------------------------------------------
1 | [logback configuration: the XML markup did not survive extraction; recoverable content: log file "logs/logback.log" and pattern "%date{HH:mm:ss} %-5level %logger{0} - %msg%n"]
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/LoggingSpeedBenchmark.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark
2 |
3 | import org.openjdk.jmh.annotations
4 | import scribe.benchmark.tester._
5 |
6 | import java.util.concurrent.TimeUnit
7 |
8 | // jmh:run -i 3 -wi 3 -f1 -t1 -rf JSON -rff benchmarks.json
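// A hedged reading of the flags above: -i 3 measurement iterations, -wi 3 warmup iterations, -f1 one fork,
// -t1 one thread, -rf JSON -rff benchmarks.json to write the results as JSON. Typically invoked from sbt
// (module name assumed from this repository's layout):
//   sbt "benchmarks/jmh:run -i 3 -wi 3 -f1 -t1 -rf JSON -rff benchmarks.json"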
9 | @annotations.State(annotations.Scope.Thread)
10 | class LoggingSpeedBenchmark {
11 | val Iterations: Int = 1000
12 |
13 | private val t = new Testers
14 |
15 | @annotations.Setup(annotations.Level.Trial)
16 | def doSetup(): Unit = t.all.foreach(_.init())
17 |
18 | private def withTester(tester: LoggingTester): Unit = {
19 | val messages = (0 until Iterations).map(i => s"Test $i")
20 | tester.run(messages.iterator)
21 | }
22 |
23 | @annotations.Benchmark
24 | @annotations.BenchmarkMode(Array(annotations.Mode.AverageTime, annotations.Mode.SampleTime, annotations.Mode.Throughput))
25 | @annotations.OutputTimeUnit(TimeUnit.NANOSECONDS)
26 | @annotations.OperationsPerInvocation(1000)
27 | def withScribe(): Unit = withTester(t.scribe)
28 |
29 | @annotations.Benchmark
30 | @annotations.BenchmarkMode(Array(annotations.Mode.AverageTime, annotations.Mode.SampleTime, annotations.Mode.Throughput))
31 | @annotations.OutputTimeUnit(TimeUnit.NANOSECONDS)
32 | @annotations.OperationsPerInvocation(1000)
33 | def withScribeEffect(): Unit = withTester(t.scribeEffect)
34 |
35 | @annotations.Benchmark
36 | @annotations.BenchmarkMode(Array(annotations.Mode.AverageTime, annotations.Mode.SampleTime, annotations.Mode.Throughput))
37 | @annotations.OutputTimeUnit(TimeUnit.NANOSECONDS)
38 | @annotations.OperationsPerInvocation(1000)
39 | def withScribeEffectParallel(): Unit = withTester(t.scribeEffectParallel)
40 |
41 | @annotations.Benchmark
42 | @annotations.BenchmarkMode(Array(annotations.Mode.AverageTime, annotations.Mode.SampleTime, annotations.Mode.Throughput))
43 | @annotations.OutputTimeUnit(TimeUnit.NANOSECONDS)
44 | @annotations.OperationsPerInvocation(1000)
45 | def withScribeAsync(): Unit = withTester(t.scribeAsync)
46 |
47 | @annotations.Benchmark
48 | @annotations.BenchmarkMode(Array(annotations.Mode.AverageTime, annotations.Mode.SampleTime, annotations.Mode.Throughput))
49 | @annotations.OutputTimeUnit(TimeUnit.NANOSECONDS)
50 | @annotations.OperationsPerInvocation(1000)
51 | def withLog4j(): Unit = withTester(t.log4j)
52 |
53 | @annotations.Benchmark
54 | @annotations.BenchmarkMode(Array(annotations.Mode.AverageTime, annotations.Mode.SampleTime, annotations.Mode.Throughput))
55 | @annotations.OutputTimeUnit(TimeUnit.NANOSECONDS)
56 | @annotations.OperationsPerInvocation(1000)
57 | def withLog4cats(): Unit = withTester(t.log4cats)
58 |
59 | @annotations.Benchmark
60 | @annotations.BenchmarkMode(Array(annotations.Mode.AverageTime, annotations.Mode.SampleTime, annotations.Mode.Throughput))
61 | @annotations.OutputTimeUnit(TimeUnit.NANOSECONDS)
62 | @annotations.OperationsPerInvocation(1000)
63 | def withLog4s(): Unit = withTester(t.log4s)
64 |
65 | @annotations.Benchmark
66 | @annotations.BenchmarkMode(Array(annotations.Mode.AverageTime, annotations.Mode.SampleTime, annotations.Mode.Throughput))
67 | @annotations.OutputTimeUnit(TimeUnit.NANOSECONDS)
68 | @annotations.OperationsPerInvocation(1000)
69 | def withLog4jTrace(): Unit = withTester(t.log4jTrace)
70 |
71 | @annotations.Benchmark
72 | @annotations.BenchmarkMode(Array(annotations.Mode.AverageTime, annotations.Mode.SampleTime, annotations.Mode.Throughput))
73 | @annotations.OutputTimeUnit(TimeUnit.NANOSECONDS)
74 | @annotations.OperationsPerInvocation(1000)
75 | def withScalaLogging(): Unit = withTester(t.scalaLogging)
76 |
77 | @annotations.Benchmark
78 | @annotations.BenchmarkMode(Array(annotations.Mode.AverageTime, annotations.Mode.SampleTime, annotations.Mode.Throughput))
79 | @annotations.OutputTimeUnit(TimeUnit.NANOSECONDS)
80 | @annotations.OperationsPerInvocation(1000)
81 | def withLogback(): Unit = withTester(t.logback)
82 |
83 | @annotations.Benchmark
84 | @annotations.BenchmarkMode(Array(annotations.Mode.AverageTime, annotations.Mode.SampleTime, annotations.Mode.Throughput))
85 | @annotations.OutputTimeUnit(TimeUnit.NANOSECONDS)
86 | @annotations.OperationsPerInvocation(1000)
87 | def withTinyLog(): Unit = withTester(t.tinyLog)
88 |
89 | @annotations.TearDown
90 | def tearDown(): Unit = t.all.foreach(_.dispose())
91 | }
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/LoggingStressTest.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark
2 |
3 | import scribe.file._
4 | import scribe.format.Formatter
5 | import scribe.handler.LogHandler
6 | import scribe.{Level, LogRecord, Logger}
7 |
8 | import java.nio.file.{Files, Paths}
9 | import java.util.concurrent.TimeUnit
10 | import scala.annotation.tailrec
11 |
12 | object LoggingStressTest {
13 | private var logged = 0
14 |
15 | def main(args: Array[String]): Unit = {
16 | val oneMillion = 1000000
17 | val fiveMillion = 5000000
18 | val oneHundredMillion = 100000000
19 | val elapsed = timed(fiveMillion, nullLogger())
20 | // val elapsed = timed(fiveMillion, fileLogger(Formatter.default, LogFileMode.IO))
21 | scribe.info(s"Ran in $elapsed seconds, Logged: $logged")
22 | }
23 |
24 | def stressAll(iterations: Int): Unit = {
25 | val types = List(
26 | "Null" -> nullLogger(),
27 | "Simple" -> fileLogger(Formatter.simple),
28 | "Default" -> fileLogger(Formatter.classic)
29 | )
30 | types.foreach {
31 | case (name, logger) => {
32 | val elapsed = timed(iterations, logger)
33 | scribe.info(s"$iterations for $name in $elapsed seconds")
34 | }
35 | }
36 | scribe.info("Reversing!")
37 | types.reverse.foreach {
38 | case (name, logger) => {
39 | val elapsed = timed(iterations, logger)
40 | scribe.info(s"$iterations for $name in $elapsed seconds")
41 | }
42 | }
43 | scribe.info("Completed!")
44 | }
45 |
46 | def nullLogger(): Logger = Logger("nullLogger").orphan().withHandler(new LogHandler {
47 | override def log(record: LogRecord): Unit = logged += 1
48 | }).replace()
49 |
50 | def fileLogger(formatter: Formatter): Logger = {
51 | val path = Paths.get("logs/file-logging.log")
52 | Files.deleteIfExists(path)
53 | Logger.empty.orphan().withHandler(formatter, FileWriter(path), minimumLevel = Some(Level.Info)).replace()
54 | }
55 |
56 | def timed(iterations: Int, logger: Logger): Double = {
57 | val start = System.nanoTime()
58 | hierarchicalStrain(iterations, logger)
59 | val elapsed = System.nanoTime() - start
60 | TimeUnit.MILLISECONDS.convert(elapsed, TimeUnit.NANOSECONDS) / 1000.0
61 | }
62 |
63 | @tailrec
64 | def stressLogger(iterations: Int, logger: Logger): Unit = {
65 | logger.info("Testing logging")
66 | if (iterations > 0) {
67 | stressLogger(iterations - 1, logger)
68 | }
69 | }
70 |
71 | def hierarchicalStrain(iterations: Int, logger: Logger): Unit = {
72 | val sub1 = Logger().withParent(logger).replace()
73 | val sub2 = Logger().withParent(sub1).replace()
74 | val sub3 = Logger().withParent(sub2).replace()
75 | (0 until iterations).foreach { index =>
76 | sub3.info(s"INFO $index")
77 | sub3.debug(s"DEBUG $index")
78 | sub3.trace(s"TRACE $index")
79 | }
80 | }
81 | }
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/LoggingVisualizations.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark
2 |
3 | import perfolation._
4 | import scribe.benchmark.tester.{LoggingTester, Testers}
5 |
6 | import java.io.{BufferedWriter, File, FileOutputStream, OutputStreamWriter}
7 | import java.util.concurrent.TimeUnit
8 | import scala.annotation.tailrec
9 | import scala.io.Source
10 |
11 | object LoggingVisualizations {
12 | val Iterations: Int = 10_000_000
13 |
14 | private lazy val writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(new File("results.txt"), true)))
15 |
16 | def main(args: Array[String]): Unit = {
17 | val testers = new Testers
18 | val list = args.toList match {
19 | case name :: Nil => List(testers.all.find(_.name.equalsIgnoreCase(name)).getOrElse(throw new RuntimeException(s"Unable to find $name")))
20 | case Nil => testers.all
21 | case list => sys.error(s"Expected zero or one argument, got: $list")
22 | }
23 | list.foreach { tester =>
24 | benchmark(tester)
25 | }
26 | }
27 |
28 | def logFiles(waitForFinished: Boolean): List[File] = {
29 | val logs = new File("logs")
30 | if (logs.isDirectory) {
31 | def list = logs.listFiles().toList
32 | var waited = 0
33 | while (list.isEmpty && waitForFinished && waited < 10) {
34 | waited += 1
35 | Thread.sleep(250)
36 | }
37 | if (list.nonEmpty && waitForFinished) {
38 | @tailrec
39 | def hasChanges(lastChanges: Long): Unit = {
40 | val lastModified = list.map(_.lastModified()).max
41 | if (lastModified == lastChanges) {
42 | // Finished
43 | } else {
44 | Thread.sleep(1000L)
45 | hasChanges(lastModified)
46 | }
47 | }
48 | hasChanges(0L)
49 | }
50 | list
51 | } else {
52 | Nil
53 | }
54 | }
55 |
56 | def linesFor(file: File): Int = {
57 | val source = Source.fromFile(file)
58 | try {
59 | source.getLines().length
60 | } finally {
61 | source.close()
62 | }
63 | }
64 |
65 | def deleteLogs(): Unit = logFiles(false).foreach { f =>
66 | if (!f.delete()) {
67 | println(s"${f.getName} not able to be deleted")
68 | }
69 | }
70 |
71 | def benchmark(tester: LoggingTester): Unit = {
72 | deleteLogs()
73 | val initTime = elapsed(tester.init())
74 | val messages = (0 until Iterations).map(i => s"visualize $i").iterator
75 | val runTime = elapsed(tester.run(messages))
76 | val disposeTime = elapsed(tester.dispose())
77 | println(s"${tester.name}: Init: $initTime, Run: $runTime, Dispose: $disposeTime")
78 | var fileLines = Map.empty[String, Int]
79 | val filesTime = elapsed {
80 | val logs = logFiles(true)
81 | logs.foreach { file =>
82 | if (file.length() > 0L) {
83 | val lines = linesFor(file)
84 | println(s"\t${file.getName} created with $lines lines")
85 | fileLines += file.getName -> lines
86 | }
87 | }
88 | if (logs.isEmpty) System.err.println("*** NO LOG FILES CREATED")
89 | }
90 | println(s"\tFiles: $filesTime")
91 | writer.write(s"${tester.name}:\n")
92 | writer.write(s"\tInit: $initTime, Run: $runTime, Files: $filesTime, Lines: ${fileLines.map(t => s"${t._1}: ${t._2}").mkString(", ")}, Dispose: $disposeTime\n")
93 | writer.flush()
94 | }
95 |
96 | private def elapsed(f: => Unit): String = {
97 | val start = System.nanoTime()
98 | f
99 | val elapsed = System.nanoTime() - start
100 | val ms = TimeUnit.MILLISECONDS.convert(elapsed, TimeUnit.NANOSECONDS)
101 | s"${(ms / 1000.0).f(f = 3)} seconds"
102 | }
103 | }
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/PerformanceBenchmark.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark
2 |
3 | import org.openjdk.jmh.annotations
4 | import scribe.output.LogOutput
5 | import scribe.output.format.OutputFormat
6 | import scribe.writer.Writer
7 | import scribe.{Level, LogRecord, Logger}
8 |
9 | import java.util.concurrent.TimeUnit
10 | import java.util.concurrent.atomic.AtomicLong
11 |
12 | @annotations.State(annotations.Scope.Thread)
13 | class PerformanceBenchmark {
14 | private lazy val debug = new AtomicLong(0L)
15 | private lazy val info = new AtomicLong(0L)
16 | private lazy val error = new AtomicLong(0L)
17 |
18 | private lazy val logger = Logger.empty.orphan().withHandler(
19 | minimumLevel = Some(Level.Info),
20 | writer = new Writer {
21 | override def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit = record.level match {
22 | case Level.Debug => debug.incrementAndGet()
23 | case Level.Info => info.incrementAndGet()
24 | case Level.Error => error.incrementAndGet()
25 | case _ => // Ignore
26 | }
27 | }
28 | )
29 |
30 | @annotations.Benchmark
31 | @annotations.BenchmarkMode(Array(annotations.Mode.AverageTime))
32 | @annotations.OutputTimeUnit(TimeUnit.NANOSECONDS)
33 | @annotations.OperationsPerInvocation(1000)
34 | def withTrace(): Unit = {
35 | (0 until 1000).foreach { index =>
36 | logger.trace(s"Value: $index")
37 | }
38 | }
39 |
40 | @annotations.Benchmark
41 | @annotations.BenchmarkMode(Array(annotations.Mode.AverageTime))
42 | @annotations.OutputTimeUnit(TimeUnit.NANOSECONDS)
43 | @annotations.OperationsPerInvocation(1000)
44 | def withDebug(): Unit = {
45 | (0 until 1000).foreach { index =>
46 | logger.debug(s"Value: $index")
47 | }
48 | }
49 |
50 | @annotations.Benchmark
51 | @annotations.BenchmarkMode(Array(annotations.Mode.AverageTime))
52 | @annotations.OutputTimeUnit(TimeUnit.NANOSECONDS)
53 | @annotations.OperationsPerInvocation(1000)
54 | def withInfo(): Unit = {
55 | (0 until 1000).foreach { index =>
56 | logger.info(s"Value: $index")
57 | }
58 | }
59 |
60 | @annotations.Benchmark
61 | @annotations.BenchmarkMode(Array(annotations.Mode.AverageTime))
62 | @annotations.OutputTimeUnit(TimeUnit.NANOSECONDS)
63 | @annotations.OperationsPerInvocation(1000)
64 | def withError(): Unit = {
65 | (0 until 1000).foreach { index =>
66 | logger.error(s"Value: $index")
67 | }
68 | }
69 | }
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/tester/Log4CatsLoggingTester.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark.tester
2 |
3 | import cats.effect._
4 | import cats.effect.unsafe.implicits.global
5 | import org.typelevel.log4cats.slf4j.Slf4jLogger
6 | import org.typelevel.log4cats.{Logger, SelfAwareStructuredLogger}
7 |
8 | class Log4CatsLoggingTester extends LoggingTester {
9 | implicit def unsafeLogger[F[_] : Sync]: SelfAwareStructuredLogger[F] = Slf4jLogger.getLoggerFromName[F]("log4cats")
10 |
11 | override def run(messages: Iterator[String]): Unit = {
12 | val logger = Logger[IO]
13 | fs2.Stream
14 | .fromIterator[IO](messages, 1000)
15 | .evalTapChunk(msg => logger.info(msg))
16 | .compile
17 | .drain
18 | .unsafeRunSync()
19 | }
20 | }
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/tester/Log4JLoggingTester.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark.tester
2 |
3 | import org.apache.logging.log4j.LogManager
4 |
5 | class Log4JLoggingTester extends LoggingTester {
6 | override def init(): Unit = assert(LogManager.getRootLogger.isInfoEnabled, "INFO is not enabled in log4j!")
7 |
8 | override def run(messages: Iterator[String]): Unit = {
9 | val logger = LogManager.getRootLogger
10 | messages.foreach(logger.info)
11 | }
12 |
13 | override def dispose(): Unit = LogManager.shutdown()
14 | }
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/tester/Log4JTraceLoggingTester.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark.tester
2 |
3 | import org.apache.logging.log4j.LogManager
4 |
5 | class Log4JTraceLoggingTester extends LoggingTester {
6 | override def run(messages: Iterator[String]): Unit = {
7 | val logger = LogManager.getLogger("Trace")
8 | messages.foreach(logger.info)
9 | }
10 |
11 | override def dispose(): Unit = LogManager.shutdown()
12 | }
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/tester/Log4SLoggingTester.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark.tester
2 |
3 | class Log4SLoggingTester extends LoggingTester {
4 | override def run(messages: Iterator[String]): Unit = {
5 | val logger = org.log4s.getLogger("file")
6 | messages.foreach(msg => logger.info(msg))
7 | }
8 | }
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/tester/LogbackLoggingTester.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark.tester
2 |
3 | class LogbackLoggingTester extends LoggingTester {
4 | override def run(messages: Iterator[String]): Unit = {
5 | val logger = org.slf4j.LoggerFactory.getLogger("logback")
6 | messages.foreach(logger.info)
7 | }
8 | }
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/tester/LoggingTester.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark.tester
2 |
3 | trait LoggingTester {
4 | lazy val name: String = getClass.getSimpleName.replace("LoggingTester", "")
5 |
6 | def init(): Unit = {}
7 |
8 | def run(messages: Iterator[String]): Unit
9 |
10 | def dispose(): Unit = {}
11 | }
12 |
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/tester/ScalaLoggingLoggingTester.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark.tester
2 |
3 | import com.typesafe.config.ConfigFactory
4 | import com.typesafe.scalalogging._
5 |
6 | class ScalaLoggingLoggingTester extends LoggingTester {
7 | override def init(): Unit = ConfigFactory.load()
8 |
9 | override def run(messages: Iterator[String]): Unit = {
10 | val logger = Logger("root")
11 | messages.foreach(msg => logger.info(msg))
12 | }
13 | }
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/tester/ScribeAsyncLoggingTester.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark.tester
2 |
3 | import scribe.Logger
4 | import scribe.file._
5 | import scribe.format.{messages, _}
6 | import scribe.handler.AsynchronousLogHandle
7 |
8 | class ScribeAsyncLoggingTester extends LoggingTester {
9 | private lazy val fileWriter = FileWriter("logs" / "scribe-async.log")
10 | private lazy val formatter = formatter"$date $levelPaddedRight [$threadName] $messages"
11 | private lazy val asyncHandle = AsynchronousLogHandle()
12 | private lazy val logger = Logger.empty.orphan().withHandler(formatter = formatter, writer = fileWriter, handle = asyncHandle)
13 |
14 | override def init(): Unit = logger
15 |
16 | override def run(messages: Iterator[String]): Unit = {
17 | messages.foreach { message =>
18 | logger.info(message)
19 | }
20 | }
21 |
22 | override def dispose(): Unit = fileWriter.dispose()
23 | }
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/tester/ScribeEffectLoggingTester.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark.tester
2 |
3 | import cats.effect.IO
4 | import cats.effect.unsafe.implicits.global
5 | import scribe.Logger
6 | import scribe.cats._
7 | import scribe.file._
8 | import scribe.format._
9 |
10 | class ScribeEffectLoggingTester extends LoggingTester {
11 | private lazy val fileWriter = FileWriter("logs" / "scribe-effect.log")
12 | private lazy val formatter = formatter"$date $levelPaddedRight [$threadName] $messages"
13 | private lazy val logger = Logger.empty.orphan().withHandler(formatter = formatter, writer = fileWriter).f[IO]
14 |
15 | override def init(): Unit = logger
16 |
17 | override def run(messages: Iterator[String]): Unit = fs2.Stream
18 | .fromIterator[IO](messages, 1000)
19 | .evalTap(msg => logger.info(msg))
20 | .compile
21 | .drain
22 | .unsafeRunSync()
23 |
24 | override def dispose(): Unit = fileWriter.dispose()
25 | }
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/tester/ScribeEffectParallelLoggingTester.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark.tester
2 |
3 | import cats.effect.IO
4 | import cats.effect.unsafe.implicits.global
5 | import scribe.Logger
6 | import scribe.cats._
7 | import scribe.file._
8 | import scribe.format._
9 |
10 | class ScribeEffectParallelLoggingTester extends LoggingTester {
11 | private lazy val fileWriter = FileWriter("logs" / "scribe-effect-par.log")
12 | private lazy val formatter = formatter"$date $levelPaddedRight [$threadName] $messages"
13 | private lazy val logger = Logger.empty.orphan().withHandler(formatter = formatter, writer = fileWriter).f[IO]
14 |
15 | override def init(): Unit = logger
16 |
17 | override def run(messages: Iterator[String]): Unit = fs2.Stream
18 | .fromIterator[IO](messages, 1000)
19 | .parEvalMap(1000)(msg => logger.info(msg))
20 | .compile
21 | .drain
22 | .unsafeRunSync()
23 |
24 | override def dispose(): Unit = fileWriter.dispose()
25 | }
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/tester/ScribeLoggingTester.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark.tester
2 |
3 | import scribe.Logger
4 | import scribe.file._
5 | import scribe.format.{messages, _}
6 |
7 | class ScribeLoggingTester extends LoggingTester {
8 | private lazy val fileWriter = FileWriter("logs" / "scribe.log")
9 | private lazy val formatter = formatter"$date $levelPaddedRight [$threadName] $messages"
10 | private lazy val logger = Logger.empty.orphan().withHandler(formatter = formatter, writer = fileWriter)
11 |
12 | override def init(): Unit = logger
13 |
14 | override def run(messages: Iterator[String]): Unit = {
15 | messages.foreach { message =>
16 | logger.info(message)
17 | }
18 | }
19 |
20 | override def dispose(): Unit = fileWriter.dispose()
21 | }
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/tester/Testers.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark.tester
2 |
3 | class Testers {
4 | val log4cats = new Log4CatsLoggingTester
5 | val log4j = new Log4JLoggingTester
6 | val log4jTrace = new Log4JTraceLoggingTester
7 | val log4s = new Log4SLoggingTester
8 | val logback = new LogbackLoggingTester
9 | val scalaLogging = new ScalaLoggingLoggingTester
10 | val scribeAsync = new ScribeAsyncLoggingTester
11 | val scribeEffect = new ScribeEffectLoggingTester
12 | val scribeEffectParallel = new ScribeEffectParallelLoggingTester
13 | val scribe = new ScribeLoggingTester
14 | val tinyLog = new TinyLogLoggingTester
15 |
16 | val all: List[LoggingTester] = List(
17 | scribe, scribeAsync, scribeEffect, scribeEffectParallel,
18 | log4cats, log4s, scalaLogging,
19 | log4j, log4jTrace, logback, tinyLog
20 | )
21 | }
22 |
--------------------------------------------------------------------------------
/benchmarks/src/main/scala/scribe/benchmark/tester/TinyLogLoggingTester.scala:
--------------------------------------------------------------------------------
1 | package scribe.benchmark.tester
2 |
3 | import org.pmw.tinylog
4 |
5 | class TinyLogLoggingTester extends LoggingTester {
6 | override def init(): Unit = tinylog.Configurator
7 | .defaultConfig()
8 | .removeAllWriters()
9 | .level(tinylog.Level.INFO)
10 | .formatPattern("[{thread}] {class}.{method}(){level}: {message}")
11 | .writer(new tinylog.writers.FileWriter("logs/tiny.log"))
12 | .activate()
13 |
14 | override def run(messages: Iterator[String]): Unit = {
15 | messages.foreach(tinylog.Logger.info)
16 | }
17 | }
--------------------------------------------------------------------------------
/cats/shared/src/main/scala/scribe/LoggerWrapper.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import _root_.cats.effect.Sync
4 | import scribe.mdc.MDC
5 | import scribe.message.LoggableMessage
6 | import sourcecode.{FileName, Line, Name, Pkg}
7 |
8 | class LoggerWrapper[F[_]](val wrapped: Logger, val sync: Sync[F]) extends Scribe[F] {
9 | override def log(record: => LogRecord): F[Unit] = sync.delay(wrapped.log(record))
10 |
11 | override def log(level: Level, mdc: MDC, features: LogFeature*)
12 | (implicit pkg: Pkg, fileName: FileName, name: Name, line: Line): F[Unit] =
13 | sync.defer(super.log(level, mdc, features: _*)(pkg, fileName, name, line))
14 | }
15 |
--------------------------------------------------------------------------------
/cats/shared/src/main/scala/scribe/Scribe.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | trait Scribe[F[_]] extends Any with LoggerSupport[F[Unit]]
4 |
5 | object Scribe {
6 | def apply[F[_]: Scribe]: Scribe[F] = implicitly[Scribe[F]]
7 | }
--------------------------------------------------------------------------------
/cats/shared/src/main/scala/scribe/ScribeImpl.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import _root_.cats.effect._
4 | import scribe.mdc.MDC
5 | import scribe.message.LoggableMessage
6 | import sourcecode.{FileName, Line, Name, Pkg}
7 |
8 | class ScribeImpl[F[_]](val sync: Sync[F]) extends AnyVal with Scribe[F] {
9 | override def log(record: => LogRecord): F[Unit] = sync.delay(Logger(record.className).log(record))
10 |
11 | override def log(level: Level, mdc: MDC, features: LogFeature*)
12 | (implicit pkg: Pkg, fileName: FileName, name: Name, line: Line): F[Unit] =
13 | sync.defer(log(LoggerSupport(level, Nil, pkg, fileName, name, line, mdc).withFeatures(features: _*)))
14 | }
--------------------------------------------------------------------------------
/cats/shared/src/main/scala/scribe/cats.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import _root_.cats.effect._
4 | import perfolation._
5 |
6 | object cats {
7 | lazy val io: Scribe[IO] = apply[IO]
8 |
9 | implicit def effect[F[_]](implicit sync: Sync[F]): Scribe[F] = apply[F]
10 | implicit class LoggerExtras(val logger: Logger) extends AnyVal {
11 | def f[F[_]](implicit sync: Sync[F]): Scribe[F] = new LoggerWrapper[F](logger, sync)
12 | }
13 | implicit class IOExtras[Return](val io: IO[Return]) extends AnyVal {
14 | def timed(label: String)(implicit timer: Timer): IO[Return] = timer.chain(io, label)
15 | }
16 |
17 | def timer[Return](f: Timer => IO[Return]): IO[Return] = for {
18 | timer <- IO.blocking(Timer(System.currentTimeMillis()))
19 | r <- f(timer)
20 | } yield r
21 |
22 | def timed[Return](f: => IO[Return])(log: (Return, Double) => IO[Unit]): IO[Return] = timer { timer =>
23 | f.flatMap { r =>
24 | val now = System.currentTimeMillis()
25 | val elapsed = (now - timer.start) / 1000.0
26 | log(r, elapsed).map(_ => r)
27 | }
28 | }
29 |
30 | def apply[F[_]: Sync]: Scribe[F] = new ScribeImpl[F](implicitly[Sync[F]])
31 |
32 | case class Timer(start: Long) { self =>
33 | private var last: Long = start
34 |
35 | def log(label: String): IO[Unit] = for {
36 | now <- IO.blocking(System.currentTimeMillis())
37 | elapsed = ((now - start) / 1000.0).f(f = 3)
38 | previous = ((now - last) / 1000.0).f(f = 3)
39 | _ <- io.info(s"$label (Elapsed: $elapsed seconds, Since Previous: $previous seconds)")
40 | _ <- IO.blocking {
41 | self.synchronized {
42 | last = now
43 | }
44 | }
45 | } yield ()
46 |
47 | def chain[Return](prev: IO[Return], label: String): IO[Return] = prev.flatMap { r =>
48 | log(label).map(_ => r)
49 | }
50 | }
51 | }
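// Usage sketch for the helpers above (illustrative only; `expensiveIO` is a hypothetical IO value):
//   import scribe.cats._
//   val result: IO[Int] = scribe.cats.timed(expensiveIO) { (r, seconds) =>
//     scribe.cats.io.info(s"produced $r in $seconds seconds")
//   }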
--------------------------------------------------------------------------------
/cats/shared/src/test/scala/spec/ScribeSpec.scala:
--------------------------------------------------------------------------------
1 | package spec
2 |
3 | import cats.MonadThrow
4 | import cats.effect._
5 | import cats.effect.testing.scalatest.AsyncIOSpec
6 | import cats.syntax.all._
7 | import org.scalatest.matchers.should.Matchers
8 | import org.scalatest.wordspec.AsyncWordSpec
9 | import scribe.handler.LogHandler
10 | import scribe.{LogRecord, Logger, Scribe}
11 |
12 | class ScribeSpec extends AsyncWordSpec with AsyncIOSpec with Matchers {
13 | "ScribeEffect" should {
14 | var messages: List[String] = Nil
15 | Logger.root
16 | .clearHandlers()
17 | .withHandler(new LogHandler {
18 | override def log(record: LogRecord): Unit = synchronized {
19 | messages = record.messages.map(_.logOutput.plainText) ::: messages
20 | }
21 | })
22 | .replace()
23 |
24 | "do cats.io logging" in {
25 | scribe.cats.io.info("1").map { _ =>
26 | messages should be(List("1"))
27 | }
28 | }
29 | "do cats[IO] logging" in {
30 | messages = Nil
31 |
32 | scribe.cats[IO].info("2").map { _ =>
33 | messages should be(List("2"))
34 | }
35 | }
36 | "do instantiation logging" in {
37 | messages = Nil
38 |
39 | import scribe.cats._
40 |
41 | val biz = new Biz[IO]
42 | biz.doStuff().map { s =>
43 | messages should be(List("3"))
44 | s should be("done")
45 | }
46 | }
47 | "do reference logging" in {
48 | messages = Nil
49 |
50 | val logger = scribe.cats[IO]
51 | logger.info("4").map { _ =>
52 | messages should be(List("4"))
53 | }
54 | }
55 | "do existing logger logging" in {
56 | messages = Nil
57 |
58 | import scribe.cats._
59 | Logger.root.f[IO].info("5").map { _ =>
60 | messages should be(List("5"))
61 | }
62 | }
63 | }
64 |
65 | class Biz[F[_]: MonadThrow: Scribe] {
66 | def doStuff(): F[String] = for {
67 | _ <- Scribe[F].info("3")
68 | } yield {
69 | "done"
70 | }
71 | }
72 | }
--------------------------------------------------------------------------------
/config/src/main/resources/moduload.list:
--------------------------------------------------------------------------------
1 | scribe.ScribeConfig
--------------------------------------------------------------------------------
/config/src/main/scala/scribe/ScribeConfig.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import fabric.rw._
4 | import moduload.Moduload
5 | import profig._
6 |
7 | object ScribeConfig extends Moduload {
8 | implicit def rw: RW[ScribeConfig] = RW.gen
9 |
10 | private var _loaded: Boolean = false
11 | def loaded: Boolean = _loaded
12 |
13 | override def load(): Unit = {
14 | Profig.initConfiguration()
15 | // val config = Profig("scribe").as[ScribeConfig]
16 | // TODO: Support
17 | _loaded = true
18 | }
19 |
20 | override def error(t: Throwable): Unit = scribe.error("Error while loading scribe-config", t)
21 | }
22 |
23 | case class ScribeConfig()
--------------------------------------------------------------------------------
/config/src/test/scala/spec/ConfigSpec.scala:
--------------------------------------------------------------------------------
1 | package spec
2 |
3 | import org.scalatest.matchers.should.Matchers
4 | import org.scalatest.wordspec.AnyWordSpec
5 | import scribe.{Logger, ScribeConfig}
6 |
7 | class ConfigSpec extends AnyWordSpec with Matchers {
8 | "ScribeConfig" should {
9 | "automatically load" in {
10 | Logger
11 | ScribeConfig.loaded should be(true)
12 | }
13 | }
14 | }
--------------------------------------------------------------------------------
/core/js/src/main/scala/scribe/JavaScriptConsole.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import scala.scalajs.js
4 |
5 | /**
6 | * Facade around extra features of the JavaScript console in the browser
7 | */
8 | @js.native
9 | trait JavaScriptConsole extends js.Object {
10 |
11 | /**
12 | * Outputs an informational message to the Web Console. In Firefox, a small "i" icon is
13 | * displayed next to these items in the Web Console's log.
14 | *
15 | * MDN
16 | */
17 | def info(message: js.Any, optionalParams: js.Any*): Unit = js.native
18 |
19 | def profile(reportName: String = js.native): Unit = js.native
20 |
21 | def assert(test: Boolean, message: String,
22 | optionalParams: js.Any*): Unit = js.native
23 |
24 | def clear(): Unit = js.native
25 |
26 | /**
27 | * Displays an interactive list of the properties of the specified JavaScript
28 | * object. The output is presented as a hierarchical listing with disclosure
29 | * triangles that let you see the contents of child objects.
30 | *
31 | * MDN
32 | */
33 | def dir(value: js.Any, optionalParams: js.Any*): Unit = js.native
34 |
35 | /**
36 | * Outputs a warning message. You may use string substitution and additional
37 | * arguments with this method. See Using string substitutions.
38 | *
39 | * MDN
40 | */
41 | def warn(message: js.Any, optionalParams: js.Any*): Unit = js.native
42 |
43 | /**
44 | * Outputs an error message. You may use string substitution and additional
45 | * arguments with this method. See Using string substitutions.
46 | *
47 | * MDN
48 | */
49 | def error(message: js.Any, optionalParams: js.Any*): Unit = js.native
50 |
51 | /**
52 | * For general output of logging information. You may use string substitution and
53 | * additional arguments with this method. See Using string substitutions.
54 | *
55 | * MDN
56 | */
57 | def log(message: js.Any, optionalParams: js.Any*): Unit = js.native
58 |
59 | def profileEnd(): Unit = js.native
60 | }
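// Usage sketch (illustrative only): on Scala.js this facade is reached through scribe.Platform, e.g.
//   Platform.console.log("plain message")
//   Platform.console.warn("%cstyled message", "color: orange")  // "%c" applies the CSS argument in browsers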
61 |
--------------------------------------------------------------------------------
/core/js/src/main/scala/scribe/Platform.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import scribe.output.format.{ANSIOutputFormat, ASCIIOutputFormat, OutputFormat, RichBrowserOutputFormat}
4 | import scribe.writer.{BrowserConsoleWriter, Writer}
5 |
6 | import scala.concurrent.ExecutionContext
7 | import scala.scalajs.js
8 | import scala.scalajs.js.Dictionary
9 | import scala.util.Try
10 |
11 | object Platform extends PlatformImplementation {
12 | def isJVM: Boolean = false
13 | def isJS: Boolean = true
14 | def isNative: Boolean = false
15 |
16 | // $COVERAGE-OFF$
17 | lazy val isNodeJS: Boolean = Try(js.Dynamic.global.process.release.name.asInstanceOf[String]).toOption.contains("node")
18 |
19 | def init(): Unit = {}
20 |
21 | def console: JavaScriptConsole = js.Dynamic.global.console.asInstanceOf[JavaScriptConsole]
22 |
23 | private def processEnv: Dictionary[Any] = Try(js.Dynamic.global.process.env.asInstanceOf[js.Dictionary[Any]])
24 | .getOrElse(js.Dictionary.empty)
25 | // $COVERAGE-ON$
26 |
27 | override def env(key: String): Option[String] = processEnv.get(key).map(_.toString)
28 |
29 | override def outputFormat(): OutputFormat = if (isNodeJS) {
30 | super.outputFormat()
31 | } else {
32 | RichBrowserOutputFormat
33 | }
34 |
35 | override def consoleWriter: Writer = BrowserConsoleWriter
36 |
37 | override val columns: Int = 120 + columnsAdjust
38 |
39 | override def rows: Int = -1
40 |
41 | override def executionContext: ExecutionContext = scala.scalajs.concurrent.JSExecutionContext.queue
42 | }
--------------------------------------------------------------------------------
/core/js/src/main/scala/scribe/output/format/RichBrowserOutputFormat.scala:
--------------------------------------------------------------------------------
1 | package scribe.output.format
2 |
3 | import scribe.output._
4 | import scribe.writer.BrowserConsoleWriter
5 |
6 | /**
7 | * Supports rich output to JavaScript console in the browser
8 | */
9 | object RichBrowserOutputFormat extends OutputFormat {
10 | import BrowserConsoleWriter.args
11 |
12 | override def apply(output: LogOutput, stream: String => Unit): Unit = recurse(output, stream)
13 |
14 | private def recurse(output: LogOutput, stream: String => Unit): Unit = {
15 | def withArg(key: String, value: String, output: LogOutput): Unit = {
16 | stream("%c")
17 | args.around(key -> value) {
18 | recurse(output, stream)
19 | }
20 | stream("%c")
21 | }
22 | output match {
23 | case o: TextOutput => stream(o.plainText)
24 | case o: CompositeOutput => o.entries.foreach(recurse(_, stream))
25 | case o: ColoredOutput => withArg("color", color2CSS(o.color), o.output)
26 | case o: BackgroundColoredOutput => withArg("background-color", color2CSS(o.color), o.output)
27 | case o: URLOutput =>
28 | stream("%o (")
29 | args.around("::URL" -> o.url) {
30 | recurse(o.output, stream)
31 | }
32 | stream(")")
33 | case o: BoldOutput => withArg("font-weight", "bold", o.output)
34 | case o: ItalicOutput => withArg("font-style", "italic", o.output)
35 | case o: UnderlineOutput => withArg("text-decoration", "underline", o.output)
36 | case o: StrikethroughOutput => withArg("text-decoration", "line-through", o.output)
37 | case _ => stream(output.plainText)
38 | }
39 | }
40 |
41 | private def color2CSS(color: Color): String = color match {
42 | case Color.Black => "black"
43 | case Color.Blue => "blue"
44 | case Color.Cyan => "cyan"
45 | case Color.Green => "green"
46 | case Color.Magenta => "magenta"
47 | case Color.Red => "red"
48 | case Color.White => "white"
49 | case Color.Yellow => "yellow"
50 | case Color.Gray => "gray"
51 | case Color.BrightBlue => "lightblue"
52 | case Color.BrightCyan => "lightcyan"
53 | case Color.BrightGreen => "lime"
54 | case Color.BrightMagenta => "violet"
55 | case Color.BrightRed => "crimson"
56 | case Color.BrightWhite => "white"
57 | case Color.BrightYellow => "lightyellow"
58 | }
59 | }
--------------------------------------------------------------------------------
/core/js/src/main/scala/scribe/writer/BrowserConsoleWriter.scala:
--------------------------------------------------------------------------------
1 | package scribe.writer
2 |
3 | import scribe.Platform._
4 | import scribe._
5 | import scribe.output._
6 | import scribe.output.format.OutputFormat
7 |
8 | import scala.collection.mutable
9 | import scala.collection.mutable.ListBuffer
10 | import scala.scalajs.js
11 |
12 | /**
13 | * Writer specifically to target the JavaScript console in the browser
14 | */
15 | object BrowserConsoleWriter extends Writer {
16 | private var map = Map.empty[String, String]
17 | private var argsList = List.empty[String]
18 |
19 | object args {
20 | def around[Return](t: (String, String))(f: => Return): Return = {
21 | this += t
22 | try {
23 | f
24 | } finally {
25 | this -= t._1
26 | }
27 | }
28 | private def append(): Unit = argsList = map.map {
29 | case (key, value) if key.startsWith("::") => value
30 | case (key, value) => s"$key: $value"
31 | }.mkString("; ") :: argsList
32 | def +=(t: (String, String)): Unit = {
33 | map += t
34 | append()
35 | }
36 | def -=(key: String): Unit = {
37 | map -= key
38 | append()
39 | }
40 | }
41 |
42 | override def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit = {
43 | val b = new mutable.StringBuilder
44 | map = Map.empty
45 | argsList = Nil
46 | outputFormat.begin(b.append(_))
47 | outputFormat(output, b.append(_))
48 | outputFormat.end(b.append(_))
49 |
50 | val jsArgs = argsList.map(js.Any.fromString).reverse
51 | if (record.level >= Level.Error) {
52 | console.error(b.toString(), jsArgs: _*)
53 | } else if (record.level >= Level.Warn) {
54 | console.warn(b.toString(), jsArgs: _*)
55 | } else {
56 | console.log(b.toString(), jsArgs: _*)
57 | }
58 | }
59 | }
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/scribe/Platform.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import moduload.Moduload
4 | import scribe.output.format.{ANSIOutputFormat, ASCIIOutputFormat, OutputFormat}
5 | import scribe.writer.{SystemWriter, Writer}
6 |
7 | import java.io.{BufferedReader, File, InputStreamReader}
8 | import java.nio.file.Files
9 | import scala.concurrent.ExecutionContext
10 | import scala.util.Try
11 |
12 | object Platform extends PlatformImplementation {
13 | var maximumColumns: Int = 5000
14 | var minimumColumns: Int = 10
15 | var minimumRows: Int = 5
16 | var columnsOverride: Option[Int] = None
17 | var rowsOverride: Option[Int] = None
18 |
19 | private var lastChecked: Long = 0L
20 | private var cachedColumns: Int = -1
21 | private var cachedRows: Int = -1
22 |
23 | private lazy val cacheDirectory = {
24 | val f = new File(System.getProperty("user.home"), ".cache/scribe")
25 | f.mkdirs()
26 | f
27 | }
28 |
29 | var columnCheckFrequency: Long = 5 * 1000L
30 |
31 | def isJVM: Boolean = true
32 | def isJS: Boolean = false
33 | def isNative: Boolean = false
34 |
35 | def init(): Unit = {
36 | // Load Moduload
37 | Moduload.load()
38 | }
39 |
40 | override def consoleWriter: Writer = SystemWriter
41 |
42 | override def columns: Int = columnsOverride.getOrElse {
43 | updateConsoleSize()
44 | if (cachedColumns < minimumColumns) {
45 | maximumColumns
46 | } else {
47 | cachedColumns + columnsAdjust
48 | }
49 | }
50 |
51 |
52 | override def rows: Int = rowsOverride.getOrElse {
53 | updateConsoleSize()
54 | if (cachedRows < minimumRows) {
55 | -1
56 | } else {
57 | cachedRows
58 | }
59 | }
60 |
61 | private def updateConsoleSize(): Unit = {
62 | val now = System.currentTimeMillis()
63 | if (now - lastChecked >= columnCheckFrequency) {
64 | lastChecked = now
65 | val (c, r) = queryTput()
66 | cachedColumns = c
67 | cachedRows = r
68 | }
69 | }
70 |
71 | def queryTput(): (Int, Int) = Try {
72 | val pb = new ProcessBuilder("bash", "-c", "tput cols lines 2> /dev/tty")
73 | val p = pb.start()
74 | val i = new BufferedReader(new InputStreamReader(p.getInputStream))
75 | try {
76 | val columns = i.readLine().trim.toInt
77 | val rows = i.readLine().trim.toInt
78 | (columns, rows)
79 | } finally {
80 | i.close()
81 | }
82 | }.getOrElse((-1, -1))
83 |
84 | private lazy val scriptFile = new File(cacheDirectory, "cursor-position.sh")
85 |
86 | def cursor(): (Int, Int) = Try {
87 | if (!scriptFile.isFile) {
88 | val script =
89 | """exec < /dev/tty
90 | |oldstty=$(stty -g)
91 | |stty raw -echo min 0
92 | |echo -en "\033[6n" > /dev/tty
93 | |IFS=';' read -r -d R -a pos
94 | |stty $oldstty
95 | |row=$((${pos[0]:2} - 1))
96 | |col=$((${pos[1]} - 1))
97 | |echo $row $col""".stripMargin
98 | Files.write(scriptFile.toPath, script.getBytes("UTF-8"))
99 |       val pb = new ProcessBuilder("bash", "-c", s"chmod +x ${scriptFile.getCanonicalPath}")
100 | pb.start().waitFor()
101 | }
102 | val pb = new ProcessBuilder("bash", scriptFile.getCanonicalPath)
103 | val p = pb.start()
104 | val i = new BufferedReader(new InputStreamReader(p.getInputStream))
105 | try {
106 | val line = i.readLine()
107 | val (row, col) = line.splitAt(line.indexOf(' '))
108 | (row.trim.toInt, col.trim.toInt)
109 | } finally {
110 | i.close()
111 | }
112 | }.getOrElse((-1, -1))
113 |
114 | override def executionContext: ExecutionContext = ExecutionContext.global
115 | }
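A small sketch (illustrative, not part of this file) of the knobs this Platform exposes; the values are arbitrary.

import scribe.Platform

// Pin the console dimensions instead of shelling out to `tput` periodically.
Platform.columnsOverride = Some(120)
Platform.rowsOverride = Some(40)

// Or keep auto-detection but refresh the cached size every 30 seconds.
Platform.columnCheckFrequency = 30 * 1000L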
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/scribe/handler/AsynchronousLogHandle.scala:
--------------------------------------------------------------------------------
1 | package scribe.handler
2 |
3 | import scribe.LogRecord
4 |
5 | import java.util.concurrent.ConcurrentLinkedQueue
6 | import java.util.concurrent.atomic.AtomicLong
7 | import scala.annotation.tailrec
8 | import scala.language.implicitConversions
9 |
10 | /**
11 |  * Provides support for asynchronous logging, processing each log record on a separate daemon thread so that the
12 |  * calling thread never blocks on formatting or writing.
13 |  *
14 |  * The formatter, writer, output format, and modifiers all come from the LogHandlerBuilder passed to `log`; this
15 |  * handle only controls the buffering behavior.
16 |  *
17 |  * @param maxBuffer the maximum buffer before overflow occurs (defaults to AsynchronousLogHandle.DefaultMaxBuffer)
18 |  * @param overflow what to do with overflows (defaults to Overflow.DropOld)
19 |  */
20 | case class AsynchronousLogHandle(maxBuffer: Int = AsynchronousLogHandle.DefaultMaxBuffer,
21 | overflow: Overflow = Overflow.DropOld) extends LogHandle {
22 | private lazy val cached = new AtomicLong(0L)
23 |
24 | private lazy val queue = {
25 | val q = new ConcurrentLinkedQueue[(LogHandlerBuilder, LogRecord)]
26 | val t = new Thread {
27 | setDaemon(true)
28 |
29 | override def run(): Unit = while (true) {
30 | if (flushNext()) {
31 | Thread.sleep(1L)
32 | } else {
33 | Thread.sleep(10L)
34 | }
35 | }
36 | }
37 | t.start()
38 | q
39 | }
40 |
41 | def flushNext(): Boolean = Option(queue.poll()) match {
42 | case Some((handler, record)) => {
43 | cached.decrementAndGet()
44 | SynchronousLogHandle.log(handler, record)
45 | true
46 | }
47 | case None => false
48 | }
49 |
50 | @tailrec
51 | final def flush(): Unit = if (flushNext()) {
52 | flush()
53 | }
54 |
55 | def withMaxBuffer(maxBuffer: Int): AsynchronousLogHandle = copy(maxBuffer = maxBuffer)
56 |
57 | def withOverflow(overflow: Overflow): AsynchronousLogHandle = copy(overflow = overflow)
58 |
59 | override def log(handler: LogHandlerBuilder, record: LogRecord): Unit = {
60 | val add = if (!cached.incrementIfLessThan(maxBuffer)) {
61 | overflow match {
62 | case Overflow.DropOld => {
63 | queue.poll()
64 | true
65 | }
66 | case Overflow.DropNew => false
67 | case Overflow.Block => {
68 | while(!cached.incrementIfLessThan(maxBuffer)) {
69 | Thread.sleep(1L)
70 | }
71 | true
72 | }
73 | case Overflow.Error => throw new LogOverflowException(s"Queue filled (max: $maxBuffer) while attempting to asynchronously log")
74 | }
75 | } else {
76 | true
77 | }
78 | if (add) {
79 | queue.add(handler -> record)
80 | }
81 | }
82 | }
83 |
84 | object AsynchronousLogHandle {
85 | /**
86 | * The default max buffer of log records (set to 1000)
87 | */
88 | val DefaultMaxBuffer: Int = 1000
89 | }
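Usage sketch (illustrative, not part of this file). It assumes Logger.withHandler accepts a handle parameter mirroring LogHandler.apply elsewhere in this repository; the logger name and buffer size are arbitrary.

import scribe.Logger
import scribe.handler.{AsynchronousLogHandle, Overflow}

val asyncHandle = AsynchronousLogHandle(
  maxBuffer = 10000,         // queue up to 10k records
  overflow = Overflow.Block  // apply back-pressure instead of dropping
)

val logger = Logger("service")
  .orphan()
  .withHandler(handle = asyncHandle)

logger.info("logged off the calling thread")
asyncHandle.flush() // drain the queue, e.g. before shutdown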
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/scribe/handler/AtomicLongExtras.scala:
--------------------------------------------------------------------------------
1 | package scribe.handler
2 |
3 | import java.util.concurrent.atomic.AtomicLong
4 | import scala.annotation.tailrec
5 |
6 | class AtomicLongExtras(val value: AtomicLong) extends AnyVal {
7 | /**
8 |    * Modifies the value atomically, without locking, if the resulting value of the function is Some.
9 | */
10 | @tailrec
11 | final def modify(f: Long => Option[Long]): Boolean = {
12 | val current = value.get()
13 | f(current) match {
14 | case Some(v) => if (value.compareAndSet(current, v)) {
15 | true
16 | }
17 | else {
18 | modify(f)
19 | }
20 | case None => false
21 | }
22 | }
23 |
24 | /**
25 | * Increments and returns the new value
26 | */
27 | def ++ : Long = value.incrementAndGet()
28 |
29 | /**
30 | * Decrements and returns the new value
31 | */
32 | def -- : Long = value.decrementAndGet()
33 |
34 | /**
35 | * Adds the value and returns the new value
36 | */
37 | def +=(value: Long): Long = this.value.addAndGet(value)
38 |
39 | /**
40 | * Subtracts the value and returns the new value
41 | */
42 | def -=(value: Long): Long = this.value.addAndGet(-value)
43 |
44 | /**
45 | * Increments the value if the current value is less than the max value supplied.
46 | *
47 | * This method is thread-safe without locking.
48 | */
49 | def incrementIfLessThan(max: Int): Boolean = modify((value: Long) => {
50 | if (value < max) {
51 | Some(value + 1)
52 | }
53 | else {
54 | None
55 | }
56 | })
57 |
58 | /**
59 |    * Decrements the value if the current value is greater than the minimum value supplied.
60 | *
61 | * This method is thread-safe without locking.
62 | */
63 | def decrementIfGreaterThan(min: Long): Boolean = modify((value: Long) => {
64 | if (value > min) {
65 | Some(value - 1)
66 | }
67 | else {
68 | None
69 | }
70 | })
71 |
72 |   def setIfCondition(condition: Long => Boolean, value: Long): Boolean = modify((current: Long) => {
73 |     Some(value).filter(_ => condition(current))
74 |   })
75 | }
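Illustrative sketch (not part of this file): the implicit conversion in the scribe.handler package object makes these extras available on any AtomicLong.

import java.util.concurrent.atomic.AtomicLong
import scribe.handler._ // brings the AtomicLongExtras implicit conversion into scope

val counter = new AtomicLong(0L)

// Lock-free, CAS-based bounded increment: succeeds only while counter < 3.
counter.incrementIfLessThan(3) // true  (0 -> 1)
counter.incrementIfLessThan(3) // true  (1 -> 2)
counter.incrementIfLessThan(3) // true  (2 -> 3)
counter.incrementIfLessThan(3) // false (bound reached)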
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/scribe/handler/CachingLogHandler.scala:
--------------------------------------------------------------------------------
1 | package scribe.handler
2 |
3 | import scribe.LogRecord
4 |
5 | import java.util.concurrent.ConcurrentLinkedQueue
6 | import java.util.concurrent.atomic.AtomicLong
7 |
8 | /**
9 | * CachingLogHandler provides a convenient LogHandler to cache LogRecords and drop old records if the record count
10 | * overflows.
11 | */
12 | case class CachingLogHandler(maxBuffer: Int = CachingLogHandler.DefaultMaxBuffer) extends LogHandler {
13 | private lazy val cached = new AtomicLong(0L)
14 | private lazy val queue = new ConcurrentLinkedQueue[LogRecord]
15 |
16 | override def log(record: LogRecord): Unit = {
17 | if (!cached.incrementIfLessThan(maxBuffer)) {
18 | queue.poll() // Drop oldest
19 | }
20 | queue.add(record)
21 | }
22 |
23 | def poll(): Option[LogRecord] = {
24 | val option = Option(queue.poll())
25 | if (option.nonEmpty) cached.decrementAndGet()
26 | option
27 | }
28 |
29 | def size: Long = cached.get()
30 | }
31 |
32 | object CachingLogHandler {
33 | /**
34 | * The default max buffer of log records (set to 1000)
35 | */
36 | val DefaultMaxBuffer: Int = 1000
37 | }
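Usage sketch (illustrative, not part of this file), feeding the handler directly with records built via LogRecord.simple; names and values are arbitrary.

import scribe.handler.CachingLogHandler
import scribe.{Level, LogRecord}

val caching = CachingLogHandler(maxBuffer = 100)

// Once 100 records are buffered, the oldest is silently dropped on each add.
caching.log(LogRecord.simple("boot complete", fileName = "Main.scala", className = "Main", level = Level.Info))

caching.size                                                // 1
caching.poll().foreach(r => println(r.logOutput.plainText)) // "boot complete"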
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/scribe/handler/Overflow.scala:
--------------------------------------------------------------------------------
1 | package scribe.handler
2 |
3 | sealed trait Overflow
4 |
5 | /**
6 | * Overflow instructions for AsynchronousLogHandler
7 | */
8 | object Overflow {
9 | /**
10 |    * Drops the oldest record once the buffer exceeds the max
11 | */
12 | case object DropOld extends Overflow
13 |
14 | /**
15 | * Drops the new messages
16 | */
17 | case object DropNew extends Overflow
18 |
19 | /**
20 | * Blocks until the buffer falls below max
21 | */
22 | case object Block extends Overflow
23 |
24 | /**
25 | * Throws an exception if the buffer overflows
26 | */
27 | case object Error extends Overflow
28 | }
--------------------------------------------------------------------------------
/core/jvm/src/main/scala/scribe/handler/package.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import java.util.concurrent.atomic.AtomicLong
4 | import scala.language.implicitConversions
5 |
6 | package object handler {
7 | implicit def atomicExtras(l: AtomicLong): AtomicLongExtras = new AtomicLongExtras(l)
8 | }
--------------------------------------------------------------------------------
/core/jvm/src/test/scala/specs/AsynchronousLoggingSpec.scala:
--------------------------------------------------------------------------------
1 | package specs
2 |
3 | import org.scalatest.matchers.should.Matchers
4 | import org.scalatest.wordspec.AnyWordSpec
5 | import scribe.Execution.global
6 | import scribe.format._
7 | import scribe.output.LogOutput
8 | import scribe.output.format.OutputFormat
9 | import scribe.writer.Writer
10 | import scribe.{LogRecord, Logger}
11 |
12 | import java.util.concurrent.ConcurrentLinkedQueue
13 | import scala.concurrent.Future
14 | import scala.jdk.CollectionConverters._
15 | import scala.language.implicitConversions
16 |
17 | class AsynchronousLoggingSpec extends AnyWordSpec with Matchers {
18 | private val Regex = """(\d+) - (.+)""".r
19 | private val threads = "abcdefghijklmnopqrstuvwxyz"
20 | private val iterations = 10
21 | private val total = threads.length * iterations
22 |
23 | "Asynchronous Logging" should {
24 | s"log $total records in the proper order with simple logging" in {
25 | val queue = new ConcurrentLinkedQueue[String]
26 | val logger = Logger.empty.orphan().withHandler(
27 | formatter = AsynchronousLoggingSpec.format,
28 | writer = new Writer {
29 | override def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit = queue.add(output.plainText.trim)
30 | }
31 | )
32 |
33 | Future.sequence(threads.map { char =>
34 | Future {
35 | (0 until iterations).foreach { index =>
36 | logger.info(s"$char:$index")
37 | }
38 | }
39 | }).map { _ =>
40 | var previous = 0L
41 | queue.iterator().asScala.foreach {
42 | case Regex(ts, _) => {
43 | val timeStamp = ts.toLong
44 | timeStamp should be >= previous
45 | previous = timeStamp
46 | }
47 | }
48 | queue.size() should be(total)
49 | }
50 | }
51 | /*s"log $total records in the proper order with file logging" in {
52 | val file = new File("logs/app.log")
53 | file.delete()
54 |
55 | val fileWriter = FileWriter().nio
56 | val logger = Logger.empty.orphan().withHandler(
57 | formatter = AsynchronousLoggingSpec.format,
58 | writer = fileWriter,
59 | outputFormat = ASCIIOutputFormat
60 | )
61 |
62 | Future.sequence(threads.map { char =>
63 | Future {
64 | (0 until iterations).foreach { index =>
65 | logger.info(s"$char:$index")
66 | }
67 | }
68 | }).map { _ =>
69 | var previous = 0L
70 | fileWriter.flush()
71 | fileWriter.dispose()
72 | val source = Source.fromFile(file)
73 | try {
74 | val lines = source.getLines().toList
75 | lines.foreach {
76 | case Regex(ts, message) => {
77 | val timeStamp = ts.toLong
78 | timeStamp should be >= previous
79 | previous = timeStamp
80 | }
81 | }
82 | lines.length should be(threads.length * iterations)
83 | } finally {
84 | source.close()
85 | }
86 | }
87 | }*/
88 | }
89 | }
90 |
91 | object AsynchronousLoggingSpec {
92 | val format = formatter"$timeStamp - $messages"
93 | }
--------------------------------------------------------------------------------
/core/jvm/src/test/scala/specs/MDCThreadSpec.scala:
--------------------------------------------------------------------------------
1 | package specs
2 |
3 | import org.scalatest.matchers.should.Matchers
4 | import org.scalatest.wordspec.AnyWordSpec
5 | import scribe.Logger
6 | import scribe.mdc._
7 | import scribe.writer.CacheWriter
8 |
9 | class MDCThreadSpec extends AnyWordSpec with Matchers {
10 | private lazy val writer = new CacheWriter
11 | private lazy val logger = Logger("mdc-test").orphan().withHandler(writer = writer)
12 |
13 | "MDC" should {
14 | "verify concurrency access to implicits" in {
15 | implicit val mdc: MDC = MDC.instance
16 | logger.info("Zero")
17 | mdc.context("test" -> "testing") {
18 | logger.info("One")
19 | mdc.context("test" -> "testing2") {
20 | logger.info("Two")
21 |         // Join the child thread rather than busy-waiting on a non-volatile flag (a data race).
22 |         val child = new Thread {
23 |           override def run(): Unit = {
24 |             logger.info("Three")
25 |           }
26 |         }
27 |         child.start()
28 |         // The child inherits this context's MDC through InheritableThreadLocal,
29 |         // so "Three" is logged with test -> testing2.
30 |         child.join()
31 | }
32 | logger.info("Four")
33 | }
34 | logger.info("Five")
35 | writer.consume { list =>
36 | list.map(r => r.messages.head.logOutput.plainText -> r.data.get("test").map(_())) should be(List(
37 | "Five" -> None,
38 | "Four" -> Some("testing"),
39 | "Three" -> Some("testing2"),
40 | "Two" -> Some("testing2"),
41 | "One" -> Some("testing"),
42 | "Zero" -> None
43 | ))
44 | }
45 | }
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/core/native/src/main/scala/scribe/Platform.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import scribe.output.format.{ANSIOutputFormat, ASCIIOutputFormat, OutputFormat}
4 | import scribe.writer.{SystemWriter, Writer}
5 |
6 | import scala.concurrent.ExecutionContext
7 |
8 | object Platform extends PlatformImplementation {
9 | def isJVM: Boolean = false
10 | def isJS: Boolean = false
11 | def isNative: Boolean = true
12 |
13 | def init(): Unit = {}
14 |
15 | override def consoleWriter: Writer = SystemWriter
16 |
17 | override val columns: Int = 120 + columnsAdjust
18 |
19 | override def rows: Int = -1
20 |
21 | override def executionContext: ExecutionContext = ExecutionContext.global
22 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/Execution.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import scala.concurrent.ExecutionContext
4 |
5 | object Execution {
6 | implicit def global: ExecutionContext = Platform.executionContext
7 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/Level.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | case class Level(name: String, value: Double) extends LogFeature {
4 | def namePadded: String = Level.padded(this)
5 |
6 | Level.add(this)
7 |
8 | override def apply(record: LogRecord): LogRecord = record.copy(level = this, levelValue = value)
9 | }
10 |
11 | object Level {
12 | private var maxLength = 0
13 |
14 | private var map = Map.empty[String, Level]
15 | private var padded = Map.empty[Level, String]
16 |
17 | implicit final val LevelOrdering: Ordering[Level] = Ordering.by[Level, Double](_.value).reverse
18 |
19 | val Trace: Level = Level("TRACE", 100.0)
20 | val Debug: Level = Level("DEBUG", 200.0)
21 | val Info: Level = Level("INFO", 300.0)
22 | val Warn: Level = Level("WARN", 400.0)
23 | val Error: Level = Level("ERROR", 500.0)
24 | val Fatal: Level = Level("FATAL", 600.0)
25 |
26 | def add(level: Level): Unit = synchronized {
27 | val length = level.name.length
28 | map += level.name.toLowerCase -> level
29 | if (length > maxLength) {
30 | maxLength = length
31 | padded = map.map {
32 | case (_, level) => level -> level.name.padTo(maxLength, " ").mkString
33 | }
34 | } else {
35 | padded += level -> level.name.padTo(maxLength, " ").mkString
36 | }
37 | }
38 |
39 | def get(name: String): Option[Level] = map.get(name.toLowerCase)
40 |
41 | def apply(name: String): Level = get(name).getOrElse(throw new RuntimeException(s"Level not found by name: $name"))
42 | }
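Illustrative sketch (not part of this file): constructing a Level registers it via Level.add, so custom levels can slot between the built-ins.

import scribe.Level

// A custom level between Info (300.0) and Warn (400.0).
val Notice: Level = Level("NOTICE", 350.0)

Level.get("notice") // Some(Notice) -- lookup is case-insensitive
Notice.namePadded   // "NOTICE", padded to the longest registered name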
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/LogFeature.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import scribe.message.LoggableMessage
4 | import scribe.output.LogOutput
5 | import scribe.throwable.TraceLoggableMessage
6 |
7 | import scala.language.implicitConversions
8 |
9 | trait LogFeature {
10 | def apply(record: LogRecord): LogRecord
11 | }
12 |
13 | object LogFeature {
14 | def apply(f: LogRecord => LogRecord): LogFeature = (record: LogRecord) => f(record)
15 |
16 | implicit def stringFunc2LoggableMessage(f: () => String): LogFeature = LoggableMessage.string2LoggableMessage(f())
17 | implicit def string2LoggableMessage(s: => String): LogFeature = LoggableMessage.string2LoggableMessage(s)
18 | implicit def logOutput2LoggableMessage(lo: => LogOutput): LogFeature = LoggableMessage[LogOutput](identity)(lo)
19 | implicit def throwable2LoggableMessage(throwable: => Throwable): LogFeature = TraceLoggableMessage(throwable)
20 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/LogRecord.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import scribe.format.FormatBlock
4 | import scribe.format.FormatBlock.NewLine
5 | import scribe.message.LoggableMessage
6 | import scribe.modify.LogModifier
7 | import scribe.output.{CompositeOutput, LogOutput}
8 | import scribe.util.Time
9 |
10 | import java.util.concurrent.atomic.AtomicLong
11 | import scala.annotation.tailrec
12 |
13 | case class LogRecord(level: Level,
14 | levelValue: Double,
15 | messages: List[LoggableMessage],
16 | fileName: String,
17 | className: String,
18 | methodName: Option[String],
19 | line: Option[Int],
20 | column: Option[Int],
21 | thread: Thread = Thread.currentThread(),
22 | data: Map[String, () => Any] = Map.empty,
23 | timeStamp: Long = Time()) {
24 | protected var appliedModifierIds = Set.empty[String]
25 |
26 | final val id: Long = LogRecord.incrementor.incrementAndGet()
27 |
28 | lazy val logOutput: LogOutput = generateLogOutput()
29 |
30 | protected def generateLogOutput(): LogOutput = messages match {
31 | case msg :: Nil => msg.logOutput
32 | case list => new CompositeOutput(
33 | list.flatMap { message =>
34 | List(LogRecord.messageSeparator.format(this), message.logOutput)
35 | }.drop(1)
36 | )
37 | }
38 |
39 | def withFeatures(features: LogFeature*): LogRecord = features.foldLeft(this)((record, feature) => feature(record))
40 |
41 | def withMessages(messages: LoggableMessage*): LogRecord = copy(messages = this.messages ::: messages.toList)
42 |
43 | def get(key: String): Option[Any] = data.get(key).map(_())
44 | def update(key: String, value: () => Any): LogRecord = copy(data = data + (key -> value))
45 |
46 | def boost(booster: Double => Double): LogRecord = copy(levelValue = booster(levelValue))
47 | def checkModifierId(id: String, add: Boolean = true): Boolean = id match {
48 | case "" => false
49 | case _ if appliedModifierIds.contains(id) => true
50 | case _ =>
51 | if (add) appliedModifierIds += id
52 | false
53 | }
54 | def modify(modifier: LogModifier): Option[LogRecord] = if (checkModifierId(modifier.id)) {
55 | Some(this)
56 | } else {
57 | modifier(this)
58 | }
59 | @tailrec
60 | final def modify(modifiers: List[LogModifier]): Option[LogRecord] = if (modifiers.isEmpty) {
61 | Some(this)
62 | } else {
63 | modify(modifiers.head) match {
64 | case None => None
65 | case Some(record) => record.modify(modifiers.tail)
66 | }
67 | }
68 | }
69 |
70 | object LogRecord {
71 | private val incrementor = new AtomicLong(0L)
72 |
73 | /**
74 | * The separator between multiple messages for the same LogRecord. Defaults to NewLine.
75 | */
76 | var messageSeparator: FormatBlock = NewLine
77 |
78 | def simple(message: String,
79 | fileName: String,
80 | className: String,
81 | methodName: Option[String] = None,
82 | line: Option[Int] = None,
83 | column: Option[Int] = None,
84 | level: Level = Level.Info,
85 | thread: Thread = Thread.currentThread(),
86 | data: Map[String, () => Any] = Map.empty,
87 | timeStamp: Long = Time()): LogRecord = {
88 | apply(
89 | level = level,
90 | levelValue = level.value,
91 | messages = List(message),
92 | fileName = fileName,
93 | className = className,
94 | methodName = methodName,
95 | line = line,
96 | column = column,
97 | thread = thread,
98 | data = data,
99 | timeStamp = timeStamp
100 | )
101 | }
102 | }
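Usage sketch (illustrative, not part of this file): building a record by hand and dispatching it to a logger, the same pattern JULHandler below uses to bridge java.util.logging records; names are arbitrary.

import scribe.{Level, Logger, LogRecord}

val record = LogRecord.simple(
  message = "manual record",
  fileName = "Example.scala",
  className = "com.example.Example",
  level = Level.Warn
)

Logger("com.example.Example").log(record)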
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/LogRecordCreator.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import scribe.message.LoggableMessage
4 | import scribe.util.Time
5 |
6 | trait LogRecordCreator {
7 | def apply(level: Level,
8 | value: Double,
9 | messages: List[LoggableMessage],
10 | fileName: String,
11 | className: String,
12 | methodName: Option[String],
13 | line: Option[Int],
14 | column: Option[Int],
15 | thread: Thread = Thread.currentThread(),
16 | data: Map[String, () => Any] = Map.empty,
17 | timeStamp: Long = Time()): LogRecord
18 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/Loggable.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import scribe.output.LogOutput
4 |
5 | import scala.language.implicitConversions
6 |
7 | trait Loggable[-T] {
8 | def apply(value: T): LogOutput
9 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/LoggerId.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import java.util.concurrent.atomic.AtomicLong
4 |
5 | final case class LoggerId(value: Long) extends AnyVal
6 |
7 | object LoggerId {
8 | private val counter = new AtomicLong(0L)
9 |
10 | def apply(): LoggerId = new LoggerId(counter.incrementAndGet())
11 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/Logging.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | /**
4 | * Logging is a mix-in to conveniently add logging support to any class or object.
5 | */
6 | trait Logging {
7 | /**
8 | * Override this to change the name of the underlying logger.
9 | *
10 | * Defaults to class name with package
11 | */
12 | protected def loggerName: String = getClass.getName
13 |
14 | /**
15 | * The logger for this class.
16 | */
17 | protected def logger: Logger = Logger(loggerName)
18 | }
19 |
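Usage sketch (illustrative, not part of this file); the class name is arbitrary.

import scribe.Logging

class OrderService extends Logging {
  // `logger` comes from the mix-in and is named after this class.
  def place(id: String): Unit = logger.info(s"placing order $id")
}

new OrderService().place("42")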
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/LoggingOutputStream.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import java.io.OutputStream
4 | import scala.collection.mutable
5 |
6 | class LoggingOutputStream(loggerId: LoggerId,
7 | level: Level,
8 | className: String,
9 | methodName: Option[String]) extends OutputStream {
10 | private lazy val b = new mutable.StringBuilder
11 |
12 | override def write(byte: Int): Unit = byte.toChar match {
13 | case '\n' => {
14 | Logger(loggerId).logDirect(level, List(b.toString()), className = className, methodName = methodName)
15 | b.clear()
16 | }
17 | case c => b.append(c)
18 | }
19 | }
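Sketch (illustrative, not part of this file) of redirecting a JVM stream through a logger. It assumes Logger exposes its LoggerId as `id`; the target logger should write somewhere other than the console to avoid a feedback loop.

import java.io.PrintStream
import scribe.{Level, Logger, LoggingOutputStream}

val stdoutLogger = Logger("stdout")

// Every completed line written to System.out becomes an Info-level record.
val stream = new LoggingOutputStream(stdoutLogger.id, Level.Info, className = "stdout", methodName = None)
System.setOut(new PrintStream(stream, true))

println("now captured as a log record")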
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/MinimumLevel.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | trait MinimumLevel {
4 | def logger: Logger
5 |
6 | def minimumLevel: Level
7 | }
8 |
9 | object MinimumLevel {
10 | implicit class FromString(in: (String, Level)) extends MinimumLevel {
11 | override def logger: Logger = Logger(in._1)
12 |
13 | override def minimumLevel: Level = in._2
14 | }
15 |
16 | implicit class FromClass(in: (Class[_], Level)) extends MinimumLevel {
17 | override def logger: Logger = Logger(in._1.getName)
18 |
19 | override def minimumLevel: Level = in._2
20 | }
21 |
22 | implicit class FromLogger(in: (Logger, Level)) extends MinimumLevel {
23 | override def logger: Logger = in._1
24 |
25 | override def minimumLevel: Level = in._2
26 | }
27 |
28 | implicit class FromId(in: (LoggerId, Level)) extends MinimumLevel {
29 | override def logger: Logger = Logger(in._1)
30 |
31 | override def minimumLevel: Level = in._2
32 | }
33 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/PlatformImplementation.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import scribe.output.format.{ANSIOutputFormat, ASCIIOutputFormat, OutputFormat}
4 | import scribe.writer.Writer
5 |
6 | import scala.concurrent.ExecutionContext
7 |
8 | trait PlatformImplementation {
9 | var columnsAdjust: Int = 0
10 |
11 | def isJVM: Boolean
12 | def isJS: Boolean
13 | def isNative: Boolean
14 |
15 | def consoleWriter: Writer
16 |
17 | def columns: Int
18 | def rows: Int
19 |
20 | def executionContext: ExecutionContext
21 |
22 | def env(key: String): Option[String] = sys.env.get(key)
23 |
24 | lazy val supportsANSI: Boolean = env("TERM").nonEmpty
25 |
26 | def outputFormat(): OutputFormat = env("SCRIBE_OUTPUT_FORMAT").map(_.toUpperCase) match {
27 | case Some("ANSI") => ANSIOutputFormat
28 | case Some("ASCII") => ASCIIOutputFormat
29 | case None if supportsANSI => ANSIOutputFormat
30 | case None => ASCIIOutputFormat
31 | case f =>
32 | System.err.println(s"Unexpected output format specified in SCRIBE_OUTPUT_FORMAT: $f, using ASCII")
33 | ASCIIOutputFormat
34 | }
35 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/Priority.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | class Priority(val value: Double) extends AnyVal {
4 |   def +(value: Double): Priority = new Priority(this.value + value)
5 |   def -(value: Double): Priority = new Priority(this.value - value)
6 | }
7 |
8 | object Priority {
9 | implicit final val PriorityOrdering: Ordering[Priority] = Ordering.by[Priority, Double](_.value).reverse
10 |
11 | lazy val Highest: Priority = new Priority(Double.MaxValue)
12 | lazy val Critical: Priority = new Priority(1000.0)
13 | lazy val Important: Priority = new Priority(100.0)
14 | lazy val High: Priority = new Priority(10.0)
15 | lazy val Normal: Priority = new Priority(0.0)
16 | lazy val Low: Priority = new Priority(-10.0)
17 | lazy val Lower: Priority = new Priority(-100.0)
18 | lazy val Fallthrough: Priority = new Priority(-1000.0)
19 | lazy val Lowest: Priority = new Priority(Double.MinValue)
20 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/filter/AndFilters.scala:
--------------------------------------------------------------------------------
1 | package scribe.filter
2 |
3 | import scribe.LogRecord
4 |
5 | case class AndFilters(filters: List[Filter]) extends Filter {
6 | override def matches(record: LogRecord): Boolean = filters.forall(_.matches(record))
7 |
8 | override def &&(that: Filter): Filter = copy(filters ::: List(that))
9 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/filter/ClassNameFilter.scala:
--------------------------------------------------------------------------------
1 | package scribe.filter
2 |
3 | import scribe.LogRecord
4 |
5 | /**
6 | * Filter matcher based on the class name
7 | */
8 | object ClassNameFilter extends FilterMatcher {
9 | override protected def string(record: LogRecord): String = record.className
10 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/filter/Filter.scala:
--------------------------------------------------------------------------------
1 | package scribe.filter
2 |
3 | import scribe.LogRecord
4 |
5 | /**
6 | * Filter for use in FilterBuilder, which is a LogModifier
7 | */
8 | trait Filter {
9 | def matches(record: LogRecord): Boolean
10 |
11 | def &&(that: Filter): Filter = AndFilters(List(this, that))
12 | def ||(that: Filter): Filter = OrFilters(List(this, that))
13 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/filter/FilterBuilder.scala:
--------------------------------------------------------------------------------
1 | package scribe.filter
2 |
3 | import scribe.modify.LogModifier
4 | import scribe.{Level, LogRecord, Priority}
5 |
6 | /**
7 | * FilterBuilder allows convenient log modification
8 | */
9 | case class FilterBuilder(priority: Priority = Priority.Normal,
10 | select: List[Filter] = Nil,
11 | include: List[Filter] = Nil,
12 | exclude: List[Filter] = Nil,
13 | booster: Double => Double = d => d,
14 | _excludeUnselected: Boolean = false,
15 | id: String = "") extends LogModifier {
16 | def select(filters: Filter*): FilterBuilder = copy(select = select ::: filters.toList)
17 | def include(filters: Filter*): FilterBuilder = copy(include = include ::: filters.toList)
18 | def exclude(filters: Filter*): FilterBuilder = copy(exclude = exclude ::: filters.toList)
19 |
20 | def excludeUnselected: FilterBuilder = copy(_excludeUnselected = true)
21 | def includeUnselected: FilterBuilder = copy(_excludeUnselected = false)
22 |
23 | def boost(booster: Double => Double): FilterBuilder = copy(booster = booster)
24 | def setLevel(level: Level): FilterBuilder = boost(_ => level.value)
25 | def boostOneLevel: FilterBuilder = boost(d => d + 100.0)
26 | def boosted(minimumLevel: Level,
27 | destinationLevel: Level): FilterBuilder = {
28 | boost(d => if (d >= minimumLevel.value && d <= destinationLevel.value) {
29 | destinationLevel.value
30 | } else {
31 | d
32 | })
33 | }
34 |
35 | def priority(priority: Priority): FilterBuilder = copy(priority = priority)
36 |
37 | override def apply(record: LogRecord): Option[LogRecord] = {
38 | if (select.isEmpty || select.exists(_.matches(record))) {
39 | val incl = include.forall(_.matches(record))
40 | val excl = exclude.exists(_.matches(record))
41 | if (incl && !excl) {
42 | val boosted = record.boost(booster)
43 | Some(boosted)
44 | } else {
45 | None
46 | }
47 | } else if (_excludeUnselected) {
48 | None
49 | } else {
50 | Some(record)
51 | }
52 | }
53 |
54 | override def withId(id: String): LogModifier = copy(id = id)
55 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/filter/FilterMatcher.scala:
--------------------------------------------------------------------------------
1 | package scribe.filter
2 |
3 | import scribe.LogRecord
4 |
5 | /**
6 | * Matcher for use with filters
7 | */
8 | trait FilterMatcher {
9 | protected def string(record: LogRecord): String
10 |
11 | def apply(exact: String): Filter = new Filter {
12 | override def matches(record: LogRecord): Boolean = string(record) == exact
13 | }
14 | def contains(value: String): Filter = new Filter {
15 | override def matches(record: LogRecord): Boolean = string(record).contains(value)
16 | }
17 | def startsWith(value: String): Filter = new Filter {
18 | override def matches(record: LogRecord): Boolean = string(record).startsWith(value)
19 | }
20 | def endsWith(value: String): Filter = new Filter {
21 | override def matches(record: LogRecord): Boolean = string(record).endsWith(value)
22 | }
23 | def regex(regex: String): Filter = new Filter {
24 | override def matches(record: LogRecord): Boolean = string(record).matches(regex)
25 | }
26 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/filter/OrFilters.scala:
--------------------------------------------------------------------------------
1 | package scribe.filter
2 |
3 | import scribe.LogRecord
4 |
5 | case class OrFilters(filters: List[Filter]) extends Filter {
6 | override def matches(record: LogRecord): Boolean = filters.exists(_.matches(record))
7 |
8 | override def ||(that: Filter): Filter = copy(filters ::: List(that))
9 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/filter/PackageNameFilter.scala:
--------------------------------------------------------------------------------
1 | package scribe.filter
2 |
3 | import scribe.LogRecord
4 |
5 | /**
6 | * Filters based on the package name
7 | */
8 | object PackageNameFilter extends FilterMatcher {
9 | override protected def string(record: LogRecord): String = {
10 | val index = record.className.lastIndexOf('.')
11 | if (index > 0) {
12 | record.className.substring(0, index)
13 | } else {
14 | record.className
15 | }
16 | }
17 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/filter/package.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import scribe.modify.LevelFilter
4 |
5 | package object filter extends FilterBuilder() {
6 | def level: LevelFilter.type = LevelFilter
7 | def packageName: PackageNameFilter.type = PackageNameFilter
8 | def className: ClassNameFilter.type = ClassNameFilter
9 | }
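Illustrative sketch (not part of this file) of the filter DSL this package object exposes. It assumes the Logger.root / withModifier / replace() API from the wider library, which is not shown in this excerpt; the package name is arbitrary.

import scribe.{Level, Logger}
import scribe.filter._

// Keep only Info-and-above records from our own packages; drop everything else.
val onlyOurs = select(packageName.startsWith("com.example"))
  .include(level >= Level.Info)
  .excludeUnselected

Logger.root.withModifier(onlyOurs).replace()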
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/format/AbbreviateBlock.scala:
--------------------------------------------------------------------------------
1 | package scribe.format
2 |
3 | import scribe.LogRecord
4 | import scribe.output.{LogOutput, TextOutput}
5 | import scribe.util.Abbreviator
6 |
7 | class AbbreviateBlock(block: FormatBlock,
8 | maxLength: Int,
9 | separator: Char,
10 | removeEntries: Boolean,
11 | abbreviateName: Boolean) extends FormatBlock {
12 | private lazy val cache = new ThreadLocal[Map[String, TextOutput]] {
13 | override def initialValue(): Map[String, TextOutput] = Map.empty
14 | }
15 |
16 | override def format(record: LogRecord): LogOutput = {
17 | val value = block.format(record).plainText
18 | val map = cache.get()
19 | map.get(value) match {
20 | case Some(output) => output
21 | case None =>
22 | val abbreviated = Abbreviator(value, maxLength, separator, removeEntries, abbreviateName)
23 | val output = new TextOutput(abbreviated)
24 | cache.set(map + (value -> output))
25 | output
26 | }
27 | }
28 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/format/CachingFormatBlock.scala:
--------------------------------------------------------------------------------
1 | package scribe.format
2 |
3 | import scribe.LogRecord
4 | import scribe.output.LogOutput
5 |
6 | /**
7 | * Adds time-based caching of output to reduce LogOutput cost
8 | */
9 | trait CachingFormatBlock extends FormatBlock {
10 | /**
11 | * The amount of time in milliseconds to cache each generation of the LogOutput
12 | */
13 | protected def cacheLength: Long
14 |
15 | private lazy val cache = new ThreadLocal[LogOutput]
16 | private lazy val lastTimeStamp = new ThreadLocal[Long] {
17 | override def initialValue(): Long = 0L
18 | }
19 |
20 | override final def format(record: LogRecord): LogOutput = {
21 | val timeStamp = record.timeStamp
22 | if (timeStamp - lastTimeStamp.get() > cacheLength) {
23 | val output = formatCached(record)
24 | cache.set(output)
25 | lastTimeStamp.set(timeStamp)
26 | output
27 | } else {
28 | cache.get()
29 | }
30 | }
31 |
32 | protected def formatCached(record: LogRecord): LogOutput
33 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/format/FormatBlocksFormatter.scala:
--------------------------------------------------------------------------------
1 | package scribe.format
2 |
3 | import scribe.LogRecord
4 | import scribe.output.{CompositeOutput, LogOutput}
5 |
6 | class FormatBlocksFormatter(blocks: List[FormatBlock]) extends Formatter {
7 | override def format(record: LogRecord): LogOutput = {
8 | new CompositeOutput(blocks.map(_.format(record)))
9 | }
10 |
11 | override def toString: String = s"blocks(${blocks.mkString(", ")})"
12 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/format/Formatter.scala:
--------------------------------------------------------------------------------
1 | package scribe.format
2 |
3 | import scribe.LogRecord
4 | import scribe.output.LogOutput
5 |
6 | trait Formatter {
7 | def format(record: LogRecord): LogOutput
8 | }
9 |
10 | object Formatter {
11 | /**
12 | * Only includes the log message and MDC
13 | */
14 | lazy val simple: Formatter = formatter"$messages$mdc"
15 | /**
16 | * Only includes the log message and MDC, but the message is colored based on the logging level
17 | */
18 | lazy val colored: Formatter = formatter"${levelColor(messages)}$mdc"
19 | /**
20 | * A classic logging style including the date, thread name (abbreviated), level, position, message, and MDC
21 | */
22 | lazy val classic: Formatter = formatter"$date [$threadNameAbbreviated] $level $position - $messages$mdc"
23 | /**
24 | * Colored, but more compact output to show more on a single line
25 | */
26 | lazy val compact: Formatter = formatter"$date ${string("[")}$levelColored${string("]")} ${green(position)} - $messages$mdc"
27 | /**
28 |    * A rich log output format with coloring and lots of details. This was the original default format, but it has
29 |    * been replaced as the default by the multi-line "advanced" format.
30 | */
31 | lazy val enhanced: Formatter = Formatter.fromBlocks(
32 | dateCounter,
33 | space,
34 | openBracket,
35 | threadNameAbbreviated,
36 | closeBracket,
37 | space,
38 | openBracket,
39 | levelColoredPaddedRight,
40 | closeBracket,
41 | space,
42 | green(position),
43 | string(" - "),
44 | messages,
45 | mdc
46 | )
47 | /**
48 | * A multi-line formatter that includes expanded log information on the first line, and indented and auto-wrapping
49 | * message and MDC on the following line(s). The default format.
50 | */
51 | lazy val advanced: Formatter = Formatter.fromBlocks(
52 | groupBySecond(
53 | cyan(bold(dateFull)),
54 | space,
55 | italic(threadName),
56 | space,
57 | levelColored,
58 | space,
59 | green(position),
60 | newLine
61 | ),
62 | multiLine(messages),
63 | multiLine(mdcMultiLine)
64 | )
65 | /**
66 | * A strict format with a focus on consistent width.
67 | */
68 | lazy val strict: Formatter = formatter"$date [$threadNameAbbreviated] $levelPaddedRight $positionAbbreviated - $messages$mdc"
69 | /**
70 |    * The default formatter. This is used when a formatter isn't explicitly specified. Defaults to
71 |    * advanced.
72 | */
73 | var default: Formatter = advanced
74 |
75 | def fromBlocks(blocks: FormatBlock*): Formatter = new FormatBlocksFormatter(blocks.toList)
76 | }
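Sketch (illustrative, not part of this file): defining a custom formatter with the same `formatter` interpolator used by the presets above and attaching it to a logger; the logger name is arbitrary.

import scribe.Logger
import scribe.format._

// Level, position and message on a single line.
val oneLine: Formatter = formatter"$levelPaddedRight $position - $messages$mdc"

Logger("app")
  .orphan()
  .withHandler(formatter = oneLine)
  .info("custom format")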
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/format/RightPaddingBlock.scala:
--------------------------------------------------------------------------------
1 | package scribe.format
2 |
3 | import scribe.LogRecord
4 | import scribe.output.{LogOutput, TextOutput}
5 |
6 | class RightPaddingBlock(block: FormatBlock, length: Int, padding: Char) extends FormatBlock {
7 | override def format(record: LogRecord): LogOutput = {
8 | val value = block.format(record).plainText
9 |     new TextOutput(value.padTo(length, padding))
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/handler/FunctionalLogHandler.scala:
--------------------------------------------------------------------------------
1 | package scribe.handler
2 |
3 | import scribe.LogRecord
4 | import scribe.modify.LogModifier
5 |
6 | case class FunctionalLogHandler(f: LogRecord => Unit, modifiers: List[LogModifier]) extends LogHandler {
7 | def setModifiers(modifiers: List[LogModifier]): LogHandler = copy(modifiers = modifiers.sorted)
8 |
9 | override def log(record: LogRecord): Unit = {
10 | modifiers.foldLeft(Option(record))((r, lm) => r.flatMap(lm.apply)).foreach { r =>
11 | f(r)
12 | }
13 | }
14 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/handler/LogHandle.scala:
--------------------------------------------------------------------------------
1 | package scribe.handler
2 |
3 | import scribe.LogRecord
4 |
5 | trait LogHandle {
6 | def log(handler: LogHandlerBuilder, record: LogRecord): Unit
7 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/handler/LogHandler.scala:
--------------------------------------------------------------------------------
1 | package scribe.handler
2 |
3 | import scribe.format.Formatter
4 | import scribe.modify.{LevelFilter, LogModifier}
5 | import scribe.output.format.OutputFormat
6 | import scribe.writer.{ConsoleWriter, Writer}
7 | import scribe.{Level, LogRecord}
8 |
9 | /**
10 | * LogHandler is responsible for causing some side-effect with a `LogRecord`. This usually includes formatting the record
11 | * with a `Formatter` and writing it to a `Writer`, although some more creative implementations exist to do more advanced
12 | * actions. LogHandlers are added to `Logger` instances via `withHandler`, although it's usually sufficient to use the
13 | * `withHandler` method that takes a `Formatter` and `Writer` instead of defining a `LogHandler` manually.
14 | */
15 | trait LogHandler {
16 | def log(record: LogRecord): Unit
17 | }
18 |
19 | object LogHandler {
20 | def apply(formatter: Formatter = Formatter.default,
21 | writer: Writer = ConsoleWriter,
22 | minimumLevel: Option[Level] = None,
23 | modifiers: List[LogModifier] = Nil,
24 | outputFormat: OutputFormat = OutputFormat.default,
25 | handle: LogHandle = SynchronousLogHandle): LogHandlerBuilder = {
26 | val mods = (minimumLevel.map(l => (LevelFilter >= l).alwaysApply).toList ::: modifiers).sortBy(_.priority)
27 | LogHandlerBuilder(formatter, writer, outputFormat, mods, handle)
28 | }
29 |
30 | def apply(minimumLevel: Level)(f: LogRecord => Unit): FunctionalLogHandler = {
31 | FunctionalLogHandler(f, List(LevelFilter >= minimumLevel))
32 | }
33 | }
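Usage sketch (illustrative, not part of this file) of the functional LogHandler.apply overload above; it assumes Logger.withHandler also accepts a LogHandler directly.

import scribe.{Level, Logger}
import scribe.handler.LogHandler

// Collect warning-and-above output with a plain function.
val collected = scala.collection.mutable.ListBuffer.empty[String]
val handler = LogHandler(Level.Warn) { record =>
  collected += record.logOutput.plainText
}

Logger("audit").orphan().withHandler(handler).warn("disk nearly full")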
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/handler/LogHandlerBuilder.scala:
--------------------------------------------------------------------------------
1 | package scribe.handler
2 |
3 | import scribe.LogRecord
4 | import scribe.format.Formatter
5 | import scribe.modify.LogModifier
6 | import scribe.output.format.OutputFormat
7 | import scribe.writer.{ConsoleWriter, Writer}
8 |
9 | case class LogHandlerBuilder(formatter: Formatter = Formatter.default,
10 | writer: Writer = ConsoleWriter,
11 | outputFormat: OutputFormat = OutputFormat.default,
12 | modifiers: List[LogModifier] = Nil,
13 | handle: LogHandle = SynchronousLogHandle) extends LogHandler {
14 | override def log(record: LogRecord): Unit = handle.log(this, record)
15 |
16 | def withFormatter(formatter: Formatter): LogHandlerBuilder = copy(formatter = formatter)
17 |
18 | def withWriter(writer: Writer): LogHandlerBuilder = copy(writer = writer)
19 |
20 | def withModifiers(modifiers: LogModifier*): LogHandlerBuilder = copy(modifiers = modifiers.toList ::: this.modifiers)
21 |
22 | def withLogHandle(handle: LogHandle): LogHandlerBuilder = copy(handle = handle)
23 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/handler/LogOverflowException.scala:
--------------------------------------------------------------------------------
1 | package scribe.handler
2 |
3 | class LogOverflowException(message: String) extends RuntimeException(message)
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/handler/SynchronousLogHandle.scala:
--------------------------------------------------------------------------------
1 | package scribe.handler
2 |
3 | import scribe.LogRecord
4 |
5 | object SynchronousLogHandle extends LogHandle {
6 | def log(handler: LogHandlerBuilder, record: LogRecord): Unit = {
7 | record.modify(handler.modifiers).foreach { r =>
8 | val logOutput = handler.formatter.format(r)
9 |       handler.writer.write(r, logOutput, handler.outputFormat)
10 | }
11 | }
12 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/jul/JULHandler.scala:
--------------------------------------------------------------------------------
1 | package scribe.jul
2 |
3 | import scribe._
4 |
5 | import java.util.logging.{Level => JLevel}
6 |
7 | object JULHandler extends java.util.logging.Handler {
8 | override def publish(record: java.util.logging.LogRecord): Unit = {
9 | val logger = scribe.Logger(record.getLoggerName)
10 | val level = l2l(record.getLevel)
11 | val logRecord = LogRecord(
12 | level = level,
13 | levelValue = level.value,
14 | messages = List(record.getMessage),
15 | fileName = "",
16 | className = Option(record.getSourceClassName).getOrElse(record.getLoggerName),
17 | methodName = Option(record.getSourceMethodName),
18 | line = None,
19 | column = None
20 | )
21 | logger.log(logRecord)
22 | }
23 |
24 | private def l2l(level: JLevel): Level = level match {
25 | case JLevel.FINEST => Level.Trace
26 | case JLevel.FINER => Level.Trace
27 | case JLevel.FINE => Level.Trace
28 | case JLevel.CONFIG => Level.Debug
29 | case JLevel.INFO => Level.Info
30 | case JLevel.WARNING => Level.Warn
31 | case JLevel.SEVERE => Level.Error
32 | case JLevel.OFF => Level.Trace
33 | case JLevel.ALL => Level.Fatal
34 | }
35 |
36 | override def flush(): Unit = {}
37 |
38 | override def close(): Unit = {}
39 | }
40 |
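Sketch (illustrative, not part of this file): installing JULHandler so java.util.logging records are bridged into scribe, using only the standard java.util.logging API.

import java.util.logging.{Logger => JULLogger}
import scribe.jul.JULHandler

val julRoot = JULLogger.getLogger("")              // the JUL root logger
julRoot.getHandlers.foreach(julRoot.removeHandler) // drop JUL's default console handler
julRoot.addHandler(JULHandler)

JULLogger.getLogger("some.library").info("now rendered by scribe")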
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/mdc/MDC.scala:
--------------------------------------------------------------------------------
1 | package scribe.mdc
2 |
3 | import perfolation.double2Implicits
4 | import scribe.util.Time
5 |
6 | import scala.language.implicitConversions
7 |
8 | object MDC {
9 | /**
10 | * Global MDC instance. By default, all instances of MDC inherit from this.
11 | */
12 | lazy val global: MDC = creator(None)
13 |
14 | /**
15 |    * The MDCManager responsible for retrieving an MDC instance for the context. By default this uses MDCThreadLocal,
16 |    * but it can be replaced with something more advanced.
17 | */
18 | var manager: MDCManager = MDCThreadLocal
19 |
20 | /**
21 | * The function to create MDC instances. Receives the parent and creates a new MDC instance. By default, this will use
22 | * MDCMap, but this can be replaced to provide a different implementation.
23 | */
24 | var creator: Option[MDC] => MDC = parent => new MDCMap(parent)
25 |
26 | def apply[Return](f: MDC => Return): Return = {
27 | val previous = manager.instance
28 | val mdc = new MDCMap(Some(previous))
29 | try {
30 | manager.instance = mdc
31 | f(mdc)
32 | } finally {
33 | manager.instance = previous
34 | }
35 | }
36 |
37 | /**
38 | * Convenience implicit to get the current instance of MDC from the manager
39 | */
40 | implicit def instance: MDC = manager.instance
41 |
42 | /**
43 | * Sets the instance for the current context
44 | */
45 | def set(mdc: MDC): Unit = manager.instance = mdc
46 |
47 | /**
48 | * Sets the instance for the current context for the duration of the function `f`.
49 | */
50 | def contextualize[Return](mdc: MDC)(f: => Return): Return = {
51 | val previous = manager.instance
52 | set(mdc)
53 | try {
54 | f
55 | } finally {
56 | set(previous)
57 | }
58 | }
59 |
60 | def map: Map[String, () => Any] = instance.map
61 | def get(key: String): Option[Any] = instance.get(key).map(_())
62 |   def getOrElse(key: String, default: => Any): Any = get(key).getOrElse(default)
63 | def update(key: String, value: => Any): Option[Any] = instance(key) = value
64 | def set(key: String, value: Option[Any]): Option[Any] = instance.set(key, value)
65 | def context[Return](values: (String, MDCValue)*)(f: => Return): Return = instance.context(values: _*)(f)
66 | def elapsed(key: String = "elapsed", timeFunction: () => Long = Time.function): Unit = instance.elapsed(key, timeFunction)
67 | def remove(key: String): Option[Any] = instance.remove(key)
68 | def contains(key: String): Boolean = instance.contains(key)
69 | def clear(): Unit = instance.clear()
70 | }
71 |
72 | trait MDC {
73 | /**
74 | * Retrieves the functional map
75 | */
76 | def map: Map[String, () => Any]
77 |
78 | /**
79 | * Gets the value function for this key if set
80 | */
81 | def get(key: String): Option[() => Any]
82 |
83 | /**
84 | * Updates the value for the specified key. The `context` method should be preferred to avoid leaving MDC values set
85 | * forever. Returns the previous value for this key.
86 | */
87 | def update(key: String, value: => Any): Option[Any]
88 |
89 | /**
90 | * Sets the value for the specified key. This method differs from `update` by taking an `Option` that will remove the
91 | * key if set to `None`. Returns the previous value for this key.
92 | */
93 | def set(key: String, value: Option[Any]): Option[Any]
94 |
95 | /**
96 | * Contextualizes setting multiple values similar to `update`, but returns them to their previous value upon
97 | * completion of the context function `f`.
98 | */
99 | def context[Return](values: (String, MDCValue)*)(f: => Return): Return
100 |
101 | /**
102 |    * Applies an elapsed function as an MDC value. This represents a dynamically changing value of the time elapsed
103 |    * since this was set.
104 | */
105 | def elapsed(key: String, timeFunction: () => Long = Time.function): Unit = {
106 | val start = timeFunction()
107 | update(key, s"${((timeFunction() - start) / 1000.0).f()}s")
108 | }
109 |
110 | /**
111 | * Removes a key from this MDC instance
112 | */
113 | def remove(key: String): Option[Any]
114 |
115 | /**
116 | * True if this MDC contains the specified key
117 | */
118 | def contains(key: String): Boolean
119 |
120 | /**
121 | * Clears all values from this MDC
122 | */
123 | def clear(): Unit
124 | }
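Usage sketch (illustrative, not part of this file): the implicit conversion in the scribe.mdc package object lets plain values be used where MDCValue is expected; key names and values are arbitrary.

import scribe.Logger
import scribe.mdc._

MDC.context("requestId" -> "req-42", "user" -> "alice") {
  Logger("web").info("handling request") // a formatter containing $mdc renders both keys
}
// After the block, both keys revert to their previous (unset) values.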
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/mdc/MDCManager.scala:
--------------------------------------------------------------------------------
1 | package scribe.mdc
2 |
3 | trait MDCManager {
4 | def instance: MDC
5 |
6 | def instance_=(mdc: MDC): Unit
7 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/mdc/MDCMap.scala:
--------------------------------------------------------------------------------
1 | package scribe.mdc
2 |
3 | import scribe.util.Time
4 | import perfolation._
5 |
6 | import java.util.concurrent.ConcurrentHashMap
7 | import scala.jdk.CollectionConverters._
8 |
9 | class MDCMap(parent: Option[MDC]) extends MDC {
10 | private val _map = new ConcurrentHashMap[String, () => Any]
11 |
12 | override def map: Map[String, () => Any] = _map.asScala.toMap
13 |
14 | override def get(key: String): Option[() => Any] = Option(_map.get(key)).orElse(parent.flatMap(_.get(key)))
15 |
16 | override def update(key: String, value: => Any): Option[Any] = Option(_map.put(key, () => value)).map(_())
17 |
18 | override def set(key: String, value: Option[Any]): Option[Any] = value match {
19 | case Some(v) => update(key, v)
20 | case None => remove(key)
21 | }
22 |
23 | override def context[Return](values: (String, MDCValue)*)(f: => Return): Return = {
24 | val previous = values.map {
25 | case (key, value) => key -> update(key, value.value())
26 | }
27 | try {
28 | f
29 | } finally {
30 | previous.foreach {
31 | case (key, value) => set(key, value)
32 | }
33 | }
34 | }
35 |
36 | override def remove(key: String): Option[Any] = Option(_map.remove(key)).map(_())
37 |
38 | override def contains(key: String): Boolean = map.contains(key)
39 |
40 | override def clear(): Unit = _map.clear()
41 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/mdc/MDCThreadLocal.scala:
--------------------------------------------------------------------------------
1 | package scribe.mdc
2 |
3 | object MDCThreadLocal extends MDCManager {
4 | private val threadLocal: InheritableThreadLocal[MDC] = new InheritableThreadLocal[MDC] {
5 | override def initialValue(): MDC = MDC.creator(Some(MDC.global))
6 |
7 | override def childValue(parentValue: MDC): MDC = MDC.creator(Option(parentValue).orElse(Some(MDC.global)))
8 | }
9 |
10 | override def instance: MDC = threadLocal.get()
11 |
12 | override def instance_=(mdc: MDC): Unit = threadLocal.set(mdc)
13 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/mdc/MDCValue.scala:
--------------------------------------------------------------------------------
1 | package scribe.mdc
2 |
3 | case class MDCValue(value: () => Any) extends AnyVal
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/mdc/package.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import scala.language.implicitConversions
4 |
5 | package object mdc {
6 | implicit def any2Value(value: => Any): MDCValue = this.value(value)
7 |
8 | def static(value: Any): MDCValue = MDCValue(() => value)
9 | def value(value: => Any): MDCValue = MDCValue(() => value)
10 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/message/EmptyMessage.scala:
--------------------------------------------------------------------------------
1 | package scribe.message
2 |
3 | import scribe.output.{EmptyOutput, LogOutput}
4 |
5 | object EmptyMessage extends Message[String] {
6 | override val value: String = ""
7 | override val logOutput: LogOutput = EmptyOutput
8 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/message/LazyMessage.scala:
--------------------------------------------------------------------------------
1 | package scribe.message
2 |
3 | import scribe.Loggable
4 | import scribe.output.LogOutput
5 |
6 | class LazyMessage[M](function: () => M)
7 | (implicit loggable: Loggable[M]) extends Message[M] {
8 | override lazy val value: M = function()
9 | override lazy val logOutput: LogOutput = loggable(value)
10 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/message/LoggableMessage.scala:
--------------------------------------------------------------------------------
1 | package scribe.message
2 |
3 | import scribe.output.{LogOutput, TextOutput}
4 | import scribe.throwable.TraceLoggableMessage
5 | import scribe.{LogFeature, LogRecord, Loggable}
6 |
7 | import scala.language.implicitConversions
8 |
9 | trait LoggableMessage extends LogFeature {
10 | def value: Any
11 | def logOutput: LogOutput
12 |
13 | override def apply(record: LogRecord): LogRecord = record.withMessages(this)
14 | }
15 |
16 | object LoggableMessage {
17 | implicit def string2LoggableMessage(s: => String): LoggableMessage = LoggableMessage[String](new TextOutput(_))(s)
18 | implicit def stringList2Messages(list: => List[String]): List[LoggableMessage] =
19 | list.map(f => string2LoggableMessage(f))
20 | implicit def throwableList2Messages(list: List[Throwable]): List[LoggableMessage] =
21 | list.map(f => TraceLoggableMessage(f))
22 |
23 | def apply[V](toLogOutput: V => LogOutput)(value: => V): LoggableMessage =
24 | new LazyMessage[V](() => value)(new Loggable[V] {
25 | override def apply(value: V): LogOutput = toLogOutput(value)
26 | })
27 | }
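28 | 
29 | // Illustrative sketch (not part of the original source): adapting a custom type with the
30 | // `apply` helper above. `User` is a hypothetical domain class.
31 | // case class User(id: Long, name: String)
32 | // val message: LoggableMessage =
33 | //   LoggableMessage[User](u => new TextOutput(s"User #${u.id}: ${u.name}"))(User(1L, "demo"))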
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/message/Message.scala:
--------------------------------------------------------------------------------
1 | package scribe.message
2 |
3 | trait Message[M] extends LoggableMessage {
4 | override def value: M
5 | }
6 |
7 | //object Message {
8 | // def static[M: Loggable](value: M): Message[M] = StaticMessage(value)
9 | // def apply[M: Loggable](value: => M): Message[M] = new LazyMessage[M](() => value)
10 | // def empty: Message[String] = EmptyMessage
11 | //}
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/message/StaticMessage.scala:
--------------------------------------------------------------------------------
1 | package scribe.message
2 |
3 | import scribe.Loggable
4 | import scribe.output.LogOutput
5 |
6 | case class StaticMessage[M](value: M)
7 | (implicit loggable: Loggable[M]) extends Message[M] {
8 | override lazy val logOutput: LogOutput = loggable(value)
9 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/modify/LevelFilter.scala:
--------------------------------------------------------------------------------
1 | package scribe.modify
2 |
3 | import scribe.filter.Filter
4 | import scribe.{Level, LogRecord, Priority}
5 |
6 | case class LevelFilter(include: Double => Boolean,
7 | exclude: Double => Boolean,
8 | priority: Priority,
9 | ignoreBoost: Boolean = false,
10 | id: String = LevelFilter.Id) extends LogModifier with Filter {
11 | def accepts(level: Double): Boolean = {
12 | val i = include(level)
13 | val e = exclude(level)
14 | i && !e
15 | }
16 |
17 | override def apply(record: LogRecord): Option[LogRecord] = if (accepts(if (ignoreBoost) record.level.value else record.levelValue)) {
18 | Some(record)
19 | } else {
20 | None
21 | }
22 |
23 | override def matches(record: LogRecord): Boolean = accepts(if (ignoreBoost) record.level.value else record.levelValue)
24 |
25 | override def withId(id: String): LogModifier = copy(id = id)
26 | }
27 |
28 | object LevelFilter {
29 | val Id: String = "LevelFilter"
30 |
31 | lazy val ExcludeAll: LevelFilter = new LevelFilter(include = _ => false, exclude = _ => true, Priority.Low)
32 | lazy val IncludeAll: LevelFilter = new LevelFilter(include = _ => true, exclude = _ => false, Priority.Low)
33 |
34 | def >(level: Level): LevelFilter = new LevelFilter(
35 | include = _ > level.value,
36 | exclude = _ => false,
37 | priority = Priority.High
38 | )
39 | def >=(level: Level): LevelFilter = new LevelFilter(
40 | include = _ >= level.value,
41 | exclude = _ => false,
42 | priority = Priority.High
43 | )
44 | def <(level: Level): LevelFilter = new LevelFilter(
45 | include = _ < level.value,
46 | exclude = _ => false,
47 | priority = Priority.High
48 | )
49 | def <=(level: Level): LevelFilter = new LevelFilter(
50 | include = _ <= level.value,
51 | exclude = _ => false,
52 | priority = Priority.High
53 | )
54 | def ===(level: Level): LevelFilter = new LevelFilter(
55 | include = _ == level.value,
56 | exclude = _ => false,
57 | priority = Priority.High
58 | )
59 | }
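60 | 
61 | // Illustrative sketch (not part of the original source): the comparison builders produce
62 | // filters whose `accepts` operates on the numeric level value.
63 | // val infoOrHigher: LevelFilter = LevelFilter >= Level.Info
64 | // infoOrHigher.accepts(Level.Error.value)   // true
65 | // infoOrHigher.accepts(Level.Debug.value)   // false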
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/modify/LogBooster.scala:
--------------------------------------------------------------------------------
1 | package scribe.modify
2 |
3 | import scribe.{LogRecord, Priority}
4 |
5 | case class LogBooster(booster: Double => Double,
6 | priority: Priority,
7 | id: String = LogBooster.Id) extends LogModifier {
8 | override def apply(record: LogRecord): Option[LogRecord] = Some(record.boost(booster))
9 |
10 | override def withId(id: String): LogModifier = copy(id = id)
11 | }
12 |
13 | object LogBooster {
14 | val Id: String = "LogBooster"
15 |
16 | def multiply(multiplier: Double, priority: Priority = Priority.Normal): LogBooster = new LogBooster(_ * multiplier, priority)
17 | def add(value: Double, priority: Priority = Priority.Normal): LogBooster = new LogBooster(_ + value, priority)
18 | def subtract(value: Double, priority: Priority = Priority.Normal): LogBooster = new LogBooster(_ - value, priority)
19 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/modify/LogModifier.scala:
--------------------------------------------------------------------------------
1 | package scribe.modify
2 |
3 | import scribe.{LogRecord, Priority}
4 |
5 | /**
6 |  * LogModifier is attached to `Logger` instances in order to manipulate `LogRecord`s before they are handled by a
7 | * `LogHandler`.
8 | */
9 | trait LogModifier {
10 | /**
11 |  * Represents a unique identifier for this type of modifier. This is used when adding a LogModifier to a Logger so
12 |  * that an existing modifier with the same id can be replaced.
13 | */
14 | def id: String
15 |
16 | /**
17 | * Multiple LogModifiers attached to the same `Logger` are automatically sorted by Priority.
18 | */
19 | def priority: Priority
20 |
21 | /**
22 | * Handles modification of a LogRecord
23 | *
24 | * @param record the record to modify
25 | * @return Some LogRecord that should continue to propagate or None if the logging action should be canceled
26 | */
27 | def apply(record: LogRecord): Option[LogRecord]
28 |
29 | def withId(id: String): LogModifier
30 |
31 | def alwaysApply: LogModifier = withId("")
32 | }
33 |
34 | object LogModifier {
35 | implicit final val LogModifierOrdering: Ordering[LogModifier] = Ordering.by(_.priority)
36 | }
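37 | 
38 | // Illustrative sketch (not part of the original source): a minimal custom modifier that
39 | // cancels records from a hypothetical noisy class by returning None.
40 | // case class DropNoisy(id: String = "DropNoisy") extends LogModifier {
41 | //   override def priority: Priority = Priority.Normal
42 | //   override def apply(record: LogRecord): Option[LogRecord] =
43 | //     if (record.className == "com.example.Noisy") None else Some(record)
44 | //   override def withId(id: String): LogModifier = copy(id = id)
45 | // }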
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/output/format/ASCIIOutputFormat.scala:
--------------------------------------------------------------------------------
1 | package scribe.output.format
2 | import scribe.output.LogOutput
3 |
4 | object ASCIIOutputFormat extends OutputFormat {
5 | override def apply(output: LogOutput, stream: String => Unit): Unit = stream(output.plainText)
6 | }
7 |
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/output/format/OutputFormat.scala:
--------------------------------------------------------------------------------
1 | package scribe.output.format
2 |
3 | import scribe.Platform
4 | import scribe.output.LogOutput
5 |
6 | trait OutputFormat {
7 | def init(stream: String => Unit): Unit = {}
8 |
9 | def begin(stream: String => Unit): Unit = {}
10 |
11 | def apply(output: LogOutput, stream: String => Unit): Unit
12 |
13 | def end(stream: String => Unit): Unit = {}
14 | }
15 |
16 | object OutputFormat {
17 | /**
18 |  * Defaults to the platform-specific format.
19 | */
20 | var default: OutputFormat = Platform.outputFormat()
21 | }
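22 | 
23 | // Illustrative sketch (not part of the original source): a custom format only needs to
24 | // implement `apply`; init/begin/end default to no-ops.
25 | // object UpperCaseOutputFormat extends OutputFormat {
26 | //   override def apply(output: LogOutput, stream: String => Unit): Unit =
27 | //     stream(output.plainText.toUpperCase)
28 | // }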
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/output/package.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import scala.language.implicitConversions
4 |
5 | package object output {
6 | implicit def text(s: String): LogOutput = new TextOutput(s)
7 | implicit def seq2LogOutput(entries: Seq[LogOutput]): LogOutput = new CompositeOutput(entries.toList)
8 |
9 | implicit class EnhancedColor(color: Color) {
10 | def apply(out: LogOutput*): LogOutput = fg(out: _*)
11 | def fg(out: LogOutput*): LogOutput = output.fg(color, out: _*)
12 | def bg(out: LogOutput*): LogOutput = output.bg(color, out: _*)
13 | }
14 |
15 | def out(entries: LogOutput*): LogOutput = if (entries.length == 1) {
16 | entries.head
17 | } else {
18 | new CompositeOutput(entries.toList)
19 | }
20 |
21 | def color(color: Color, output: LogOutput*): LogOutput = fg(color, output: _*)
22 |
23 | def fg(color: Color, output: LogOutput*): LogOutput = new ColoredOutput(color, out(output: _*))
24 |
25 | def bg(color: Color, output: LogOutput*): LogOutput = new BackgroundColoredOutput(color, out(output: _*))
26 |
27 | def black(output: LogOutput*): LogOutput = fg(Color.Black, output: _*)
28 | def blue(output: LogOutput*): LogOutput = fg(Color.Blue, output: _*)
29 | def cyan(output: LogOutput*): LogOutput = fg(Color.Cyan, output: _*)
30 | def green(output: LogOutput*): LogOutput = fg(Color.Green, output: _*)
31 | def magenta(output: LogOutput*): LogOutput = fg(Color.Magenta, output: _*)
32 | def red(output: LogOutput*): LogOutput = fg(Color.Red, output: _*)
33 | def white(output: LogOutput*): LogOutput = fg(Color.White, output: _*)
34 | def yellow(output: LogOutput*): LogOutput = fg(Color.Yellow, output: _*)
35 | def gray(output: LogOutput*): LogOutput = fg(Color.Gray, output: _*)
36 | def brightBlue(output: LogOutput*): LogOutput = fg(Color.BrightBlue, output: _*)
37 | def brightCyan(output: LogOutput*): LogOutput = fg(Color.BrightCyan, output: _*)
38 | def brightGreen(output: LogOutput*): LogOutput = fg(Color.BrightGreen, output: _*)
39 | def brightMagenta(output: LogOutput*): LogOutput = fg(Color.BrightMagenta, output: _*)
40 | def brightRed(output: LogOutput*): LogOutput = fg(Color.BrightRed, output: _*)
41 | def brightWhite(output: LogOutput*): LogOutput = fg(Color.BrightWhite, output: _*)
42 | def brightYellow(output: LogOutput*): LogOutput = fg(Color.BrightYellow, output: _*)
43 |
44 | def bgBlack(output: LogOutput*): LogOutput = bg(Color.Black, output: _*)
45 | def bgBlue(output: LogOutput*): LogOutput = bg(Color.Blue, output: _*)
46 | def bgCyan(output: LogOutput*): LogOutput = bg(Color.Cyan, output: _*)
47 | def bgGreen(output: LogOutput*): LogOutput = bg(Color.Green, output: _*)
48 | def bgMagenta(output: LogOutput*): LogOutput = bg(Color.Magenta, output: _*)
49 | def bgRed(output: LogOutput*): LogOutput = bg(Color.Red, output: _*)
50 | def bgWhite(output: LogOutput*): LogOutput = bg(Color.White, output: _*)
51 | def bgYellow(output: LogOutput*): LogOutput = bg(Color.Yellow, output: _*)
52 | def bgGray(output: LogOutput*): LogOutput = bg(Color.Gray, output: _*)
53 | def bgBrightBlue(output: LogOutput*): LogOutput = bg(Color.BrightBlue, output: _*)
54 | def bgBrightCyan(output: LogOutput*): LogOutput = bg(Color.BrightCyan, output: _*)
55 | def bgBrightGreen(output: LogOutput*): LogOutput = bg(Color.BrightGreen, output: _*)
56 | def bgBrightMagenta(output: LogOutput*): LogOutput = bg(Color.BrightMagenta, output: _*)
57 | def bgBrightRed(output: LogOutput*): LogOutput = bg(Color.BrightRed, output: _*)
58 | def bgBrightWhite(output: LogOutput*): LogOutput = bg(Color.BrightWhite, output: _*)
59 | def bgBrightYellow(output: LogOutput*): LogOutput = bg(Color.BrightYellow, output: _*)
60 |
61 | def url(url: String, output: LogOutput): LogOutput = new URLOutput(url, output)
62 | def bold(output: LogOutput*): LogOutput = new BoldOutput(out(output: _*))
63 | def italic(output: LogOutput*): LogOutput = new ItalicOutput(out(output: _*))
64 | def underline(output: LogOutput*): LogOutput = new UnderlineOutput(out(output: _*))
65 | def strikethrough(output: LogOutput*): LogOutput = new StrikethroughOutput(out(output: _*))
66 | }
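67 | 
68 | // Illustrative sketch (not part of the original source): composing rich output with the
69 | // helpers above; the text content is hypothetical. The resulting LogOutput can then be
70 | // rendered through any OutputFormat or passed to scribe's rich logging calls.
71 | // val line: LogOutput = out(bold(red(text("ERROR"))), text(" while saving "), underline(text("record #42")))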
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/package.scala:
--------------------------------------------------------------------------------
1 | import scribe.LogFeature
2 | import scribe.mdc.MDC
3 | import sourcecode.{FileName, Line, Name, Pkg}
4 |
5 | import scala.language.experimental.macros
6 | import scala.language.implicitConversions
7 |
8 | package object scribe extends LoggerSupport[Unit] {
9 | lazy val lineSeparator: String = System.getProperty("line.separator")
10 |
11 | protected[scribe] var disposables = Set.empty[() => Unit]
12 |
13 | @inline
14 | override final def log(record: => LogRecord): Unit = Logger(record.className).log(record)
15 |
16 | override def log(level: Level, mdc: MDC, features: LogFeature*)
17 | (implicit pkg: Pkg, fileName: FileName, name: Name, line: Line): Unit =
18 | if (includes(level)) super.log(level, mdc, features: _*)
19 |
20 | def includes(level: Level)(implicit pkg: sourcecode.Pkg,
21 | fileName: sourcecode.FileName,
22 | name: sourcecode.Name,
23 | line: sourcecode.Line): Boolean = {
24 | val (_, className) = LoggerSupport.className(pkg, fileName)
25 | Logger(className).includes(level)
26 | }
27 |
28 | /**
29 | * LogFeature convenience functionality to set data on a log
30 | */
31 | def data(key: String, value: => Any): LogFeature = LogFeature(_(key) = () => value)
32 |
33 | /**
34 | * LogFeature convenience functionality to set a map of data on a log
35 | */
36 | def data(map: Map[String, Any]): LogFeature = LogFeature { r =>
37 | map.foldLeft(r)((record, tuple) => record(tuple._1) = () => tuple._2)
38 | }
39 |
40 | /**
41 | * LogFeature convenience functionality to set a booster on a log
42 | */
43 | def boost(booster: Double => Double): LogFeature = LogFeature(_.boost(booster))
44 |
45 | /**
46 | * LogFeature convenience functionality to override the default thread on a log
47 | */
48 | def thread(thread: Thread): LogFeature = LogFeature(_.copy(thread = thread))
49 |
50 | /**
51 | * LogFeature convenience functionality to override the timeStamp on a log
52 | */
53 | def timeStamp(timeStamp: Long): LogFeature = LogFeature(_.copy(timeStamp = timeStamp))
54 |
55 | def dispose(): Unit = disposables.foreach(d => d())
56 |
57 | implicit def level2Double(level: Level): Double = level.value
58 |
59 | implicit class AnyLogging(value: Any) {
60 | def logger: Logger = Logger(value.getClass.getName)
61 | }
62 | }
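63 | 
64 | // Illustrative sketch (not part of the original source): the LogFeature helpers above can
65 | // be mixed into any logging call; the key names are hypothetical.
66 | // scribe.info(
67 | //   "order processed",
68 | //   data("orderId", 42),
69 | //   data(Map("region" -> "eu", "retries" -> 0)),
70 | //   boost(_ + 100)   // raise the level value so stricter handlers still receive it
71 | // )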
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/throwable/Trace.scala:
--------------------------------------------------------------------------------
1 | package scribe.throwable
2 |
3 | import scribe.message.LoggableMessage
4 | import scribe.output.{CompositeOutput, EmptyOutput, LogOutput, TextOutput}
5 |
6 | import scala.annotation.tailrec
7 |
8 | case class Trace(className: String,
9 | message: Option[String],
10 | elements: List[TraceElement],
11 | cause: Option[Trace])
12 |
13 | case class TraceLoggableMessage(throwable: Throwable) extends LoggableMessage {
14 | override lazy val value: Trace = Trace.throwable2Trace(throwable)
15 |
16 | override lazy val logOutput: LogOutput = Trace.toLogOutput(EmptyOutput, value)
17 | }
18 |
19 | object Trace {
20 | private val NativeMethod: Int = -2
21 |
22 | def throwable2Trace(throwable: Throwable): Trace = {
23 | val elements = throwable.getStackTrace.toList.map { e =>
24 | TraceElement(e.getClassName, e.getFileName, e.getMethodName, e.getLineNumber)
25 | }
26 | val message = throwable.getLocalizedMessage match {
27 | case null | "" => None
28 | case m => Some(m)
29 | }
30 | Trace(throwable.getClass.getName, message, elements, Option(throwable.getCause).map(throwable2Trace))
31 | }
32 |
33 | @tailrec
34 | final def toLogOutput(message: LogOutput,
35 | trace: Trace,
36 | primaryCause: Boolean = true,
37 | b: StringBuilder = new StringBuilder): LogOutput = {
38 | if (!primaryCause) {
39 | b.append("Caused by: ")
40 | }
41 | b.append(trace.className)
42 | trace.message.foreach { message =>
43 | b.append(": ")
44 | b.append(message)
45 | }
46 | b.append(scribe.lineSeparator)
47 | writeStackTrace(b, trace.elements)
48 | trace.cause match {
49 | case Some(cause) => toLogOutput(message, cause, primaryCause = false, b = b)
50 | case None =>
51 | val output = new TextOutput(b.toString())
52 | if (message == EmptyOutput) {
53 | output
54 | } else {
55 | new CompositeOutput(List(message, new TextOutput(scribe.lineSeparator), output))
56 | }
57 | }
58 | }
59 |
60 | @tailrec
61 | private def writeStackTrace(b: StringBuilder, elements: List[TraceElement]): Unit = {
62 | elements.headOption match {
63 | case None => // No more elements
64 | case Some(head) =>
65 | b.append("\tat ")
66 | b.append(head.`class`)
67 | b.append('.')
68 | b.append(head.method)
69 | b.append('(')
70 | if (head.line == NativeMethod) {
71 | b.append("Native Method")
72 | } else {
73 | b.append(head.fileName)
74 | if (head.line > 0) {
75 | b.append(':')
76 | b.append(head.line)
77 | }
78 | }
79 | b.append(')')
80 | b.append(scribe.lineSeparator)
81 | writeStackTrace(b, elements.tail)
82 | }
83 | }
84 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/throwable/TraceElement.scala:
--------------------------------------------------------------------------------
1 | package scribe.throwable
2 |
3 | case class TraceElement(`class`: String, fileName: String, method: String, line: Int)
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/util/Abbreviator.scala:
--------------------------------------------------------------------------------
1 | package scribe.util
2 |
3 |
4 | object Abbreviator {
5 | def apply(value: String,
6 | maxLength: Int,
7 | separator: Char = '.',
8 | removeEntries: Boolean = true,
9 | abbreviateName: Boolean = false): String = {
10 | var entries = value.split(separator).filter(_.nonEmpty)
11 | def result = entries.mkString(separator.toString)
12 | var position = 0
13 | while (result.length > maxLength && position < entries.length - 1) {
14 | entries(position) = entries(position).charAt(0).toString
15 | position += 1
16 | }
17 | if (result.length > maxLength && removeEntries) {
18 | entries = Array(entries.last)
19 | }
20 | if (result.length > math.max(maxLength, 4) && abbreviateName) {
21 | val entry = entries.head
22 | val offset = math.max(maxLength - 3, 1)
23 | val abbreviated = entry.take(offset)
24 | entries = Array(s"$abbreviated...")
25 | }
26 | result
27 | }
28 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/util/Time.scala:
--------------------------------------------------------------------------------
1 | package scribe.util
2 |
3 | object Time {
4 | var function: () => Long = _
5 |
6 | reset()
7 |
8 | def apply(): Long = function()
9 |
10 | def contextualize[Return](t: => Long)(f: => Return): Return = {
11 | val old = function
12 | function = () => t
13 | try {
14 | f
15 | } finally {
16 | function = old
17 | }
18 | }
19 |
20 | def reset(): Unit = function = () => System.currentTimeMillis()
21 | }
22 |
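23 | // Illustrative sketch (not part of the original source): pinning the clock inside a block,
24 | // e.g. to make timestamps or file-name generation deterministic in tests.
25 | // val fixed = 1700000000000L   // hypothetical instant
26 | // Time.contextualize(fixed) {
27 | //   assert(Time() == fixed)
28 | // }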
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/writer/CacheWriter.scala:
--------------------------------------------------------------------------------
1 | package scribe.writer
2 | import scribe.LogRecord
3 | import scribe.output.LogOutput
4 | import scribe.output.format.OutputFormat
5 |
6 | class CacheWriter(max: Int = CacheWriter.DefaultMax) extends Writer {
7 | private var recordCache = List.empty[LogRecord]
8 | private var outputCache = List.empty[LogOutput]
9 |
10 | override def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit = synchronized {
11 | recordCache = (record :: recordCache).take(max)
12 | outputCache = (output :: outputCache).take(max)
13 | }
14 |
15 | def records: List[LogRecord] = recordCache
16 | def output: List[LogOutput] = outputCache
17 |
18 | def consume[Return](f: List[LogRecord] => Return): Return = try {
19 | f(records)
20 | } finally {
21 | clear()
22 | }
23 |
24 | def consumeMessages[Return](f: List[String] => Return): Return = consume { list =>
25 | f(list.map(_.messages.map(_.logOutput.plainText).mkString(" ")))
26 | }
27 |
28 | def clear(): Unit = synchronized {
29 | recordCache = Nil
30 | outputCache = Nil
31 | }
32 | }
33 |
34 | object CacheWriter {
35 | val DefaultMax: Int = 100
36 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/writer/ConsoleWriter.scala:
--------------------------------------------------------------------------------
1 | package scribe.writer
2 |
3 | import scribe._
4 | import scribe.output._
5 | import scribe.output.format.OutputFormat
6 |
7 | object ConsoleWriter extends Writer {
8 | override def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit = {
9 | Platform.consoleWriter.write(record, output, outputFormat)
10 | }
11 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/writer/NullWriter.scala:
--------------------------------------------------------------------------------
1 | package scribe.writer
2 |
3 | import scribe.LogRecord
4 | import scribe.output.LogOutput
5 | import scribe.output.format.OutputFormat
6 |
7 | object NullWriter extends Writer {
8 | override def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit = {}
9 | }
10 |
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/writer/SystemErrWriter.scala:
--------------------------------------------------------------------------------
1 | package scribe.writer
2 |
3 | import scribe.output.LogOutput
4 | import scribe.output.format.OutputFormat
5 | import scribe.{LogRecord, Logger}
6 |
7 | /**
8 | * SystemErrWriter writes logs to System.err
9 | */
10 | object SystemErrWriter extends Writer {
11 | override def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit =
12 | SystemWriter.write(Logger.system.err, output, outputFormat)
13 | }
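14 | 
15 | // Illustrative sketch (not part of the original source): routing only error-level records
16 | // to stderr by attaching this writer to the root logger, mirroring the handler pattern used
17 | // in the test specs.
18 | // Logger.root.withHandler(writer = SystemErrWriter, minimumLevel = Some(Level.Error)).replace()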
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/writer/SystemOutWriter.scala:
--------------------------------------------------------------------------------
1 | package scribe.writer
2 |
3 | import scribe.output.LogOutput
4 | import scribe.output.format.OutputFormat
5 | import scribe.{LogRecord, Logger}
6 |
7 | /**
8 | * SystemOutWriter writes logs to System.out
9 | */
10 | object SystemOutWriter extends Writer {
11 | override def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit =
12 | SystemWriter.write(Logger.system.out, output, outputFormat)
13 | }
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/writer/SystemWriter.scala:
--------------------------------------------------------------------------------
1 | package scribe.writer
2 |
3 | import scribe.output._
4 | import scribe.output.format.OutputFormat
5 | import scribe.{Level, LogRecord, Logger}
6 |
7 | import java.io.PrintStream
8 | import scala.collection.mutable
9 | import scala.language.implicitConversions
10 | import scala.math.Ordering.Implicits._
11 |
12 | /**
13 |  * SystemWriter writes to System.out or System.err, choosing the latter if the level is higher than Info.
14 | */
15 | object SystemWriter extends Writer {
16 | /**
17 |  * If true, writing to the console is always synchronized to avoid interleaved text. Most native consoles handle
18 |  * this automatically, but the IntelliJ and Eclipse consoles are notorious for not handling it properly.
19 | * Defaults to true.
20 | */
21 | var synchronizeWriting: Boolean = true
22 |
23 | /**
24 | * Workaround for some consoles that don't play nicely with asynchronous calls
25 | */
26 | var alwaysFlush: Boolean = false
27 |
28 | val DefaultStringBuilderStartCapacity: Int = 512
29 |
30 | var stringBuilderStartCapacity: Int = DefaultStringBuilderStartCapacity
31 |
32 | private val stringBuilders = new ThreadLocal[mutable.StringBuilder] {
33 | override def initialValue(): mutable.StringBuilder = new mutable.StringBuilder(stringBuilderStartCapacity)
34 | }
35 |
36 | override def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit = {
37 | val stream = if (record.level <= Level.Info) {
38 | Logger.system.out
39 | } else {
40 | Logger.system.err
41 | }
42 | write(stream, output, outputFormat)
43 | }
44 |
45 | def write[M](stream: PrintStream, output: LogOutput, outputFormat: OutputFormat): Unit = {
46 | val sb = stringBuilders.get()
47 | outputFormat.begin(sb.append(_))
48 | outputFormat(output, s => sb.append(s))
49 | outputFormat.end(sb.append(_))
50 | if (synchronizeWriting) {
51 | synchronized {
52 | stream.println(sb.toString())
53 | if (alwaysFlush) stream.flush()
54 | }
55 | } else {
56 | stream.println(sb.toString())
57 | if (alwaysFlush) stream.flush()
58 | }
59 | sb.clear()
60 | }
61 | }
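62 | 
63 | // Illustrative sketch (not part of the original source): both switches are plain vars and can
64 | // be adjusted at startup, e.g. when an IDE console shows interleaved or truncated output.
65 | // SystemWriter.synchronizeWriting = true   // already the default
66 | // SystemWriter.alwaysFlush = true          // flush after every record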
--------------------------------------------------------------------------------
/core/shared/src/main/scala/scribe/writer/Writer.scala:
--------------------------------------------------------------------------------
1 | package scribe.writer
2 |
3 | import scribe.LogRecord
4 | import scribe.output.LogOutput
5 | import scribe.output.format.OutputFormat
6 |
7 | trait Writer {
8 | def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit
9 |
10 | def dispose(): Unit = {}
11 | }
--------------------------------------------------------------------------------
/core/shared/src/test/scala/specs/AbbreviatorSpec.scala:
--------------------------------------------------------------------------------
1 | package specs
2 |
3 | import org.scalatest.matchers.should.Matchers
4 | import org.scalatest.wordspec.AnyWordSpec
5 | import scribe.util.Abbreviator
6 |
7 | import scala.language.implicitConversions
8 |
9 | class AbbreviatorSpec extends AnyWordSpec with Matchers {
10 | "Abbreviator" should {
11 | val className1 = "mainPackage.sub.sample.Bar"
12 | val className2 = "mainPackage.sub.sample.FooBar"
13 |
14 | "properly abbreviate 26 length" in {
15 | val s = Abbreviator(className1, 26)
16 | s should be(className1)
17 | }
18 | "properly abbreviate 16 length" in {
19 | val s = Abbreviator(className1, 16)
20 | s should be("m.sub.sample.Bar")
21 | }
22 | "properly abbreviate 15 length" in {
23 | val s = Abbreviator(className1, 15)
24 | s should be("m.s.sample.Bar")
25 | }
26 | "properly abbreviate 10 length" in {
27 | val s = Abbreviator(className1, 10)
28 | s should be("m.s.s.Bar")
29 | }
30 | "properly abbreviate 5 length" in {
31 | val s = Abbreviator(className1, 5)
32 | s should be("Bar")
33 | }
34 | "properly abbreviate 0 length" in {
35 | val s = Abbreviator(className1, 0)
36 | s should be("Bar")
37 | }
38 | "properly abbreviate longer class name at 5" in {
39 | val s = Abbreviator(className2, 5, abbreviateName = true)
40 | s should be("Fo...")
41 | }
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/core/shared/src/test/scala/specs/ImplicitLoggingSpec.scala:
--------------------------------------------------------------------------------
1 | package specs
2 |
3 | import org.scalatest.matchers.should.Matchers
4 | import org.scalatest.wordspec.AnyWordSpec
5 |
6 | import scala.language.implicitConversions
7 |
8 | class ImplicitLoggingSpec extends AnyWordSpec with Matchers {
9 | "implicit logger" should {
10 | "config properly" in {
11 | ImplicitLoggingTestObject.initialize()
12 | }
13 | "properly log a simple message" in {
14 | val line = Some(14)
15 |
16 | ImplicitLoggingTestObject.doSomething()
17 | ImplicitLoggingTestObject.writer.records.length should be(1)
18 | val record = ImplicitLoggingTestObject.writer.records.head
19 | record.className should be("specs.ImplicitLoggingTestObject")
20 | record.methodName should be(Some("doSomething"))
21 | record.line should be(line)
22 | }
23 | }
24 | }
--------------------------------------------------------------------------------
/core/shared/src/test/scala/specs/ImplicitLoggingTestObject.scala:
--------------------------------------------------------------------------------
1 | package specs
2 |
3 | import scribe._
4 | import scribe.writer.CacheWriter
5 |
6 | object ImplicitLoggingTestObject {
7 | val writer = new CacheWriter
8 |
9 | def initialize(): Unit = {
10 | this.logger.orphan().withHandler(writer = writer).replace()
11 | }
12 |
13 | def doSomething(): Unit = {
14 | scribe.info("did something!")
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/core/shared/src/test/scala/specs/LogFeatureSpec.scala:
--------------------------------------------------------------------------------
1 | package specs
2 |
3 | import org.scalatest.matchers.should.Matchers
4 | import org.scalatest.wordspec.AnyWordSpec
5 | import scribe.writer.CacheWriter
6 |
7 | import scribe._
8 |
9 | class LogFeatureSpec extends AnyWordSpec with Matchers {
10 | private lazy val writer = new CacheWriter()
11 |
12 | "LogFeature" should {
13 | "initialize" in {
14 | Logger.reset()
15 | Logger.root.clearHandlers().withHandler(
16 | writer = writer
17 | ).replace()
18 | }
19 | "log with data" in {
20 | scribe.info("testing", data("foo", "bar"))
21 | }
22 | "verify the data was written" in {
23 | writer.consume { records =>
24 | records.length should be(1)
25 | val record = records.head
26 | record.data("foo")() should be("bar")
27 | }
28 | }
29 | "log without data" in {
30 | scribe.info("testing")
31 | }
32 | "verify the data was not written" in {
33 | writer.consume { records =>
34 | records.length should be(1)
35 | val record = records.head
36 | record.data.get("foo") should be(None)
37 | }
38 | }
39 | "log a Throwable" in {
40 | val t: Throwable = new RuntimeException("Testing")
41 | scribe.info(t.getMessage, t)
42 | writer.consume { records =>
43 | records.map(_.messages.map(_.value.toString.takeWhile(_ != '('))) should be(List(List("Testing", "Trace")))
44 | }
45 | }
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/core/shared/src/test/scala/specs/LoggingLevelFilteringSpec.scala:
--------------------------------------------------------------------------------
1 | package specs
2 |
3 | import org.scalatest.matchers.should.Matchers
4 | import org.scalatest.wordspec.AnyWordSpec
5 | import scribe.filter._
6 | import scribe.{Level, LogFeature, Logger}
7 | import scribe.writer.CacheWriter
8 |
9 | class LoggingLevelFilteringSpec extends AnyWordSpec with Matchers {
10 | private lazy val errorWriter = new CacheWriter()
11 | private lazy val traceWriter = new CacheWriter()
12 | private lazy val debugWriter = new CacheWriter()
13 |
14 | private val pkg1 = "specs"
15 | private val pkg2 = "com.foo"
16 |
17 | "Logging Level Filtering" should {
18 | "configure the loggers" in {
19 | Logger.reset()
20 | Logger.root
21 | .clearHandlers()
22 | .withMinimumLevel(Level.Info)
23 | .withHandler(writer = errorWriter, minimumLevel = Some(Level.Error))
24 | .withHandler(
25 | writer = traceWriter,
26 | modifiers = List(
27 | select(packageName(pkg1), packageName(pkg2))
28 | .include(level === Level.Trace)
29 | .excludeUnselected
30 | )
31 | )
32 | .withHandler(writer = debugWriter, minimumLevel = Some(Level.Debug))
33 | .replace()
34 | Logger(pkg1).withMinimumLevel(Level.Trace).replace()
35 | }
36 | "verify an error gets logged" in {
37 | scribe.error("Error1")
38 | errorWriter.consumeMessages { list =>
39 | list should be(List("Error1"))
40 | }
41 | traceWriter.consumeMessages { list =>
42 | list should be(Nil)
43 | }
44 | debugWriter.consumeMessages { list =>
45 | list should be(List("Error1"))
46 | }
47 | }
48 | "verify a trace message gets logged" in {
49 | scribe.trace("Trace1")
50 | errorWriter.consumeMessages { list =>
51 | list should be(Nil)
52 | }
53 | traceWriter.consumeMessages { list =>
54 | list should be(List("Trace1"))
55 | }
56 | debugWriter.consumeMessages { list =>
57 | list should be(Nil)
58 | }
59 | }
60 | "configure the logger for only errors" in {
61 | Logger.reset()
62 | Logger.root
63 | .clearHandlers()
64 | .withHandler(writer = errorWriter, minimumLevel = Some(Level.Error))
65 | .replace()
66 | }
67 | "verify an info message doesn't get evaluated" in {
68 | scribe.info {
69 | throw new RuntimeException("Should not evaluate!")
70 | "testing"
71 | }
72 | errorWriter.consumeMessages { list =>
73 | list should be(Nil)
74 | }
75 | traceWriter.consumeMessages { list =>
76 | list should be(Nil)
77 | }
78 | debugWriter.consumeMessages { list =>
79 | list should be(Nil)
80 | }
81 | }
82 | "reset the root logger" in {
83 | Logger.root.reset()
84 | }
85 | }
86 | }
87 |
--------------------------------------------------------------------------------
/core/shared/src/test/scala/specs/LoggingTestObject.scala:
--------------------------------------------------------------------------------
1 | package specs
2 |
3 | import scribe._
4 | import scribe.writer.CacheWriter
5 |
6 | class LoggingTestObject(writer: CacheWriter) extends Logging {
7 | logger.orphan().withHandler(writer = writer).replace()
8 |
9 | private val anonymous = () => {
10 | LoggingTestObject.this.logger.info("Anonymous logging!")
11 | }
12 |
13 | def testLogger(): Unit = {
14 | logger.info("This is a test!")
15 | }
16 |
17 | def testAnonymous(): Unit = {
18 | anonymous()
19 | }
20 |
21 | def testException(): Unit = {
22 | logger.info(new RuntimeException("Testing"))
23 | }
24 |
25 | def testLoggerException(): Unit = {
26 | logger.info("Oh no", new RuntimeException("Testing"))
27 | }
28 | }
--------------------------------------------------------------------------------
/core/shared/src/test/scala/specs/MDCSpec.scala:
--------------------------------------------------------------------------------
1 | package specs
2 |
3 | import org.scalatest.matchers.should.Matchers
4 | import org.scalatest.wordspec.AnyWordSpec
5 | import scribe.Logger
6 | import scribe.mdc._
7 | import scribe.writer.CacheWriter
8 |
9 | class MDCSpec extends AnyWordSpec with Matchers {
10 | private lazy val writer = new CacheWriter
11 | private lazy val logger = Logger("mdc-test").orphan().withHandler(writer = writer)
12 |
13 | "MDC" should {
14 | "set a simple value to MDC and it get logged" in {
15 | MDC("test") = "simple value"
16 | try {
17 | logger.info("Simple test")
18 | writer.consume { list =>
19 | list.map(_.data("test")()) should be(List("simple value"))
20 | }
21 | } finally {
22 | MDC.remove("test")
23 | }
24 | }
25 | "use context to set and remove the value" in {
26 | val key = "contextualized"
27 | logger.info("One")
28 | MDC.context(key -> "testing") {
29 | logger.info("Two")
30 | }
31 | logger.info("Three")
32 | writer.consume { list =>
33 | list.map(r => r.messages.head.logOutput.plainText -> r.data.get(key).map(_())) should be(List(
34 | "Three" -> None,
35 | "Two" -> Some("testing"),
36 | "One" -> None
37 | ))
38 | }
39 | }
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/fileModule/jvm/src/main/scala/scribe/file/Platform.scala:
--------------------------------------------------------------------------------
1 | package scribe.file
2 |
3 | object Platform {
4 | def addShutdownHook(f: => Unit): Unit = Runtime.getRuntime.addShutdownHook(new Thread {
5 | override def run(): Unit = f
6 | })
7 | }
--------------------------------------------------------------------------------
/fileModule/jvm/src/test/scala/spec/StressTestFileLogging.scala:
--------------------------------------------------------------------------------
1 | package spec
2 |
3 | import scribe._
4 | import scribe.file.{FileWriter, _}
5 | import scribe.output.format.ASCIIOutputFormat
6 |
7 | import scala.io.Source
8 |
9 | object StressTestFileLogging {
10 | def main(args: Array[String]): Unit = {
11 | val writer = FileWriter(
12 | "logs" / ("stress" % rolling("-" % year % "-" % month % "-" % day) % ".log"),
13 | // "logs" / ("stress" % maxSize(max = 1024 * 1024 * 5) % maxLogs(5, 15.seconds) % ".log"),
14 | append = false
15 | )
16 | val logger = Logger.empty.orphan().withHandler(writer = writer, outputFormat = ASCIIOutputFormat)
17 | val total = 10000000
18 |
19 | elapsed {
20 | (0 until total).foreach { index =>
21 | logger.info(s"Logging $index")
22 | if (index % 100000 == 0) {
23 | scribe.info(s"Logged $index records")
24 | }
25 | }
26 | scribe.info(s"Logged $total records!")
27 | writer.flush()
28 | scribe.info("Flushed!")
29 | }
30 | val file = writer.file
31 | writer.dispose()
32 | val lines = {
33 | val source = Source.fromFile(file)
34 | try {
35 | source.getLines().size
36 | } finally {
37 | source.close()
38 | }
39 | }
40 | scribe.info(s"Lines: $lines")
41 | file.delete()
42 | }
43 | }
--------------------------------------------------------------------------------
/fileModule/native/src/main/scala/scribe/file/Platform.scala:
--------------------------------------------------------------------------------
1 | package scribe.file
2 |
3 | import scala.util.Try
4 |
5 | object Platform {
6 | private var hooks = List.empty[() => Unit]
7 |
8 | scala.scalanative.libc.stdlib.atexit(Platform.callHooks _)
9 |
10 | def addShutdownHook(f: => Unit): Unit = synchronized {
11 | hooks = hooks ::: List(() => f)
12 | }
13 |
14 | def callHooks(): Unit = {
15 | hooks.foreach { f =>
16 | Try(f()).failed.foreach { throwable =>
17 | throwable.printStackTrace()
18 | }
19 | }
20 | }
21 | }
--------------------------------------------------------------------------------
/fileModule/shared/src/main/scala/scribe/file/FileWriter.scala:
--------------------------------------------------------------------------------
1 | package scribe.file
2 |
3 | import scribe.LogRecord
4 | import scribe.output.LogOutput
5 | import scribe.output.format.OutputFormat
6 | import scribe.util.Time
7 | import scribe.writer.Writer
8 |
9 | import java.io.File
10 | import java.nio.charset.Charset
11 | import scala.concurrent.ExecutionContext
12 |
13 | case class FileWriter(pathBuilder: PathBuilder = PathBuilder.Default,
14 | append: Boolean = true,
15 | flushMode: FlushMode = FlushMode.AsynchronousFlush()(scribe.Execution.global),
16 | charset: Charset = Charset.defaultCharset()) extends Writer {
17 | private var previousFile: Option[File] = None
18 | private var _file: File = resolveFile()
19 |
20 | def file: File = _file
21 |
22 | def list(): List[File] = pathBuilder.iterator().toList.sortBy(_.lastModified())
23 |
24 | def resolveFile(): File = pathBuilder.file(Time())
25 |
26 | def updatePath(): Unit = {
27 | val newFile = resolveFile()
28 | _file = newFile
29 | }
30 |
31 | override def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit = synchronized {
32 | pathBuilder.before(this)
33 |
34 | // Write to LogFile
35 | val logFile = LogFile(this)
36 | if (!previousFile.contains(_file) || logFile.size == 0L) {
37 | previousFile = Some(_file)
38 | if (_file.length() == 0L || !append) {
39 | outputFormat.init(logFile.write)
40 | }
41 | }
42 | outputFormat.begin(logFile.write)
43 | outputFormat(output, logFile.write)
44 | outputFormat.end(logFile.write)
45 | logFile.write(System.lineSeparator())
46 |
47 | pathBuilder.after(this)
48 | }
49 |
50 | def flush(): Unit = LogFile(this).flush()
51 |
52 | def flushNever: FileWriter = copy(flushMode = FlushMode.NeverFlush)
53 | def flushAlways: FileWriter = copy(flushMode = FlushMode.AlwaysFlush)
54 | def flushAsync(implicit ec: ExecutionContext): FileWriter = copy(flushMode = FlushMode.AsynchronousFlush())
55 | }
--------------------------------------------------------------------------------
/fileModule/shared/src/main/scala/scribe/file/FlushMode.scala:
--------------------------------------------------------------------------------
1 | package scribe.file
2 |
3 | import scribe.file.writer.LogFileWriter
4 | import scribe.util.Time
5 |
6 | import java.util.concurrent.atomic.{AtomicBoolean, AtomicLong}
7 | import scala.concurrent.duration._
8 | import scala.concurrent.{ExecutionContext, Future}
9 |
10 | trait FlushMode {
11 | def dataWritten(logFile: LogFile, writer: LogFileWriter): Unit
12 | }
13 |
14 | object FlushMode {
15 | object NeverFlush extends FlushMode {
16 | override def dataWritten(logFile: LogFile, writer: LogFileWriter): Unit = {}
17 | }
18 |
19 | object AlwaysFlush extends FlushMode {
20 | override def dataWritten(logFile: LogFile, writer: LogFileWriter): Unit = writer.flush()
21 | }
22 |
23 | case class AsynchronousFlush(delay: FiniteDuration = 1.second)(implicit ec: ExecutionContext) extends FlushMode {
24 | private lazy val delayMillis = delay.toMillis
25 | private lazy val flushing = new AtomicBoolean(false)
26 | private lazy val dirty = new AtomicBoolean(false)
27 | private lazy val lastFlush = new AtomicLong(0L)
28 | private var logFile: LogFile = _
29 |
30 | override def dataWritten(logFile: LogFile, writer: LogFileWriter): Unit = {
31 | this.logFile = logFile
32 | if (flushing.compareAndSet(false, true)) {
33 | flush()
34 | } else {
35 | dirty.set(true)
36 | }
37 | }
38 |
39 | private def flush(): Unit = Future {
40 | try {
41 | val delay = this.delayMillis - (Time() - lastFlush.get())
42 | if (delay > 0L) {
43 | Thread.sleep(delay)
44 | }
45 | logFile.flush()
46 | } finally {
47 | lastFlush.set(Time())
48 | if (dirty.compareAndSet(true, false)) {
49 | flush()
50 | } else {
51 | flushing.set(false)
52 | }
53 | }
54 | }
55 | }
56 | }
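57 | 
58 | // Illustrative sketch (not part of the original source): a FlushMode is normally chosen
59 | // through the FileWriter helpers rather than constructed directly.
60 | // val eager = FileWriter().flushAlways                          // flush after every write
61 | // val lazyW = FileWriter().flushNever                           // rely on dispose()/buffering
62 | // val async = FileWriter().flushAsync(scribe.Execution.global)  // periodic background flush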
--------------------------------------------------------------------------------
/fileModule/shared/src/main/scala/scribe/file/LogFileStatus.scala:
--------------------------------------------------------------------------------
1 | package scribe.file
2 |
3 | sealed trait LogFileStatus
4 |
5 | object LogFileStatus {
6 | case object Inactive extends LogFileStatus
7 | case object Active extends LogFileStatus
8 | case object Disposed extends LogFileStatus
9 | }
--------------------------------------------------------------------------------
/fileModule/shared/src/main/scala/scribe/file/PathBuilder.scala:
--------------------------------------------------------------------------------
1 | package scribe.file
2 |
3 | import scribe.file.path.PathPart
4 |
5 | import java.io.File
6 | import java.nio.file.Path
7 |
8 | case class PathBuilder(parts: List[PathPart]) {
9 | def before(writer: FileWriter): Unit = parts.foreach(_.before(writer))
10 | def after(writer: FileWriter): Unit = parts.foreach(_.after(writer))
11 |
12 | def file(timeStamp: Long): File = {
13 | val path = parts.foldLeft(PathBuilder.DefaultPath)((previous, part) => part.current(previous, timeStamp))
14 | new File(path)
15 | }
16 |
17 | def iterator(): Iterator[File] = parts
18 | .foldLeft(Iterator(PathBuilder.DefaultPath))((previous, part) => previous.flatMap(part.all))
19 | .map(new File(_))
20 |
21 | def /(part: PathPart): PathBuilder = copy(parts ::: List(part))
22 | }
23 |
24 | object PathBuilder {
25 | lazy val DefaultPath: String = new File("logs", "app.log").getAbsolutePath
26 | lazy val Default: PathBuilder = PathBuilder(List(PathPart.SetPath(DefaultPath)))
27 |
28 | def static(path: Path): PathBuilder = PathBuilder(List(PathPart.SetPath(path.toString)))
29 | }
--------------------------------------------------------------------------------
/fileModule/shared/src/main/scala/scribe/file/package.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import scribe.file.path.PathPart.FileName
4 | import scribe.file.path._
5 |
6 | import java.io.File
7 | import java.nio.file.Path
8 | import scala.concurrent.duration.{DurationInt, FiniteDuration}
9 | import scala.language.implicitConversions
10 |
11 | package object file {
12 | val DefaultBufferSize: Int = 1024
13 |
14 | implicit def pathPart2PathBuilder(part: PathPart): PathBuilder = PathBuilder(List(part))
15 | implicit def path2PathBuilder(path: Path): PathBuilder = PathBuilder(List(PathPart.SetPath(path.toAbsolutePath.toString)))
16 | implicit def file2PathBuilder(file: File): PathBuilder = PathBuilder(List(PathPart.SetPath(file.getAbsolutePath)))
17 | implicit def string2PathBuilder(s: String): PathBuilder = PathBuilder(List(PathPart.SetPath(s)))
18 | implicit def string2FileName(s: String): FileName = FileName(List(FileNamePart.Static(s)))
19 | implicit def string2FileNamePart(s: String): FileNamePart = FileNamePart.Static(s)
20 | implicit def fileNamePart2FileName(part: FileNamePart): FileName = FileName(List(part))
21 |
22 | def second: FileNamePart = FileNamePart.Second
23 | def minute: FileNamePart = FileNamePart.Minute
24 | def hour: FileNamePart = FileNamePart.Hour
25 | def day: FileNamePart = FileNamePart.Day
26 | def month: FileNamePart = FileNamePart.Month
27 | def year: FileNamePart = FileNamePart.Year
28 | def rolling(fileName: FileName,
29 | truncate: Boolean = true,
30 | minimumValidationFrequency: FiniteDuration = 5.minutes): FileNamePart = {
31 | Rolling(fileName.parts, (current, path) => {
32 | LogFile.get(current) match {
33 | case Some(logFile) => {
34 | if (truncate) {
35 | LogFile.copy(logFile, path)
36 | LogFile.truncate(logFile)
37 | } else {
38 | LogFile.move(logFile, path)
39 | }
40 | }
41 | case None => {
42 | if (truncate) {
43 | LogFile.copy(current, path)
44 | LogFile.truncate(current)
45 | } else {
46 | LogFile.move(current, path)
47 | }
48 | }
49 | }
50 | }, minimumValidationFrequency)
51 | }
52 | def rollingGZIP(fileName: FileName = string2FileName(".gz"),
53 | deleteOriginal: Boolean = true,
54 | bufferSize: Int = DefaultBufferSize,
55 | minimumValidationFrequency: FiniteDuration = 5.minutes): FileNamePart = {
56 | Rolling(fileName.parts, (current, path) => {
57 | LogFile.get(current) match {
58 | case Some(logFile) => LogFile.gzip(logFile, path, deleteOriginal, bufferSize)
59 | case None => LogFile.gzip(current, path, deleteOriginal, bufferSize)
60 | }
61 | }, minimumValidationFrequency)
62 | }
63 | def maxSize(max: Long = MaxSize.OneHundredMeg, separator: String = "-"): FileNamePart = MaxSize(max, separator)
64 | def maxLogs(max: Int = 10, checkFrequency: FiniteDuration = 15.minutes): FileNamePart = MaxLogs(max, checkFrequency)
65 |
66 | def daily(separator: String = "-"): FileName = year % separator % month % separator % day
67 | }
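68 | 
69 | // Illustrative sketch (not part of the original source): composing a log path with the DSL
70 | // above, mirroring StressTestFileLogging; the file names are hypothetical.
71 | // val rolled = FileWriter("logs" / ("app" % rolling("-" % daily()) % maxLogs(10) % ".log"))
72 | // val capped = FileWriter("logs" / ("app" % maxSize(max = 10L * 1024 * 1024) % maxLogs(5) % ".log"))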
--------------------------------------------------------------------------------
/fileModule/shared/src/main/scala/scribe/file/path/FileNamePart.scala:
--------------------------------------------------------------------------------
1 | package scribe.file.path
2 |
3 | import perfolation._
4 | import scribe.file.FileWriter
5 |
6 | import java.util.Calendar
7 |
8 | trait FileNamePart {
9 | def current(timeStamp: Long): String
10 |
11 | def regex: String
12 |
13 | def before(writer: FileWriter): Unit = {}
14 |
15 | def after(writer: FileWriter): Unit = {}
16 |
17 | def nextValidation(timeStamp: Long): Option[Long] = None
18 | }
19 |
20 | object FileNamePart {
21 | case class Static(s: String) extends FileNamePart {
22 | override def current(timeStamp: Long): String = s
23 |
24 | override def regex: String = s
25 | }
26 |
27 | case object Year extends FileNamePart {
28 | override def current(timeStamp: Long): String = timeStamp.t.year.toString
29 |
30 | override def regex: String = "\\d{4}"
31 |
32 | override def nextValidation(timeStamp: Long): Option[Long] = {
33 | val c = Calendar.getInstance()
34 | c.setTimeInMillis(timeStamp)
35 | c.add(Calendar.YEAR, 1)
36 | c.set(Calendar.DAY_OF_YEAR, 1)
37 | c.set(Calendar.HOUR_OF_DAY, 0)
38 | c.set(Calendar.MINUTE, 0)
39 | c.set(Calendar.SECOND, 0)
40 | c.set(Calendar.MILLISECOND, 0)
41 | Some(c.getTimeInMillis)
42 | }
43 | }
44 |
45 | case object Month extends FileNamePart {
46 | override def current(timeStamp: Long): String = timeStamp.t.m
47 |
48 | override def regex: String = "\\d{2}"
49 |
50 | override def nextValidation(timeStamp: Long): Option[Long] = {
51 | val c = Calendar.getInstance()
52 | c.setTimeInMillis(timeStamp)
53 | c.add(Calendar.MONTH, 1)
54 | c.set(Calendar.DAY_OF_MONTH, 1)
55 | c.set(Calendar.HOUR_OF_DAY, 0)
56 | c.set(Calendar.MINUTE, 0)
57 | c.set(Calendar.SECOND, 0)
58 | c.set(Calendar.MILLISECOND, 0)
59 | Some(c.getTimeInMillis)
60 | }
61 | }
62 |
63 | case object Day extends FileNamePart {
64 | override def current(timeStamp: Long): String = timeStamp.t.d
65 |
66 | override def regex: String = "\\d{2}"
67 |
68 | override def nextValidation(timeStamp: Long): Option[Long] = {
69 | val c = Calendar.getInstance()
70 | c.setTimeInMillis(timeStamp)
71 | c.add(Calendar.DAY_OF_MONTH, 1)
72 | c.set(Calendar.HOUR_OF_DAY, 0)
73 | c.set(Calendar.MINUTE, 0)
74 | c.set(Calendar.SECOND, 0)
75 | c.set(Calendar.MILLISECOND, 0)
76 | Some(c.getTimeInMillis)
77 | }
78 | }
79 |
80 | case object Hour extends FileNamePart {
81 | override def current(timeStamp: Long): String = timeStamp.t.H
82 |
83 | override def regex: String = "\\d{2}"
84 |
85 | override def nextValidation(timeStamp: Long): Option[Long] = {
86 | val c = Calendar.getInstance()
87 | c.setTimeInMillis(timeStamp)
88 | c.add(Calendar.HOUR_OF_DAY, 1)
89 | c.set(Calendar.MINUTE, 0)
90 | c.set(Calendar.SECOND, 0)
91 | c.set(Calendar.MILLISECOND, 0)
92 | Some(c.getTimeInMillis)
93 | }
94 | }
95 |
96 | case object Minute extends FileNamePart {
97 | override def current(timeStamp: Long): String = timeStamp.t.M
98 |
99 | override def regex: String = "\\d{2}"
100 |
101 | override def nextValidation(timeStamp: Long): Option[Long] = {
102 | val c = Calendar.getInstance()
103 | c.setTimeInMillis(timeStamp)
104 | c.add(Calendar.MINUTE, 1)
105 | c.set(Calendar.SECOND, 0)
106 | c.set(Calendar.MILLISECOND, 0)
107 | Some(c.getTimeInMillis)
108 | }
109 | }
110 |
111 | case object Second extends FileNamePart {
112 | override def current(timeStamp: Long): String = timeStamp.t.S
113 |
114 | override def regex: String = "\\d{2}"
115 |
116 | override def nextValidation(timeStamp: Long): Option[Long] = {
117 | val c = Calendar.getInstance()
118 | c.setTimeInMillis(timeStamp)
119 | c.add(Calendar.SECOND, 1)
120 | c.set(Calendar.MILLISECOND, 0)
121 | Some(c.getTimeInMillis)
122 | }
123 | }
124 | }
--------------------------------------------------------------------------------
/fileModule/shared/src/main/scala/scribe/file/path/MaxLogs.scala:
--------------------------------------------------------------------------------
1 | package scribe.file.path
2 |
3 | import scribe.file.FileWriter
4 | import scribe.util.Time
5 |
6 | import scala.concurrent.duration._
7 |
8 | case class MaxLogs(maxLogs: Int, checkFrequency: FiniteDuration) extends FileNamePart {
9 | private var nextRun: Long = 0L
10 |
11 | override def current(timeStamp: Long): String = ""
12 |
13 | override def regex: String = ""
14 |
15 | override def after(writer: FileWriter): Unit = if (Time() >= nextRun) {
16 | writer.list().dropRight(maxLogs).foreach { file =>
17 | if (!file.delete()) {
18 | file.deleteOnExit()
19 | }
20 | }
21 | nextRun = Time() + checkFrequency.toMillis
22 | }
23 | }
--------------------------------------------------------------------------------
/fileModule/shared/src/main/scala/scribe/file/path/MaxSize.scala:
--------------------------------------------------------------------------------
1 | package scribe.file.path
2 |
3 | import scribe.file.{FileWriter, LogFile}
4 | import scribe.util.Time
5 |
6 | import java.io.File
7 |
8 | case class MaxSize(maxSizeInBytes: Long, separator: String) extends FileNamePart {
9 | private val threadLocal = new ThreadLocal[Int] {
10 | override def initialValue(): Int = 0
11 | }
12 |
13 | override def current(timeStamp: Long): String = {
14 | val i = threadLocal.get()
15 | if (i == 0) {
16 | ""
17 | } else {
18 | s"$separator$i"
19 | }
20 | }
21 |
22 | override def regex: String = s"([$separator]\\d*)?"
23 |
24 | override def before(writer: FileWriter): Unit = {
25 | val logFile = LogFile(writer)
26 | if (logFile.size >= maxSizeInBytes && logFile.file.exists()) {
27 | val path = fileFor(writer, 1)
28 | val lastModified = logFile.file.lastModified()
29 | rollPaths(writer)
30 | LogFile.move(logFile, path)
31 | path.setLastModified(lastModified)
32 | }
33 | }
34 |
35 | private def rollPaths(writer: FileWriter, i: Int = 1): Unit = {
36 | val path = fileFor(writer, i)
37 | if (path.exists()) {
38 | rollPaths(writer, i + 1)
39 | val nextPath = fileFor(writer, i + 1)
40 | val lastModified = path.lastModified()
41 | LogFile.copy(path, nextPath)
42 | LogFile.truncate(path)
43 | nextPath.setLastModified(lastModified)
44 | }
45 | }
46 |
47 | private def fileFor(writer: FileWriter, i: Int): File = {
48 | threadLocal.set(i)
49 | try {
50 | writer.pathBuilder.file(Time())
51 | } finally {
52 | threadLocal.remove()
53 | }
54 | }
55 | }
56 | object MaxSize {
57 | val OneHundredMeg: Long = 100000000L
58 | }
--------------------------------------------------------------------------------
/fileModule/shared/src/main/scala/scribe/file/path/PathPart.scala:
--------------------------------------------------------------------------------
1 | package scribe.file.path
2 |
3 | import scribe.file.{FileWriter, string2FileNamePart}
4 | import scribe.util.Time
5 |
6 | import java.io.{File, FilenameFilter}
7 |
8 | trait PathPart {
9 | def current(previous: String, timeStamp: Long): String
10 |
11 | def all(previous: String): Iterator[String]
12 |
13 | def before(writer: FileWriter): Unit = {}
14 | def after(writer: FileWriter): Unit = {}
15 |
16 | def nextValidation(timeStamp: Long): Option[Long] = None
17 | }
18 |
19 | object PathPart {
20 | case object Root extends PathPart {
21 | override def current(previous: String, timeStamp: Long): String = "/"
22 |
23 | override def all(previous: String): Iterator[String] = Iterator("/")
24 | }
25 |
26 | case class SetPath(path: String) extends PathPart {
27 | override def current(previous: String, timeStamp: Long): String = path
28 |
29 | override def all(previous: String): Iterator[String] = Iterator(path)
30 | }
31 |
32 | case class FileName(parts: List[FileNamePart]) extends PathPart with FileNamePart {
33 | private var fileName: String = _
34 |
35 | override def current(previous: String, timeStamp: Long): String = {
36 | val c = parts.map(_.current(timeStamp)).mkString
37 | s"$previous/$c"
38 | }
39 |
40 | override def all(previous: String): Iterator[String] = {
41 | val regex = parts.map(_.regex).mkString
42 |
43 | val previousFile = new File(previous)
44 | if (previousFile.exists()) {
45 | previousFile.listFiles(new FilenameFilter {
46 | override def accept(dir: File, name: String): Boolean = name.matches(regex)
47 | }).iterator.map(_.getAbsolutePath)
48 | } else {
49 | Nil.iterator
50 | }
51 | }
52 |
53 | override def before(writer: FileWriter): Unit = {
54 | val timeStamp = Time()
55 | val updated = parts.map(_.current(timeStamp)).mkString
56 | val changed = updated != fileName
57 | fileName = updated
58 | if (changed) {
59 | writer.updatePath()
60 | }
61 | parts.foreach(_.before(writer))
62 | }
63 |
64 |
65 | override def after(writer: FileWriter): Unit = {
66 | parts.foreach(_.after(writer))
67 | }
68 |
69 | override def current(timeStamp: Long): String = parts.map(_.current(timeStamp)).mkString
70 |
71 | override def regex: String = parts.map(_.regex).mkString
72 |
73 | def %(part: FileNamePart): FileName = copy(parts ::: List(part))
74 | def %(s: String): FileName = %(string2FileNamePart(s))
75 |
76 | override def nextValidation(timeStamp: Long): Option[Long] = parts.flatMap(_.nextValidation(timeStamp)) match {
77 | case Nil => None
78 | case l => Some(l.min)
79 | }
80 | }
81 | }
82 |
83 |
84 |
85 |
--------------------------------------------------------------------------------
/fileModule/shared/src/main/scala/scribe/file/path/Rolling.scala:
--------------------------------------------------------------------------------
1 | package scribe.file.path
2 |
3 | import scribe.file.FileWriter
4 | import scribe.util.Time
5 |
6 | import java.io.File
7 | import scala.concurrent.duration._
8 |
9 | case class Rolling(parts: List[FileNamePart],
10 | action: (File, File) => Unit,
11 | minimumValidationFrequency: FiniteDuration) extends FileNamePart {
12 | private lazy val partsRegex = parts.map(_.regex).mkString
13 | private val threadLocal = new ThreadLocal[Rolling.Mode] {
14 | override def initialValue(): Rolling.Mode = Rolling.Standard
15 | }
16 |
17 | override def current(timeStamp: Long): String = threadLocal.get() match {
18 | case Rolling.Standard | Rolling.OnlyCurrent => ""
19 | case Rolling.OnlyRolling => parts.map(_.current(timeStamp)).mkString
20 | }
21 |
22 | override def regex: String = threadLocal.get() match {
23 | case Rolling.Standard => s"($partsRegex)?"
24 | case Rolling.OnlyCurrent => ""
25 | case Rolling.OnlyRolling => partsRegex
26 | }
27 |
28 | private object nextRunFor {
29 | private var map: Map[FileWriter, Long] = Map.empty
30 |
31 | def apply(writer: FileWriter): Long = synchronized {
32 | map.getOrElse(writer, 0L)
33 | }
34 |
35 | def update(writer: FileWriter, nextRun: Long): Unit = synchronized {
36 | map += writer -> nextRun
37 | }
38 | }
39 |
40 | override def before(writer: FileWriter): Unit = if (Time() >= nextRunFor(writer)) {
41 | val currentPaths: List[File] = {
42 | threadLocal.set(Rolling.OnlyCurrent)
43 | try {
44 | writer.list()
45 | } finally {
46 | threadLocal.remove()
47 | }
48 | }
49 | val existing: File = {
50 | threadLocal.set(Rolling.OnlyRolling)
51 | try {
52 | writer.resolveFile()
53 | } finally {
54 | threadLocal.remove()
55 | }
56 | }
57 |
58 | currentPaths.foreach { cp =>
59 | val lastModified = cp.lastModified()
60 | val rp = rollingFile(lastModified, writer)
61 | if (rp != existing && !rp.exists()) {
62 | action(cp, rp)
63 | }
64 | }
65 |
66 | nextRunFor(writer) = (Time() + minimumValidationFrequency.toMillis :: parts.flatMap(_.nextValidation(Time()))).min
67 | }
68 |
69 | def rollingFile(timeStamp: Long, writer: FileWriter): File = {
70 | threadLocal.set(Rolling.OnlyRolling)
71 | try {
72 | writer.pathBuilder.file(timeStamp)
73 | } finally {
74 | threadLocal.remove()
75 | }
76 | }
77 | }
78 |
79 | object Rolling {
80 | sealed trait Mode
81 |
82 | case object Standard extends Mode
83 | case object OnlyCurrent extends Mode
84 | case object OnlyRolling extends Mode
85 | }
--------------------------------------------------------------------------------
/fileModule/shared/src/main/scala/scribe/file/writer/IOLogFileWriter.scala:
--------------------------------------------------------------------------------
1 | package scribe.file.writer
2 |
3 | import scribe.file.LogFile
4 |
5 | import java.io.{File, FileWriter, PrintWriter}
6 |
7 | class IOLogFileWriter(lf: LogFile) extends LogFileWriter {
8 | private lazy val file: File = lf.file
9 | private lazy val writer: PrintWriter = new PrintWriter(new FileWriter(file, lf.append))
10 |
11 | override def write(output: String): Unit = if (output == None.orNull) {
12 | writer.write("null")
13 | } else {
14 | writer.write(output)
15 | }
16 |
17 | override def flush(): Unit = writer.flush()
18 |
19 | override def dispose(): Unit = writer.close()
20 | }
--------------------------------------------------------------------------------
/fileModule/shared/src/main/scala/scribe/file/writer/LogFileWriter.scala:
--------------------------------------------------------------------------------
1 | package scribe.file.writer
2 |
3 | import scribe.file.LogFile
4 |
5 | trait LogFileWriter {
6 | def write(output: String): Unit
7 |
8 | def flush(): Unit
9 |
10 | def dispose(): Unit
11 | }
12 |
13 | object LogFileWriter {
14 | var default: LogFile => LogFileWriter = new IOLogFileWriter(_)
15 |
16 | def apply(logFile: LogFile): LogFileWriter = default(logFile)
17 | }
--------------------------------------------------------------------------------
/fileModule/shared/src/main/scala/scribe/file/writer/NIOLogFileWriter.scala:
--------------------------------------------------------------------------------
1 | package scribe.file.writer
2 |
3 | import scribe.file.LogFile
4 |
5 | import java.nio.ByteBuffer
6 | import java.nio.channels.FileChannel
7 | import java.nio.file.{OpenOption, StandardOpenOption}
8 | import scala.annotation.tailrec
9 |
10 | class NIOLogFileWriter(lf: LogFile) extends LogFileWriter {
11 | private lazy val options: List[OpenOption] = if (lf.append) {
12 | List(StandardOpenOption.WRITE, StandardOpenOption.APPEND, StandardOpenOption.CREATE)
13 | } else {
14 | List(StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE)
15 | }
16 | private lazy val channel: FileChannel = FileChannel.open(lf.file.toPath, options: _*)
17 |
18 | override def write(output: String): Unit = {
19 | val o = if (output == None.orNull) {
20 | "null"
21 | } else {
22 | output
23 | }
24 | val bytes = o.getBytes(lf.charset)
25 | val buffer = ByteBuffer.wrap(bytes)
26 | writeBuffer(buffer)
27 | buffer.clear()
28 | }
29 |
30 | @tailrec
31 | private def writeBuffer(buffer: ByteBuffer): Unit = if (buffer.hasRemaining) {
32 | channel.write(buffer)
33 | writeBuffer(buffer)
34 | }
35 |
36 | override def flush(): Unit = channel.force(false)
37 |
38 | override def dispose(): Unit = if (channel.isOpen) {
39 | channel.close()
40 | }
41 | }
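A minimal sketch of swapping the default writer implementation: LogFileWriter.default is a mutable factory, so the NIO-backed writer above can replace the java.io-based default (assuming the reassignment happens before any log file is opened).

import scribe.file.writer.{LogFileWriter, NIOLogFileWriter}

object UseNioWriter {
  def main(args: Array[String]): Unit = {
    // Every LogFile created after this point is backed by a FileChannel
    LogFileWriter.default = lf => new NIOLogFileWriter(lf)
  }
}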
--------------------------------------------------------------------------------
/jitpack.yml:
--------------------------------------------------------------------------------
1 | install:
2 | - sbt +macrosJS/publishM2 +macrosJVM/publishM2 +coreJS/publishM2 +coreJVM/publishM2 +slf4j/publishM2 +slf4j2/publishM2 +slack/publishM2 +logstash/publishM2
3 |
--------------------------------------------------------------------------------
/json/shared/src/main/scala/scribe/json/ScribeJsonSupport.scala:
--------------------------------------------------------------------------------
1 | package scribe.json
2 |
3 | import scribe.LogRecord
4 | import scribe.message.LoggableMessage
5 | import scribe.output.format.OutputFormat
6 | import scribe.output.{LogOutput, TextOutput}
7 | import scribe.writer.Writer
8 |
9 | import scala.language.implicitConversions
10 |
11 | trait ScribeJsonSupport[J] {
12 | implicit def json2LoggableMessage(json: J): LoggableMessage =
13 | LoggableMessage[J](json => new TextOutput(json2String(json)))(json)
14 |
15 | def json2String(json: J): String
16 |
17 | def logRecord2Json(record: LogRecord): J
18 |
19 | def jsonExtras(record: LogRecord, json: J): J = json
20 |
21 | def writer(writer: Writer): Writer = new Writer {
22 | override def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit = {
23 | val json = logRecord2Json(record)
24 | val jsonString = json2String(json)
25 | writer.write(record, new TextOutput(jsonString), outputFormat)
26 | }
27 | }
28 | }
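A minimal sketch of wiring JSON output through a handler, assuming scribe's standard ConsoleWriter and the Circe support defined later in this repository: each record is serialized by logRecord2Json and written as a single JSON string.

import scribe.Logger
import scribe.json.ScribeCirceJsonSupport
import scribe.writer.ConsoleWriter

object JsonLoggingSketch {
  def main(args: Array[String]): Unit = {
    // Wrap the console writer so every record is emitted as one JSON line
    Logger.root
      .clearHandlers()
      .withHandler(writer = ScribeCirceJsonSupport.writer(ConsoleWriter))
      .replace()
    scribe.info("structured message")
  }
}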
--------------------------------------------------------------------------------
/jsonCirce/shared/src/main/scala/scribe/json/ScribeCirceJsonSupport.scala:
--------------------------------------------------------------------------------
1 | package scribe.json
2 |
3 | import io.circe.Json.Null
4 | import io.circe.syntax.EncoderOps
5 | import io.circe.{Json, JsonObject}
6 | import perfolation.long2Implicits
7 | import scribe.LogRecord
8 | import scribe.mdc.MDC
9 | import scribe.message.Message
10 | import scribe.throwable.Trace
11 |
12 | import io.circe.generic.auto._
13 |
14 | trait ScribeCirceJsonSupport extends ScribeJsonSupport[Json] {
15 | def json2String(json: Json): String = json.noSpaces
16 |
17 | override def logRecord2Json(record: LogRecord): Json = {
18 | val l = record.timeStamp
19 | def trace2Json(trace: Trace): Json = JsonObject(
20 | "className" -> trace.className.asJson,
21 | "message" -> trace.message.asJson,
22 | "elements" -> trace.elements.map { e =>
23 | JsonObject(
24 | "class" -> e.`class`.asJson,
25 | "fileName" -> e.fileName.asJson,
26 | "method" -> e.method.asJson,
27 | "line" -> e.line.asJson
28 | )
29 | }.asJson,
30 | "cause" -> trace.cause.map(trace2Json).asJson
31 | ).asJson
32 | val traces = record.messages.map(_.value).collect {
33 | case trace: Trace => trace2Json(trace)
34 | } match {
35 | case Nil => Null
36 | case t :: Nil => t
37 | case list => list.asJson
38 | }
39 | val messages = record.messages.collect {
40 | case message: Message[_] if !message.value.isInstanceOf[Throwable] => message.value match {
41 | case json: Json => json
42 | case _ => message.logOutput.plainText.asJson
43 | }
44 | } match {
45 | case Nil => Null
46 | case m :: Nil => m
47 | case list => list.toVector.asJson
48 | }
49 | val data = MDC.map ++ record.data
50 | val json = JsonObject(
51 | "level" -> record.level.name.asJson,
52 | "levelValue" -> record.levelValue.asJson,
53 | "message" -> messages,
54 | "fileName" -> record.fileName.asJson,
55 | "className" -> record.className.asJson,
56 | "methodName" -> record.methodName.map(_.asJson).getOrElse(Null),
57 | "line" -> record.line.map(_.asJson).getOrElse(Null),
58 | "column" -> record.column.map(_.asJson).getOrElse(Null),
59 | "data" -> data.toList.map {
60 | case (key, value) => value() match {
61 | case json: Json => key -> json
62 | case any => key -> any.toString.asJson
63 | }
64 | }.asJson,
65 | "mdc" -> MDC.map.map {
66 | case (key, value) => value() match {
67 | case json: Json => key -> json
68 | case any => key -> any.toString.asJson
69 | }
70 | }.asJson,
71 | "trace" -> traces,
72 | "timeStamp" -> l.asJson,
73 | "date" -> l.t.F.asJson,
74 | "time" -> s"${l.t.T}.${l.t.L}${l.t.z}".asJson
75 | ).asJson
76 | jsonExtras(record, json)
77 | }
78 | }
79 |
80 | object ScribeCirceJsonSupport extends ScribeCirceJsonSupport
--------------------------------------------------------------------------------
/jsonCirce/shared/src/test/scala/spec/JsonWriterSpec.scala:
--------------------------------------------------------------------------------
1 | package spec
2 |
3 | import io.circe.Json
4 | import io.circe.parser.parse
5 | import org.scalatest.Inside
6 | import org.scalatest.matchers.should.Matchers
7 | import org.scalatest.wordspec.AnyWordSpec
8 | import scribe.Logger
9 | import scribe.json.ScribeCirceJsonSupport._
10 | import scribe.util.Time
11 | import scribe.writer.CacheWriter
12 |
13 | class JsonWriterSpec extends AnyWordSpec with Matchers with Inside {
14 | "JsonWriter" should {
15 | var time: Long = 1609488000000L
16 | def logger: Logger = Logger("jsonWriterSpec")
17 | val cache = new CacheWriter
18 |
19 | "initialize properly" in {
20 | logger
21 | .orphan()
22 | .withHandler(writer = writer(cache))
23 | .replace()
24 | Time.function = () => time
25 | }
26 |
27 | "log a simple message" in {
28 | cache.clear()
29 | logger.info("Hello, Json!")
30 | cache.output.length should be(1)
31 | inside(parse(cache.output.head.plainText).toOption.flatMap(_.asObject)) {
32 | case Some(json) =>
33 | json("line").flatMap(_.asNumber).flatMap(_.toLong) should be(Some(29))
34 | json("fileName").flatMap(_.asString) should be(Some("JsonWriterSpec.scala"))
35 | json("message").flatMap(_.asString) should be(Some("Hello, Json!"))
36 | }
37 | }
38 |
39 | "log a simple message and exception" in {
40 | cache.clear()
41 | time += 1000L * 60 * 60 * 24
42 | logger.warn("Failure, Json!", new RuntimeException("Failure!"))
43 | cache.output.length should be(1)
44 | inside(parse(cache.output.head.plainText).toOption.flatMap(_.asObject)) {
45 | case Some(json) =>
46 | json("date").flatMap(_.asString) should be(Some("2021-01-02"))
47 | json("line").flatMap(_.asNumber).flatMap(_.toLong) should be(Some(42))
48 | json("fileName").flatMap(_.asString) should be(Some("JsonWriterSpec.scala"))
49 | json("message").flatMap(_.asString) should be(Some("Failure, Json!"))
50 | }
51 | }
52 |
53 | "log a JSON message" in {
54 | cache.clear()
55 | time += 1000L * 60 * 60 * 24
56 | logger.info(Json.fromFields(List("message" -> Json.fromString("JSON Message!"))))
57 | cache.records.length should be(1)
58 | val json = cache.records.head.messages.head.value.asInstanceOf[Json]
59 | inside(json.asObject) {
60 | case Some(jso) =>
61 | jso("message").flatMap(_.asString) should be(Some("JSON Message!"))
62 | }
63 | }
64 | }
65 | }
--------------------------------------------------------------------------------
/jsonFabric/jvm/src/test/scala/spec/JsonWriterSpec.scala:
--------------------------------------------------------------------------------
1 | package spec
2 |
3 | import fabric._
4 | import fabric.io.{Format, JsonParser}
5 | import org.scalatest.matchers.should.Matchers
6 | import org.scalatest.wordspec.AnyWordSpec
7 | import scribe.Logger
8 | import scribe.json.ScribeFabricJsonSupport._
9 | import scribe.util.Time
10 | import scribe.writer.CacheWriter
11 |
12 | class JsonWriterSpec extends AnyWordSpec with Matchers {
13 | "JsonWriter" should {
14 | var time: Long = 1609488000000L
15 | def logger: Logger = Logger("jsonWriterSpec")
16 | val cache = new CacheWriter
17 |
18 | "initialize properly" in {
19 | logger
20 | .orphan()
21 | .withHandler(writer = writer(cache))
22 | .replace()
23 | Time.function = () => time
24 | }
25 | "log a simple message" in {
26 | cache.clear()
27 | logger.info("Hello, Json!")
28 | cache.output.length should be(1)
29 | val json = JsonParser(cache.output.head.plainText, Format.Json)
30 | json("date").asString should be("2021-01-01")
31 | json("line").asInt should be(27)
32 | json("fileName").asString should be("JsonWriterSpec.scala")
33 | json("message") should be(Str("Hello, Json!"))
34 | }
35 | "log a simple message and exception" in {
36 | cache.clear()
37 | time += 1000L * 60 * 60 * 24
38 | logger.warn("Failure, Json!", new RuntimeException("Failure!"))
39 | cache.output.length should be(1)
40 | val json = JsonParser(cache.output.head.plainText, Format.Json)
41 | json("date").asString should be("2021-01-02")
42 | json("line").asInt should be(38)
43 | json("fileName").asString should be("JsonWriterSpec.scala")
44 | json("message") should be(Str("Failure, Json!"))
45 | }
46 | "log a JSON message" in {
47 | cache.clear()
48 | time += 1000L * 60 * 60 * 24
49 | logger.info(obj(
50 | "message" -> "JSON Message!"
51 | ))
52 | cache.records.length should be(1)
53 | val json = cache.records.head.messages.head.value.asInstanceOf[Json]
54 | json("message") should be(Str("JSON Message!"))
55 | }
56 | }
57 | }
--------------------------------------------------------------------------------
/jsonFabric/shared/src/main/scala/scribe/json/ScribeFabricJsonSupport.scala:
--------------------------------------------------------------------------------
1 | package scribe.json
2 |
3 | import fabric._
4 | import fabric.rw._
5 | import fabric.io.JsonFormatter
6 | import scribe.LogRecord
7 | import scribe.mdc.MDC
8 | import scribe.message.Message
9 | import scribe.throwable.{Trace, TraceElement}
10 | import perfolation._
11 |
12 | trait ScribeFabricJsonSupport extends ScribeJsonSupport[Json] {
13 | private implicit val traceElementRW: RW[TraceElement] = RW.gen
14 | private implicit val traceRW: RW[Trace] = RW.gen
15 |
16 | override def json2String(json: Json): String = JsonFormatter.Compact(json)
17 |
18 | override def logRecord2Json(record: LogRecord): Json = {
19 | val l = record.timeStamp
20 | val traces = record.messages.map(_.value).collect {
21 | case trace: Trace => trace
22 | } match {
23 | case Nil => Null
24 | case t :: Nil => t.json
25 | case list => list.json
26 | }
27 | val messages = record.messages.collect {
28 | case message: Message[_] if !message.value.isInstanceOf[Throwable] => message.value match {
29 | case json: Json => json
30 | case _ => Str(message.logOutput.plainText)
31 | }
32 | } match {
33 | case Nil => Null
34 | case m :: Nil => m
35 | case list => Arr(list.toVector)
36 | }
37 | val data = MDC.map ++ record.data
38 | val json = obj(
39 | "level" -> record.level.name,
40 | "levelValue" -> record.levelValue,
41 | "message" -> messages,
42 | "fileName" -> record.fileName,
43 | "className" -> record.className,
44 | "methodName" -> record.methodName.map(_.json).getOrElse(Null),
45 | "line" -> record.line.map(_.json).getOrElse(Null),
46 | "column" -> record.column.map(_.json).getOrElse(Null),
47 | "data" -> data.map {
48 | case (key, value) => value() match {
49 | case json: Json => key -> json
50 | case any => key -> str(any.toString)
51 | }
52 | },
53 | "mdc" -> MDC.map.map {
54 | case (key, value) => value() match {
55 | case json: Json => key -> json
56 | case any => key -> str(any.toString)
57 | }
58 | },
59 | "trace" -> traces,
60 | "timeStamp" -> l,
61 | "date" -> l.t.F,
62 | "time" -> s"${l.t.T}.${l.t.L}${l.t.z}"
63 | )
64 | jsonExtras(record, json)
65 | }
66 | }
67 |
68 | object ScribeFabricJsonSupport extends ScribeFabricJsonSupport
--------------------------------------------------------------------------------
/local.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sbt +macrosJS/publishLocal +macrosJVM/publishLocal +coreJS/publishLocal +coreJVM/publishLocal +slf4j/publishLocal +slf4j18/publishLocal +slack/publishLocal +logstash/publishLocal ++2.11.12 macrosNative/publishLocal coreNative/publishLocal
--------------------------------------------------------------------------------
/log4j/src/main/resources/META-INF/services/org.apache.logging.log4j.spi.Provider:
--------------------------------------------------------------------------------
1 | scribe.ScribeProvider
--------------------------------------------------------------------------------
/log4j/src/main/scala/scribe/ScribeLoggerContext.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import org.apache.logging.log4j.message.MessageFactory
4 | import org.apache.logging.log4j.spi.{ExtendedLogger, LoggerContext, LoggerRegistry}
5 |
6 | object ScribeLoggerContext extends LoggerContext {
7 | private lazy val registry = new LoggerRegistry[ExtendedLogger]()
8 |
9 | override def getExternalContext: AnyRef = None.orNull
10 |
11 | override def getLogger(name: String): ExtendedLogger = {
12 | if (registry.hasLogger(name)) {
13 | registry.getLogger(name)
14 | } else {
15 | val logger = Logger(name)
16 | val l = Log4JLogger(logger.id)
17 | registry.putIfAbsent(name, None.orNull, l)
18 | l
19 | }
20 | }
21 |
22 | override def getLogger(name: String, messageFactory: MessageFactory): ExtendedLogger = {
23 | if (registry.hasLogger(name, messageFactory)) {
24 | registry.getLogger(name, messageFactory)
25 | } else {
26 | val logger = Logger(name)
27 | val l = Log4JLogger(logger.id)
28 | registry.putIfAbsent(name, messageFactory, l)
29 | l
30 | }
31 | }
32 |
33 | override def hasLogger(name: String): Boolean = registry.hasLogger(name)
34 |
35 | override def hasLogger(name: String,
36 | messageFactory: MessageFactory): Boolean = registry.hasLogger(name, messageFactory)
37 |
38 | override def hasLogger(name: String,
39 | messageFactoryClass: Class[_ <: MessageFactory]): Boolean =
40 | registry.hasLogger(name, messageFactoryClass)
41 | }
--------------------------------------------------------------------------------
/log4j/src/main/scala/scribe/ScribeLoggerContextFactory.scala:
--------------------------------------------------------------------------------
1 | package scribe
2 |
3 | import org.apache.logging.log4j.spi.{LoggerContext, LoggerContextFactory}
4 |
5 | import java.net.URI
6 |
7 | class ScribeLoggerContextFactory extends LoggerContextFactory {
8 | override def getContext(fqcn: String,
9 | loader: ClassLoader,
10 | externalContext: Any,
11 | currentContext: Boolean): LoggerContext = ScribeLoggerContext
12 |
13 | override def getContext(fqcn: String,
14 | loader: ClassLoader,
15 | externalContext: Any,
16 | currentContext: Boolean,
17 | configLocation: URI,
18 | name: String): LoggerContext = ScribeLoggerContext
19 |
20 | override def removeContext(context: LoggerContext): Unit = {}
21 | }
--------------------------------------------------------------------------------
/logstash/src/main/scala/scribe/logstash/LogstashRecord.scala:
--------------------------------------------------------------------------------
1 | package scribe.logstash
2 |
3 | import fabric.rw._
4 |
5 | case class LogstashRecord(messages: List[String],
6 | service: String,
7 | level: String,
8 | value: Double,
9 | fileName: String,
10 | className: String,
11 | methodName: Option[String],
12 | line: Option[Int],
13 | thread: String,
14 | `@timestamp`: String,
15 | mdc: Map[String, String],
16 | data: Map[String, String])
17 |
18 | object LogstashRecord {
19 | implicit val rw: RW[LogstashRecord] = RW.gen
20 | }
--------------------------------------------------------------------------------
/logstash/src/main/scala/scribe/logstash/LogstashWriter.scala:
--------------------------------------------------------------------------------
1 | package scribe.logstash
2 |
3 | import fabric.rw._
4 | import perfolation._
5 | import rapid.Task
6 | import scribe.LogRecord
7 | import scribe.mdc.MDC
8 | import scribe.output.LogOutput
9 | import scribe.output.format.OutputFormat
10 | import scribe.writer.Writer
11 | import spice.http.HttpResponse
12 | import spice.http.client.HttpClient
13 | import spice.http.content.Content
14 | import spice.net.{ContentType, URL}
15 |
16 | case class LogstashWriter(url: URL,
17 | service: String,
18 | additionalFields: Map[String, String] = Map.empty,
19 | asynchronous: Boolean = true) extends Writer {
20 | private lazy val client = HttpClient.url(url).post
21 |
22 | override def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit = {
23 | val task = log(record) // lazy Task: nothing is sent until the task is run
24 | if (!asynchronous) {
25 | task.sync() // block until the HTTP request completes
26 | } else task.start() // fire-and-forget when asynchronous
27 | }
28 |
29 | def log(record: LogRecord): Task[HttpResponse] = {
30 | val l = record.timeStamp
31 | val timestamp = s"${l.t.F}T${l.t.T}.${l.t.L}${l.t.z}"
32 | val r: LogstashRecord = LogstashRecord(
33 | messages = record.messages.map(_.logOutput.plainText),
34 | service = service,
35 | level = record.level.name,
36 | value = record.levelValue,
37 | fileName = record.fileName,
38 | className = record.className,
39 | methodName = record.methodName,
40 | line = record.line,
41 | thread = record.thread.getName,
42 | `@timestamp` = timestamp,
43 | mdc = MDC.map.map {
44 | case (key, function) => key -> function().toString
45 | },
46 | data = record.data.map {
47 | case (key, function) => key -> function().toString
48 | }
49 | )
50 |
51 | val json = r.json
52 | val additional = additionalFields.json
53 |
54 | val content = Content.json(json.merge(additional))
55 | client.content(content).send()
56 | }
57 | }
58 |
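A minimal sketch of registering the writer above, assuming a placeholder URL for a Logstash HTTP input; URL.parse and withHandler are used the same way as elsewhere in this repository.

import scribe.Logger
import scribe.logstash.LogstashWriter
import spice.net.URL

object LogstashSketch {
  def main(args: Array[String]): Unit = {
    val writer = LogstashWriter(
      url = URL.parse("http://localhost:8080/logstash"), // placeholder endpoint
      service = "my-service",
      additionalFields = Map("environment" -> "production")
    )
    Logger.root.withHandler(writer = writer).replace()
  }
}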
--------------------------------------------------------------------------------
/migration/src/main/resources/moduload.list:
--------------------------------------------------------------------------------
1 | scribe.Log4JMigration
--------------------------------------------------------------------------------
/migration/src/test/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | log4j.rootCategory=DEBUG, console
2 |
3 | log4j.appender.console = org.apache.log4j.ConsoleAppender
4 | log4j.appender.console.layout = org.apache.log4j.PatternLayout
5 | log4j.appender.console.layout.ConversionPattern = %d{yyyy/MM/dd HH:mm:ss.SSS} [%p] [%c] [%t] %m%n
6 |
7 | log4j.logger.com.example.one = OFF
8 | log4j.logger.com.example.two = TRACE
9 | log4j.logger.com.example.three = DEBUG
10 | log4j.logger.com.example.four = INFO
11 | log4j.logger.com.example.five = WARN
12 | log4j.logger.com.example.six = ERROR
13 | log4j.logger.com.example.seven = FATAL
14 | log4j.logger.com.example = WARN
--------------------------------------------------------------------------------
/migration/src/test/resources/logback.xml:
--------------------------------------------------------------------------------
1 | <configuration>
2 |     <contextListener class="ch.qos.logback.classic.jul.LevelChangePropagator">
3 |         <resetJUL>true</resetJUL>
4 |     </contextListener>
5 |
6 |     <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
7 |         <target>System.out</target>
8 |         <encoder>
9 |             <pattern>%date{dd-MM-yyyy-HH:mm:ss.SSS} %-5level[%thread] %logger{40} - %msg%n</pattern>
10 |         </encoder>
11 |     </appender>
12 |
13 |     <appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
14 |         <file>var/log/application.log</file>
15 |         <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
16 |             <maxIndex>20</maxIndex>
17 |             <fileNamePattern>var/log/application.log.%i</fileNamePattern>
18 |         </rollingPolicy>
19 |         <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
20 |             <maxFileSize>100MB</maxFileSize>
21 |         </triggeringPolicy>
22 |         <encoder>
23 |             <pattern>%date{dd-MM-yyyy-HH:mm:ss.SSS} %-5level[%thread] %logger{1000} - %msg%n</pattern>
24 |         </encoder>
25 |     </appender>
26 |
27 |     <root>
28 |         <appender-ref ref="STDOUT"/>
29 |         <appender-ref ref="FILE"/>
30 |     </root>
31 | </configuration>
--------------------------------------------------------------------------------
/migration/src/test/scala/spec/Log4JMigrationSpec.scala:
--------------------------------------------------------------------------------
1 | package spec
2 |
3 | import org.scalatest.matchers.should.Matchers
4 | import org.scalatest.wordspec.AnyWordSpec
5 | import scribe.Log4JMigration
6 |
7 | import scala.language.implicitConversions
8 |
9 | class Log4JMigrationSpec extends AnyWordSpec with Matchers {
10 | "Log4JMigration" should {
11 | "load existing configuration" in {
12 | Log4JMigration() should be(10)
13 | }
14 | }
15 | }
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=1.10.11
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.3.2")
2 |
3 | addSbtPlugin("org.portable-scala" % "sbt-scala-native-crossproject" % "1.3.2")
4 |
5 | addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.18.2")
6 |
7 | addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.5.7")
8 |
9 | addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.12.2")
10 |
11 | addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.3.1")
12 |
13 | addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.7")
14 |
15 | addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.3.1")
16 |
17 | addSbtPlugin("com.github.sbt" % "sbt-git" % "2.1.0")
18 |
19 | addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.6.5" )
20 |
--------------------------------------------------------------------------------
/publish.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | sbt +clean
6 | sbt +compile
7 | sbt +test
8 | sbt docs/mdoc
9 | sbt +publishSigned
10 | sbt sonatypeBundleRelease
--------------------------------------------------------------------------------
/slack/src/main/scala/scribe/slack/Slack.scala:
--------------------------------------------------------------------------------
1 | package scribe.slack
2 |
3 | import fabric.Json
4 | import fabric.rw._
5 | import rapid.Task
6 | import scribe.format._
7 | import scribe.handler.LogHandler
8 | import scribe.{Level, Logger}
9 | import spice.http.HttpResponse
10 | import spice.http.client.HttpClient
11 | import spice.http.content.Content
12 | import spice.net.URL
13 |
14 | class Slack(serviceHash: String, botName: String) {
15 | private lazy val client = HttpClient.url(URL.parse(s"https://hooks.slack.com/services/$serviceHash")).post
16 |
17 | def request(message: String,
18 | markdown: Boolean = true,
19 | attachments: List[Slack.Attachment] = Nil,
20 | emojiIcon: String = ":fire:"): Task[HttpResponse] = {
21 | val m = SlackMessage(
22 | text = message,
23 | username = botName,
24 | mrkdwn = markdown,
25 | icon_emoji = emojiIcon,
26 | attachments = attachments
27 | )
28 | val json = m.json
29 | val content = Content.json(json)
30 | client.content(content).send()
31 | }
32 | }
33 |
34 | object Slack {
35 | case class Attachment(title: String, text: String)
36 |
37 | object Attachment {
38 | implicit val rw: RW[Attachment] = RW.gen
39 | }
40 |
41 | def configure(serviceHash: String,
42 | botName: String,
43 | emojiIcon: String = ":fire:",
44 | loggerName: String = "slack",
45 | level: Level = Level.Error): Unit = {
46 | val slack = new Slack(serviceHash, botName)
47 | val formatter = formatter"[$threadName] $levelPaddedRight $positionAbbreviated - $messages"
48 |
49 | val handler = LogHandler(
50 | minimumLevel = Some(level),
51 | writer = new SlackWriter(slack, emojiIcon),
52 | formatter = formatter
53 | )
54 | Logger(loggerName).withHandler(handler).replace()
55 | }
56 | }
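A minimal sketch of the configure helper above, using a placeholder service hash standing in for the path portion of a real Slack incoming-webhook URL.

import scribe.{Level, Logger}
import scribe.slack.Slack

object SlackSketch {
  def main(args: Array[String]): Unit = {
    Slack.configure(
      serviceHash = "T00000000/B00000000/XXXXXXXX", // placeholder webhook hash
      botName = "scribe-alerts",
      level = Level.Error
    )
    // Records logged through the configured "slack" logger at Error or above are posted
    Logger("slack").error("Something went wrong")
  }
}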
--------------------------------------------------------------------------------
/slack/src/main/scala/scribe/slack/SlackMessage.scala:
--------------------------------------------------------------------------------
1 | package scribe.slack
2 |
3 | import fabric.rw._
4 |
5 | case class SlackMessage(text: String,
6 | username: String,
7 | mrkdwn: Boolean,
8 | icon_emoji: String,
9 | attachments: List[Slack.Attachment])
10 |
11 | object SlackMessage {
12 | implicit val rw: RW[SlackMessage] = RW.gen
13 | }
--------------------------------------------------------------------------------
/slack/src/main/scala/scribe/slack/SlackWriter.scala:
--------------------------------------------------------------------------------
1 | package scribe.slack
2 |
3 | import scribe.LogRecord
4 | import scribe.output.LogOutput
5 | import scribe.output.format.OutputFormat
6 | import scribe.writer.Writer
7 |
8 | /**
9 | * SlackWriter sends each formatted log record as a message to Slack via the supplied Slack instance.
10 | *
11 | * @param slack Slack instance
12 | * @param emojiIcon the emoji to use when sending messages
13 | */
14 | class SlackWriter(slack: Slack, emojiIcon: String) extends Writer {
15 | override def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit = slack.request(
16 | message = output.plainText,
17 | emojiIcon = emojiIcon
18 | ).start()
19 | }
20 |
--------------------------------------------------------------------------------
/slf4j/src/main/java/org/slf4j/impl/StaticMDCBinder.java:
--------------------------------------------------------------------------------
1 | package org.slf4j.impl;
2 |
3 | import org.slf4j.spi.MDCAdapter;
4 | import scribe.slf4j.ScribeMDCAdapter;
5 | import scribe.slf4j.ScribeMDCAdapter$;
6 |
7 | public class StaticMDCBinder {
8 | public static final StaticMDCBinder SINGLETON = new StaticMDCBinder();
9 |
10 | private StaticMDCBinder() {
11 | }
12 |
13 | public MDCAdapter getMDCA() {
14 | return ScribeMDCAdapter$.MODULE$;
15 | }
16 |
17 | public String getMDCAdapterClassStr() {
18 | return ScribeMDCAdapter.class.getName();
19 | }
20 | }
--------------------------------------------------------------------------------
/slf4j/src/main/scala/org/slf4j/impl/StaticLoggerBinder.scala:
--------------------------------------------------------------------------------
1 | package org.slf4j.impl
2 |
3 | import org.slf4j.ILoggerFactory
4 | import org.slf4j.spi.LoggerFactoryBinder
5 | import scribe.slf4j.ScribeLoggerFactory
6 |
7 | class StaticLoggerBinder private() extends LoggerFactoryBinder {
8 | private val factory = new ScribeLoggerFactory
9 | private val classString = classOf[ScribeLoggerFactory].getName
10 |
11 | override def getLoggerFactory: ILoggerFactory = factory
12 |
13 | override def getLoggerFactoryClassStr: String = classString
14 | }
15 |
16 | object StaticLoggerBinder extends StaticLoggerBinder {
17 | val REQUESTED_API_VERSION = "1.7.15"
18 |
19 | def getSingleton: StaticLoggerBinder = this
20 | }
--------------------------------------------------------------------------------
/slf4j/src/main/scala/scribe/slf4j/SLF4JHelper.scala:
--------------------------------------------------------------------------------
1 | package scribe.slf4j
2 |
3 | import org.slf4j.helpers.FormattingTuple
4 | import scribe.message.LoggableMessage
5 | import scribe._
6 |
7 | object SLF4JHelper {
8 | def log(name: String, level: Level, msg: String, t: Option[Throwable]): Unit = {
9 | val scribeLogger = scribe.Logger(name)
10 | val messages: List[LoggableMessage] = LoggableMessage.string2LoggableMessage(msg) ::
11 | LoggableMessage.throwableList2Messages(t.toList)
12 | val record = LogRecord(
13 | level = level,
14 | levelValue = level.value,
15 | messages = messages,
16 | fileName = "",
17 | className = name,
18 | methodName = None,
19 | line = None,
20 | column = None
21 | )
22 | scribeLogger.log(record)
23 | }
24 |
25 | def logTuple(name: String, level: Level, tuple: FormattingTuple): Unit = {
26 | log(name, level, tuple.getMessage, Option(tuple.getThrowable))
27 | }
28 |
29 | def includes(name: String, level: Level): Boolean = scribe.Logger(name).includes(level)
30 | }
31 |
--------------------------------------------------------------------------------
/slf4j/src/main/scala/scribe/slf4j/ScribeLoggerFactory.scala:
--------------------------------------------------------------------------------
1 | package scribe.slf4j
2 |
3 | import org.slf4j.{ILoggerFactory, Logger, ScribeLoggerAdapter}
4 |
5 | import java.util.concurrent.ConcurrentHashMap
6 |
7 | class ScribeLoggerFactory extends ILoggerFactory {
8 | private val map = new ConcurrentHashMap[String, Logger]
9 |
10 | override def getLogger(name: String): Logger = {
11 | val loggerName = if (name.equalsIgnoreCase(Logger.ROOT_LOGGER_NAME)) {
12 | ""
13 | } else {
14 | name
15 | }
16 | Option(map.get(loggerName)) match {
17 | case Some(logger) => logger
18 | case None => {
19 | val adapter = new ScribeLoggerAdapter(loggerName)
20 | val old = map.putIfAbsent(loggerName, adapter)
21 | Option(old) match {
22 | case Some(a) => a
23 | case None => adapter
24 | }
25 | }
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/slf4j/src/main/scala/scribe/slf4j/ScribeMDCAdapter.scala:
--------------------------------------------------------------------------------
1 | package scribe.slf4j
2 |
3 | import org.slf4j.spi.MDCAdapter
4 | import scribe.mdc.MDC
5 |
6 | import java.util
7 | import scala.jdk.CollectionConverters._
8 |
9 | object ScribeMDCAdapter extends MDCAdapter {
10 | override def put(key: String, `val`: String): Unit = MDC(key) = `val`
11 |
12 | override def get(key: String): String = MDC.get(key).map(_.toString).orNull
13 |
14 | override def remove(key: String): Unit = MDC.remove(key)
15 |
16 | override def clear(): Unit = MDC.clear()
17 |
18 | override def getCopyOfContextMap: util.Map[String, String] = MDC.map.map {
19 | case (key, function) => key -> function().toString
20 | }.asJava
21 |
22 | override def setContextMap(contextMap: util.Map[String, String]): Unit = {
23 | clear()
24 | contextMap.asScala.foreach {
25 | case (key, value) => put(key, value)
26 | }
27 | }
28 | }
--------------------------------------------------------------------------------
/slf4j/src/test/scala/spec/SLF4JSpec.scala:
--------------------------------------------------------------------------------
1 | package spec
2 |
3 | import org.scalatest.matchers.should.Matchers
4 | import org.scalatest.wordspec.AnyWordSpec
5 | import org.slf4j.{LoggerFactory, MDC}
6 | import scribe.format._
7 | import scribe.handler.LogHandler
8 | import scribe.output.LogOutput
9 | import scribe.output.format.{ASCIIOutputFormat, OutputFormat}
10 | import scribe.util.Time
11 | import scribe.writer.Writer
12 | import scribe.{Level, LogRecord, Logger, format}
13 |
14 | import java.util.TimeZone
15 |
16 | class SLF4JSpec extends AnyWordSpec with Matchers {
17 | TimeZone.setDefault(TimeZone.getTimeZone("UTC"))
18 |
19 | private var logs: List[LogRecord] = Nil
20 | private var logOutput: List[String] = Nil
21 | private val writer = new Writer {
22 | override def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit = {
23 | logs = record :: logs
24 | logOutput = output.plainText :: logOutput
25 | }
26 | }
27 | private val recordHolder = LogHandler(
28 | writer = writer,
29 | minimumLevel = Some(Level.Info),
30 | formatter = formatter"$dateFull ${string("[")}$levelColoredPaddedRight${string("]")} ${green(position)} - ${format.messages}$mdc"
31 | )
32 |
33 | "SLF4J" should {
34 | TimeZone.setDefault(TimeZone.getTimeZone("America/Chicago"))
35 |
36 | "set the time to an arbitrary value" in {
37 | OutputFormat.default = ASCIIOutputFormat
38 | Time.function = () => 1542376191920L
39 | }
40 | "remove existing handlers from Root" in {
41 | Logger.root.clearHandlers().replace()
42 | }
43 | "add a testing handler" in {
44 | Logger.root.withHandler(recordHolder).replace()
45 | }
46 | "verify not records are in the RecordHolder" in {
47 | logs.isEmpty should be(true)
48 | }
49 | "log to Scribe" in {
50 | val logger = LoggerFactory.getLogger(getClass)
51 | logger.info("Hello World!")
52 | }
53 | "verify Scribe received the record" in {
54 | logs.size should be(1)
55 | val r = logs.head
56 | r.level should be(Level.Info)
57 | r.logOutput.plainText should be("Hello World!")
58 | r.className should be("spec.SLF4JSpec")
59 | logs = Nil
60 | }
61 | "verify Scribe wrote value" in {
62 | logOutput.size should be(1)
63 | val s = logOutput.head
64 | s should be("2018.11.16 07:49:51:920 [INFO ] spec.SLF4JSpec - Hello World!")
65 | }
66 | "use MDC" in {
67 | MDC.put("name", "John Doe")
68 | val logger = LoggerFactory.getLogger(getClass)
69 | logger.info("A generic name")
70 | logOutput.head should be("2018.11.16 07:49:51:920 [INFO ] spec.SLF4JSpec - A generic name (name: John Doe)")
71 | }
72 | "clear MDC" in {
73 | MDC.clear()
74 | val logger = LoggerFactory.getLogger(getClass)
75 | logger.info("MDC cleared")
76 | logOutput.head should be("2018.11.16 07:49:51:920 [INFO ] spec.SLF4JSpec - MDC cleared")
77 | }
78 | "make sure logging nulls doesn't error" in {
79 | val logger = LoggerFactory.getLogger(getClass)
80 | logger.error(null)
81 | logs.length should be(3)
82 | logOutput.head should be("2018.11.16 07:49:51:920 [ERROR] spec.SLF4JSpec - null")
83 | }
84 | }
85 | }
--------------------------------------------------------------------------------
/slf4j2/src/main/resources/META-INF/services/org.slf4j.spi.SLF4JServiceProvider:
--------------------------------------------------------------------------------
1 | scribe.slf4j.ScribeServiceProvider
--------------------------------------------------------------------------------
/slf4j2/src/main/scala/scribe/slf4j/SLF4JHelper.scala:
--------------------------------------------------------------------------------
1 | package scribe.slf4j
2 |
3 | import org.slf4j.helpers.FormattingTuple
4 | import scribe.message.LoggableMessage
5 | import scribe.{Level, LogRecord}
6 |
7 | object SLF4JHelper {
8 | def log(name: String, level: Level, msg: String, t: Option[Throwable]): Unit = {
9 | val scribeLogger = scribe.Logger(name)
10 | val messages: List[LoggableMessage] = LoggableMessage.string2LoggableMessage(msg) ::
11 | LoggableMessage.throwableList2Messages(t.toList)
12 | val record = LogRecord(
13 | level = level,
14 | levelValue = level.value,
15 | messages = messages,
16 | fileName = "",
17 | className = name,
18 | methodName = None,
19 | line = None,
20 | column = None
21 | )
22 | scribeLogger.log(record)
23 | }
24 |
25 | def logTuple(name: String, level: Level, tuple: FormattingTuple): Unit = {
26 | log(name, level, tuple.getMessage, Option(tuple.getThrowable))
27 | }
28 |
29 | def includes(name: String, level: Level): Boolean = scribe.Logger(name).includes(level)
30 | }
31 |
--------------------------------------------------------------------------------
/slf4j2/src/main/scala/scribe/slf4j/ScribeLoggerFactory.scala:
--------------------------------------------------------------------------------
1 | package scribe.slf4j
2 |
3 | import org.slf4j.{ILoggerFactory, Logger, ScribeLoggerAdapter}
4 |
5 | import java.util.concurrent.ConcurrentHashMap
6 |
7 | object ScribeLoggerFactory extends ILoggerFactory {
8 | private lazy val map = new ConcurrentHashMap[String, Logger]()
9 |
10 | override def getLogger(name: String): Logger = Option(map.get(name)) match {
11 | case Some(logger) => logger
12 | case None =>
13 | val logger = new ScribeLoggerAdapter(name)
14 | val oldInstance = map.putIfAbsent(name, logger)
15 | Option(oldInstance).getOrElse(logger)
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/slf4j2/src/main/scala/scribe/slf4j/ScribeMDCAdapter.scala:
--------------------------------------------------------------------------------
1 | package scribe.slf4j
2 |
3 | import org.slf4j.spi.MDCAdapter
4 | import scribe.mdc.MDC
5 |
6 | import java.util
7 | import scala.jdk.CollectionConverters._
8 |
9 | object ScribeMDCAdapter extends MDCAdapter {
10 | override def put(key: String, `val`: String): Unit = MDC(key) = `val`
11 |
12 | override def get(key: String): String = MDC.get(key).map(_.toString).orNull
13 |
14 | override def remove(key: String): Unit = MDC.remove(key)
15 |
16 | override def clear(): Unit = MDC.clear()
17 |
18 | override def getCopyOfContextMap: util.Map[String, String] = MDC.map.map {
19 | case (key, function) => key -> function().toString
20 | }.asJava
21 |
22 | override def setContextMap(contextMap: util.Map[String, String]): Unit = {
23 | clear()
24 | contextMap.asScala.foreach {
25 | case (key, value) => put(key, value)
26 | }
27 | }
28 |
29 | // TODO: Support stacking
30 | override def pushByKey(key: String, value: String): Unit = put(key, value)
31 |
32 | override def popByKey(key: String): String = {
33 | val value = get(key)
34 | remove(key)
35 | value
36 | }
37 |
38 | override def getCopyOfDequeByKey(key: String): util.Deque[String] = ???
39 |
40 | override def clearDequeByKey(key: String): Unit = remove(key)
41 | }
--------------------------------------------------------------------------------
/slf4j2/src/main/scala/scribe/slf4j/ScribeServiceProvider.scala:
--------------------------------------------------------------------------------
1 | package scribe.slf4j
2 |
3 | import org.slf4j.helpers.BasicMarkerFactory
4 | import org.slf4j.spi.{MDCAdapter, SLF4JServiceProvider}
5 | import org.slf4j.{ILoggerFactory, IMarkerFactory}
6 |
7 | class ScribeServiceProvider extends SLF4JServiceProvider {
8 | private lazy val markerFactory = new BasicMarkerFactory
9 |
10 | override def getLoggerFactory: ILoggerFactory = ScribeLoggerFactory
11 |
12 | override def getMarkerFactory: IMarkerFactory = markerFactory
13 |
14 | override def getMDCAdapter: MDCAdapter = ScribeMDCAdapter
15 |
16 | override def getRequestedApiVersion: String = "2.0.17"
17 |
18 | override def initialize(): Unit = {}
19 | }
--------------------------------------------------------------------------------
/slf4j2/src/test/scala/spec/SLF4JSpec.scala:
--------------------------------------------------------------------------------
1 | package spec
2 |
3 | import org.scalatest.matchers.should.Matchers
4 | import org.scalatest.wordspec.AnyWordSpec
5 | import org.slf4j.{LoggerFactory, MDC}
6 | import scribe.format._
7 | import scribe.handler.LogHandler
8 | import scribe.output.LogOutput
9 | import scribe.output.format.{ASCIIOutputFormat, OutputFormat}
10 | import scribe.util.Time
11 | import scribe.writer.Writer
12 | import scribe.{Level, LogRecord, Logger, format}
13 |
14 | import java.util.TimeZone
15 |
16 | class SLF4JSpec extends AnyWordSpec with Matchers {
17 | TimeZone.setDefault(TimeZone.getTimeZone("UTC"))
18 |
19 | private var logs: List[LogRecord] = Nil
20 | private var logOutput: List[String] = Nil
21 | private val writer = new Writer {
22 | override def write(record: LogRecord, output: LogOutput, outputFormat: OutputFormat): Unit = {
23 | logs = record :: logs
24 | logOutput = output.plainText :: logOutput
25 | }
26 | }
27 | private val recordHolder = LogHandler(
28 | writer = writer,
29 | minimumLevel = Some(Level.Info),
30 | formatter = formatter"$dateFull ${string("[")}$levelColoredPaddedRight${string("]")} ${green(position)} - ${format.messages}$mdc"
31 | )
32 |
33 | "SLF4J" should {
34 | TimeZone.setDefault(TimeZone.getTimeZone("America/Chicago"))
35 |
36 | "set the time to an arbitrary value" in {
37 | OutputFormat.default = ASCIIOutputFormat
38 | Time.function = () => 1542376191920L
39 | }
40 | "remove existing handlers from Root" in {
41 | Logger.root.clearHandlers().replace()
42 | }
43 | "add a testing handler" in {
44 | Logger.root.withHandler(recordHolder).replace()
45 | }
46 | "verify not records are in the RecordHolder" in {
47 | logs.isEmpty should be(true)
48 | }
49 | "log to Scribe" in {
50 | val logger = LoggerFactory.getLogger(getClass)
51 | logger.info("Hello World!")
52 | }
53 | "verify Scribe received the record" in {
54 | logs.size should be(1)
55 | val r = logs.head
56 | r.level should be(Level.Info)
57 | r.logOutput.plainText should be("Hello World!")
58 | r.className should be("spec.SLF4JSpec")
59 | logs = Nil
60 | }
61 | "verify Scribe wrote value" in {
62 | logOutput.size should be(1)
63 | val s = logOutput.head
64 | s should be("2018.11.16 07:49:51:920 [INFO ] spec.SLF4JSpec - Hello World!")
65 | }
66 | "use MDC" in {
67 | MDC.put("name", "John Doe")
68 | val logger = LoggerFactory.getLogger(getClass)
69 | logger.info("A generic name")
70 | logOutput.head should be("2018.11.16 07:49:51:920 [INFO ] spec.SLF4JSpec - A generic name (name: John Doe)")
71 | }
72 | "clear MDC" in {
73 | MDC.clear()
74 | val logger = LoggerFactory.getLogger(getClass)
75 | logger.info("MDC cleared")
76 | logOutput.head should be("2018.11.16 07:49:51:920 [INFO ] spec.SLF4JSpec - MDC cleared")
77 | }
78 | "make sure logging nulls doesn't error" in {
79 | val logger = LoggerFactory.getLogger(getClass)
80 | logger.error(null)
81 | logs.length should be(3)
82 | logOutput.head should be("2018.11.16 07:49:51:920 [ERROR] spec.SLF4JSpec - null")
83 | }
84 | }
85 | }
--------------------------------------------------------------------------------
/test.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | sbt +clean +compile
6 | sbt +test
--------------------------------------------------------------------------------
/work/benchmark/2018.01.31.benchmarks.txt:
--------------------------------------------------------------------------------
1 | [info] Benchmark Mode Cnt Score Error Units
2 | [info] LoggingSpeedBenchmark.withLog4j avgt 200 1533.194 ± 18.887 ns/op
3 | [info] LoggingSpeedBenchmark.withLog4jTrace avgt 200 28427.100 ± 98.948 ns/op
4 | [info] LoggingSpeedBenchmark.withScalaLogging avgt 200 19867.522 ± 95.992 ns/op
5 | [info] LoggingSpeedBenchmark.withScribe avgt 200 824.204 ± 7.595 ns/op
--------------------------------------------------------------------------------
/work/benchmark/2018.04.24.benchmarks.txt:
--------------------------------------------------------------------------------
1 | [info] Benchmark Mode Cnt Score Error Units
2 | [info] LoggingSpeedBenchmark.withLog4j avgt 200 1706.235 ± 9.163 ns/op
3 | [info] LoggingSpeedBenchmark.withLog4jTrace avgt 200 28388.901 ± 100.856 ns/op
4 | [info] LoggingSpeedBenchmark.withScalaLogging avgt 200 19773.381 ± 39.093 ns/op
5 | [info] LoggingSpeedBenchmark.withScribe avgt 200 576.088 ± 9.547 ns/op
--------------------------------------------------------------------------------
/work/benchmark/2018.05.17.benchmarks.txt:
--------------------------------------------------------------------------------
1 | [info] Benchmark Mode Cnt Score Error Units
2 | [info] LoggingSpeedBenchmark.withLog4j avgt 200 1675.343 ± 13.589 ns/op
3 | [info] LoggingSpeedBenchmark.withLog4jTrace avgt 200 28214.753 ± 69.518 ns/op
4 | [info] LoggingSpeedBenchmark.withScalaLogging avgt 200 19611.053 ± 43.314 ns/op
5 | [info] LoggingSpeedBenchmark.withScribe avgt 200 558.674 ± 5.221 ns/op
--------------------------------------------------------------------------------
/work/benchmark/2018.08.27.benchmarks.txt:
--------------------------------------------------------------------------------
1 | [info] Benchmark Mode Cnt Score Error Units
2 | [info] LoggingSpeedBenchmark.withLog4j avgt 200 1702.785 ± 6.943 ns/op
3 | [info] LoggingSpeedBenchmark.withLog4jTrace avgt 200 28612.637 ± 148.614 ns/op
4 | [info] LoggingSpeedBenchmark.withLog4s avgt 200 19469.903 ± 64.273 ns/op
5 | [info] LoggingSpeedBenchmark.withLogback avgt 200 21653.772 ± 124.929 ns/op
6 | [info] LoggingSpeedBenchmark.withScalaLogging avgt 200 19894.954 ± 85.863 ns/op
7 | [info] LoggingSpeedBenchmark.withScribe avgt 200 470.610 ± 5.874 ns/op
8 | [info] LoggingSpeedBenchmark.withScribeAsync avgt 200 60.668 ± 1.783 ns/op
9 | [info] LoggingSpeedBenchmark.withTinyLog avgt 200 3679.658 ± 11.007 ns/op
--------------------------------------------------------------------------------
/work/benchmark/2018.12.02.benchmarks.txt:
--------------------------------------------------------------------------------
1 | [info] Benchmark Mode Cnt Score Error Units
2 | [info] LoggingSpeedBenchmark.withLog4j avgt 200 1701.044 ± 11.777 ns/op
3 | [info] LoggingSpeedBenchmark.withLog4jTrace avgt 200 28466.191 ± 63.818 ns/op
4 | [info] LoggingSpeedBenchmark.withLog4s avgt 200 19485.302 ± 41.434 ns/op
5 | [info] LoggingSpeedBenchmark.withLogback avgt 200 21443.637 ± 39.421 ns/op
6 | [info] LoggingSpeedBenchmark.withScalaLogging avgt 200 19813.989 ± 40.043 ns/op
7 | [info] LoggingSpeedBenchmark.withScribe avgt 200 645.822 ± 9.181 ns/op
8 | [info] LoggingSpeedBenchmark.withScribeAsync avgt 200 57.716 ± 0.264 ns/op
9 | [info] LoggingSpeedBenchmark.withTinyLog avgt 200 3637.637 ± 14.074 ns/op
--------------------------------------------------------------------------------
/work/benchmark/2019.03.28.benchmarks.txt:
--------------------------------------------------------------------------------
1 | [info] Benchmark Mode Cnt Score Error Units
2 | [info] LoggingSpeedBenchmark.withLog4j avgt 200 1970.845 ± 5.986 ns/op
3 | [info] LoggingSpeedBenchmark.withLog4jTrace avgt 200 29868.076 ± 66.867 ns/op
4 | [info] LoggingSpeedBenchmark.withLog4s avgt 200 20483.704 ± 82.583 ns/op
5 | [info] LoggingSpeedBenchmark.withLogback avgt 200 22551.549 ± 53.235 ns/op
6 | [info] LoggingSpeedBenchmark.withScalaLogging avgt 200 22669.689 ± 550.312 ns/op
7 | [info] LoggingSpeedBenchmark.withScribe avgt 200 796.542 ± 10.086 ns/op
8 | [info] LoggingSpeedBenchmark.withScribeAsync avgt 200 60.361 ± 1.409 ns/op
9 | [info] LoggingSpeedBenchmark.withTinyLog avgt 200 4583.093 ± 165.590 ns/op
--------------------------------------------------------------------------------
/work/benchmark/2020.10.01.benchmarks.txt:
--------------------------------------------------------------------------------
1 | [info] Benchmark Mode Cnt Score Error Units
2 | [info] LoggingSpeedBenchmark.withLog4j avgt 25 1454.740 ± 18.645 ns/op
3 | [info] LoggingSpeedBenchmark.withLog4jTrace avgt 25 8612.233 ± 70.006 ns/op
4 | [info] LoggingSpeedBenchmark.withLog4s avgt 25 10527.774 ± 107.905 ns/op
5 | [info] LoggingSpeedBenchmark.withLogback avgt 25 11851.383 ± 91.096 ns/op
6 | [info] LoggingSpeedBenchmark.withScalaLogging avgt 25 10715.173 ± 57.526 ns/op
7 | [info] LoggingSpeedBenchmark.withScribe avgt 25 614.416 ± 15.219 ns/op
8 | [info] LoggingSpeedBenchmark.withScribeAsync avgt 25 60.737 ± 2.476 ns/op
9 | [info] LoggingSpeedBenchmark.withTinyLog avgt 25 3209.189 ± 39.671 ns/op
--------------------------------------------------------------------------------
/work/benchmark/2021.10.08.benchmarks.txt:
--------------------------------------------------------------------------------
1 | [info] Benchmark Mode Cnt Score Error Units
2 | [info] LoggingSpeedBenchmark.withLog4j avgt 3 5715.226 ± 1027.999 ns/op
3 | [info] LoggingSpeedBenchmark.withLog4jTrace avgt 3 19260.492 ± 5071.515 ns/op
4 | [info] LoggingSpeedBenchmark.withLog4s avgt 3 17597.022 ± 3168.124 ns/op
5 | [info] LoggingSpeedBenchmark.withLogback avgt 3 19968.908 ± 1143.416 ns/op
6 | [info] LoggingSpeedBenchmark.withScalaLogging avgt 3 17441.742 ± 4572.890 ns/op
7 | [info] LoggingSpeedBenchmark.withScribe avgt 3 1590.503 ± 238.508 ns/op
8 | [info] LoggingSpeedBenchmark.withScribeAsync avgt 3 110.954 ± 14.417 ns/op
9 | [info] LoggingSpeedBenchmark.withTinyLog avgt 3 7303.232 ± 976.291 ns/op
10 | [info] PerformanceBenchmark.withDebug avgt 3 71.612 ± 1.376 ns/op
11 | [info] PerformanceBenchmark.withError avgt 3 769.497 ± 11.625 ns/op
12 | [info] PerformanceBenchmark.withInfo avgt 3 649.201 ± 3.585 ns/op
13 | [info] PerformanceBenchmark.withTrace avgt 3 72.826 ± 0.380 ns/op
--------------------------------------------------------------------------------
/work/benchmark/2021.12.29.benchmarks.txt:
--------------------------------------------------------------------------------
1 | [info] Benchmark Mode Cnt Score Error Units
2 | [info] LoggingSpeedBenchmark.withLog4j avgt 3 4267.711 ± 1346.134 ns/op
3 | [info] LoggingSpeedBenchmark.withLog4jTrace avgt 3 20254.688 ± 4548.778 ns/op
4 | [info] LoggingSpeedBenchmark.withLog4s avgt 3 20601.715 ± 4759.939 ns/op
5 | [info] LoggingSpeedBenchmark.withLogback avgt 3 23199.957 ± 1216.642 ns/op
6 | [info] LoggingSpeedBenchmark.withScalaLogging avgt 3 21313.132 ± 7503.052 ns/op
7 | [info] LoggingSpeedBenchmark.withScribe avgt 3 994.876 ± 954.153 ns/op
8 | [info] LoggingSpeedBenchmark.withScribeAsync avgt 3 81.562 ± 8.487 ns/op
9 | [info] LoggingSpeedBenchmark.withTinyLog avgt 3 4734.004 ± 1580.818 ns/op
10 | [info] PerformanceBenchmark.withDebug avgt 3 103.760 ± 3.195 ns/op
11 | [info] PerformanceBenchmark.withError avgt 3 787.559 ± 47.097 ns/op
12 | [info] PerformanceBenchmark.withInfo avgt 3 892.496 ± 139.037 ns/op
13 | [info] PerformanceBenchmark.withTrace avgt 3 103.599 ± 5.367 ns/op
--------------------------------------------------------------------------------
/work/benchmark/2022.02.06.benchmarks.txt:
--------------------------------------------------------------------------------
1 | [info] Benchmark Mode Cnt Score Error Units
2 | [info] LoggingSpeedBenchmark.withLog4cats avgt 3 10891.085 ± 1261.554 ns/op
3 | [info] LoggingSpeedBenchmark.withLog4j avgt 3 4026.601 ± 596.659 ns/op
4 | [info] LoggingSpeedBenchmark.withLog4jTrace avgt 3 10557.221 ± 273.787 ns/op
5 | [info] LoggingSpeedBenchmark.withLog4s avgt 3 8816.670 ± 847.436 ns/op
6 | [info] LoggingSpeedBenchmark.withLogback avgt 3 11852.885 ± 8699.272 ns/op
7 | [info] LoggingSpeedBenchmark.withScalaLogging avgt 3 9044.451 ± 2077.840 ns/op
8 | [info] LoggingSpeedBenchmark.withScribe avgt 3 1622.356 ± 89.906 ns/op
9 | [info] LoggingSpeedBenchmark.withScribeAsync avgt 3 64.308 ± 0.696 ns/op
10 | [info] LoggingSpeedBenchmark.withScribeEffect avgt 3 1744.555 ± 91.595 ns/op
11 | [info] LoggingSpeedBenchmark.withScribeEffectParallel avgt 3 2691.038 ± 66.149 ns/op
12 | [info] LoggingSpeedBenchmark.withTinyLog avgt 3 4019.371 ± 2791.905 ns/op
13 | [info] PerformanceBenchmark.withDebug avgt 3 45.720 ± 0.539 ns/op
14 | [info] PerformanceBenchmark.withError avgt 3 142.844 ± 0.233 ns/op
15 | [info] PerformanceBenchmark.withInfo avgt 3 142.416 ± 1.923 ns/op
16 | [info] PerformanceBenchmark.withTrace avgt 3 48.171 ± 0.080 ns/op
--------------------------------------------------------------------------------
/work/images/2018.01.31.benchmark-all-lines.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.01.31.benchmark-all-lines.png
--------------------------------------------------------------------------------
/work/images/2018.01.31.benchmark-all.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.01.31.benchmark-all.png
--------------------------------------------------------------------------------
/work/images/2018.01.31.benchmark-log4j-lines.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.01.31.benchmark-log4j-lines.png
--------------------------------------------------------------------------------
/work/images/2018.01.31.benchmark-log4j.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.01.31.benchmark-log4j.png
--------------------------------------------------------------------------------
/work/images/2018.01.31.benchmark-scala-logging-lines.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.01.31.benchmark-scala-logging-lines.png
--------------------------------------------------------------------------------
/work/images/2018.01.31.benchmark-scala-logging.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.01.31.benchmark-scala-logging.png
--------------------------------------------------------------------------------
/work/images/2018.01.31.benchmark-trace-lines.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.01.31.benchmark-trace-lines.png
--------------------------------------------------------------------------------
/work/images/2018.01.31.benchmark-trace.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.01.31.benchmark-trace.png
--------------------------------------------------------------------------------
/work/images/2018.08.28.benchmark-all-lines.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.08.28.benchmark-all-lines.png
--------------------------------------------------------------------------------
/work/images/2018.08.28.benchmark-all.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.08.28.benchmark-all.png
--------------------------------------------------------------------------------
/work/images/2018.08.28.benchmark-async-lines.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.08.28.benchmark-async-lines.png
--------------------------------------------------------------------------------
/work/images/2018.08.28.benchmark-async.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.08.28.benchmark-async.png
--------------------------------------------------------------------------------
/work/images/2018.08.28.benchmark-log4j-lines.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.08.28.benchmark-log4j-lines.png
--------------------------------------------------------------------------------
/work/images/2018.08.28.benchmark-log4j-trace-lines.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.08.28.benchmark-log4j-trace-lines.png
--------------------------------------------------------------------------------
/work/images/2018.08.28.benchmark-log4j-trace.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.08.28.benchmark-log4j-trace.png
--------------------------------------------------------------------------------
/work/images/2018.08.28.benchmark-log4j.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.08.28.benchmark-log4j.png
--------------------------------------------------------------------------------
/work/images/2018.08.28.benchmark-scala-logging-lines.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.08.28.benchmark-scala-logging-lines.png
--------------------------------------------------------------------------------
/work/images/2018.08.28.benchmark-scala-logging.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2018.08.28.benchmark-scala-logging.png
--------------------------------------------------------------------------------
/work/images/2022.06.30.mdc-output.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/2022.06.30.mdc-output.png
--------------------------------------------------------------------------------
/work/images/color-logs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/color-logs.png
--------------------------------------------------------------------------------
/work/images/output-colors.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/outr/scribe/2137cc5138923327718aff9f61aa94071ef60a2f/work/images/output-colors.png
--------------------------------------------------------------------------------