├── .github
├── CODEOWNERS
└── workflows
│ ├── sbt-dependency-graph.yaml
│ └── build.yml
├── play-v28
└── src
├── project
├── build.properties
└── plugins.sbt
├── version.sbt
├── scripts
├── build.sh
└── update-lib.sh
├── common
├── lib
│ ├── native-linux-armv7-1.18.0.jar
│ ├── native-linux-s390x-1.18.0.jar
│ ├── native-osx-aarch64-1.18.0.jar
│ ├── native-osx-x86_64-1.18.0.jar
│ ├── native-linux-aarch64-1.18.0.jar
│ ├── native-linux-ppc64le-1.18.0.jar
│ ├── native-linux-riscv64-1.18.0.jar
│ ├── native-linux-x86_64-1.18.0.jar
│ ├── native-windows-aarch64-1.18.0.jar
│ └── native-windows-x86_64-1.18.0.jar
└── src
│ └── main
│ └── java
│ └── Brotli.java
├── .gitignore
├── play-v29
└── src
│ ├── main
│ ├── resources
│ │ └── reference.conf
│ └── scala
│ │ ├── play.api.libs.streams
│ │ └── BrotliFlow.scala
│ │ └── play.filters.brotli
│ │ └── BrotliFilter.scala
│ └── test
│ ├── resources
│ └── logback-test.xml
│ └── scala
│ └── play.filters.brotli
│ └── BrotliFilterSpec.scala
├── play-v30
└── src
│ ├── main
│ ├── resources
│ │ └── reference.conf
│ └── scala
│ │ ├── play.api.libs.streams
│ │ └── BrotliFlow.scala
│ │ └── play.filters.brotli
│ │ └── BrotliFilter.scala
│ └── test
│ ├── resources
│ └── logback-test.xml
│ └── scala
│ └── play.filters.brotli
│ └── BrotliFilterSpec.scala
├── akka
└── src
│ ├── test
│ └── scala
│ │ └── akka.stream.io.compression.brotli
│ │ ├── CompressionTestingTools.scala
│ │ ├── BrotliSpec.scala
│ │ ├── GzipSpec.scala
│ │ ├── CodecSpecSupport.scala
│ │ └── CoderSpec.scala
│ └── main
│ └── scala
│ ├── akka.stream.io.compression.brotli
│ ├── BrotliDecompressor.scala
│ └── BrotliCompressor.scala
│ └── akka.stream.scaladsl
│ └── BrotliCompression.scala
├── pekko
└── src
│ ├── test
│ └── scala
│ │ └── org.apache.pekko.stream.io.compression.brotli
│ │ ├── CompressionTestingTools.scala
│ │ ├── BrotliSpec.scala
│ │ ├── GzipSpec.scala
│ │ ├── CodecSpecSupport.scala
│ │ └── CoderSpec.scala
│ └── main
│ └── scala
│ ├── org.apache.pekko.stream.io.compression.brotli
│ ├── BrotliDecompressor.scala
│ └── BrotliCompressor.scala
│ └── org.apache.pekko.stream.scaladsl
│ └── BrotliCompression.scala
├── README.md
└── LICENSE
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @mchv
--------------------------------------------------------------------------------
/play-v28/src:
--------------------------------------------------------------------------------
1 | ../play-v29/src
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version=1.10.7
2 |
--------------------------------------------------------------------------------
/version.sbt:
--------------------------------------------------------------------------------
1 | ThisBuild / version := "0.18.1-SNAPSHOT" // current development version; presumably bumped by sbt-release — confirm release process
2 |
--------------------------------------------------------------------------------
/scripts/build.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sbt clean +compile +test # '+' prefix cross-builds: compiles and tests against every configured Scala version
--------------------------------------------------------------------------------
/common/lib/native-linux-armv7-1.18.0.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/guardian/play-brotli-filter/HEAD/common/lib/native-linux-armv7-1.18.0.jar
--------------------------------------------------------------------------------
/common/lib/native-linux-s390x-1.18.0.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/guardian/play-brotli-filter/HEAD/common/lib/native-linux-s390x-1.18.0.jar
--------------------------------------------------------------------------------
/common/lib/native-osx-aarch64-1.18.0.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/guardian/play-brotli-filter/HEAD/common/lib/native-osx-aarch64-1.18.0.jar
--------------------------------------------------------------------------------
/common/lib/native-osx-x86_64-1.18.0.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/guardian/play-brotli-filter/HEAD/common/lib/native-osx-x86_64-1.18.0.jar
--------------------------------------------------------------------------------
/common/lib/native-linux-aarch64-1.18.0.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/guardian/play-brotli-filter/HEAD/common/lib/native-linux-aarch64-1.18.0.jar
--------------------------------------------------------------------------------
/common/lib/native-linux-ppc64le-1.18.0.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/guardian/play-brotli-filter/HEAD/common/lib/native-linux-ppc64le-1.18.0.jar
--------------------------------------------------------------------------------
/common/lib/native-linux-riscv64-1.18.0.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/guardian/play-brotli-filter/HEAD/common/lib/native-linux-riscv64-1.18.0.jar
--------------------------------------------------------------------------------
/common/lib/native-linux-x86_64-1.18.0.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/guardian/play-brotli-filter/HEAD/common/lib/native-linux-x86_64-1.18.0.jar
--------------------------------------------------------------------------------
/common/lib/native-windows-aarch64-1.18.0.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/guardian/play-brotli-filter/HEAD/common/lib/native-windows-aarch64-1.18.0.jar
--------------------------------------------------------------------------------
/common/lib/native-windows-x86_64-1.18.0.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/guardian/play-brotli-filter/HEAD/common/lib/native-windows-x86_64-1.18.0.jar
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.2.1") // PGP signing of published artifacts
2 | addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.11.3") // publishing to Sonatype / Maven Central
3 | addSbtPlugin("com.github.sbt" % "sbt-release" % "1.0.15") // release workflow automation (version bump, tag)
4 | addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.2.0") // fat-jar packaging
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # SBT, bloop and metals
2 | .bloop
3 | .bsp
4 | .metals
5 | project/metals.sbt
6 |
7 | # vs code
8 | .vscode
9 |
10 | # scala 3
11 | .tasty
12 |
13 |
14 | # sbt
15 | project/project/
16 | project/target/
17 | target/
18 |
19 | # Ignore Mac DS_Store files
20 | .DS_Store
21 | **/.DS_Store
22 |
23 | .idea
--------------------------------------------------------------------------------
/play-v29/src/main/resources/reference.conf:
--------------------------------------------------------------------------------
1 | play.modules {
2 | enabled += "play.filters.brotli.BrotliFilterModule"
3 | }
4 |
5 | play.filters {
6 |
7 | # Brotli filter configuration
8 | brotli {
9 |
10 | # The compression-speed vs compression-density tradeoff: the higher the quality, the denser but slower the compression. Valid range is 0 to 11.
11 | quality = 5
12 |
13 | # The maximum amount of content to buffer while brotli-compressing so that a Content-Length can be calculated; beyond this the response falls back
14 | # to chunked encoding.
15 | chunkedThreshold = 100k
16 |
17 | }
18 | }
--------------------------------------------------------------------------------
/play-v30/src/main/resources/reference.conf:
--------------------------------------------------------------------------------
1 | play.modules {
2 | enabled += "play.filters.brotli.BrotliFilterModule"
3 | }
4 |
5 | play.filters {
6 |
7 | # Brotli filter configuration
8 | brotli {
9 |
10 | # The compression-speed vs compression-density tradeoff: the higher the quality, the denser but slower the compression. Valid range is 0 to 11.
11 | quality = 5
12 |
13 | # The maximum amount of content to buffer while brotli-compressing so that a Content-Length can be calculated; beyond this the response falls back
14 | # to chunked encoding.
15 | chunkedThreshold = 100k
16 |
17 | }
18 | }
--------------------------------------------------------------------------------
/scripts/update-lib.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Download the brotli4j native jars for all supported platforms into common/lib.
4 | set -euo pipefail
5 |
6 | # Resolve the destination relative to this script's own location, not the
7 | # caller's working directory: the previous `cd ../common/lib/` only worked
8 | # when invoked from inside scripts/ (run from the repo root it escaped the repo).
9 | cd "$(dirname "$0")/../common/lib/"
10 |
11 | version='1.18.0'
12 |
13 | for platform in 'native-osx-aarch64' \
14 | 'native-osx-x86_64' \
15 | 'native-windows-x86_64' \
16 | 'native-windows-aarch64' \
17 | 'native-linux-x86_64' \
18 | 'native-linux-aarch64' \
19 | 'native-linux-armv7' \
20 | 'native-linux-s390x' \
21 | 'native-linux-riscv64' \
22 | 'native-linux-ppc64le'
23 | do
24 | # --fail: abort on HTTP errors instead of saving an error page as a jar;
25 | # -O keeps the remote filename, -J honours Content-Disposition, -# shows a progress bar
26 | curl --fail -OJ -# "https://repo1.maven.org/maven2/com/aayushatharva/brotli4j/${platform}/${version}/${platform}-${version}.jar"
27 | done
28 |
--------------------------------------------------------------------------------
/.github/workflows/sbt-dependency-graph.yaml:
--------------------------------------------------------------------------------
1 | name: Update Dependency Graph for SBT
2 | on:
3 | push:
4 | branches:
5 | - main
6 | workflow_dispatch: # allow manual runs from the Actions tab
7 | jobs:
8 | dependency-graph:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - name: Checkout branch
12 | id: checkout
13 | uses: actions/checkout@v4
14 | - name: Setup SBT
15 | uses: sbt/setup-sbt@v1
16 | - name: Submit dependencies
17 | id: submit
18 | uses: scalacenter/sbt-dependency-submission@v3
19 | - name: Log snapshot for user validation
20 | id: validate
21 | run: cat ${{ steps.submit.outputs.snapshot-json-path }} | jq
22 | permissions: # write access so the dependency snapshot can be submitted to the graph API
23 | contents: write
24 |
--------------------------------------------------------------------------------
/play-v29/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 | %level %logger{15} - %message%n%ex{full}
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/play-v30/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 | %level %logger{15} - %message%n%ex{full}
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | name: build
2 | on:
3 |   pull_request:
4 |   workflow_dispatch:
5 |   push:
6 |     branches:
7 |       - main
8 | jobs:
9 |   build:
10 |     name: Build
11 |     strategy:
12 |       matrix:
13 |         # NOTE(review): macos-latest may alias macos-14, duplicating a matrix cell — confirm
14 |         os: [macos-14, macos-latest, ubuntu-latest, windows-latest]
15 |         java: [ '11', '17', '21' ]
16 |
17 |     runs-on: ${{ matrix.os }}
18 |     steps:
19 |       - name: Checkout
20 |         uses: actions/checkout@v4
21 |         with:
22 |           show-progress: false
23 |
24 |       - name: Setup java
25 |         uses: actions/setup-java@v4
26 |         with:
27 |           # 'adopt' (AdoptOpenJDK) is deprecated in actions/setup-java; Temurin is its successor
28 |           distribution: 'temurin'
29 |           java-version: ${{ matrix.java }}
30 |           cache: 'sbt'
31 |
32 |       - name: Setup SBT
33 |         uses: sbt/setup-sbt@v1
34 |
35 |       - name: Set local hostname
36 |         if: runner.os == 'macOS'
37 |         run: |
38 |           sudo scutil --set HostName $(scutil --get LocalHostName)
39 |
40 |       - name: Build
41 |         run: |
42 |           ./scripts/build.sh
43 |         shell: bash
44 |
--------------------------------------------------------------------------------
/akka/src/test/scala/akka.stream.io.compression.brotli/CompressionTestingTools.scala:
--------------------------------------------------------------------------------
1 | /*
2 |  * Copyright (C) 2018-2022 Lightbend Inc.
3 |  */
4 |
5 | package akka.stream.io.compression.brotli
6 |
7 | import scala.annotation.tailrec
8 | import scala.concurrent.{ Await, ExecutionContext, Future }
9 | import scala.concurrent.duration.Duration
10 | import scala.util.{ Failure, Success }
11 |
12 | import akka.stream.Materializer
13 | import akka.stream.scaladsl.Source
14 | import akka.util.ByteString
15 | import akka.util.ccompat._
16 |
17 | // a few useful helpers copied over from akka-http
18 | @ccompatUsedUntil213
19 | object CompressionTestingTools {
20 | implicit class AddFutureAwaitResult[T](val future: Future[T]) extends AnyVal {
21 |
22 | /** "Safe" Await.result that doesn't throw away half of the stacktrace */
23 | def awaitResult(atMost: Duration): T = {
24 | Await.ready(future, atMost)
25 | future.value.get match { // Await.ready above guarantees the future is completed, so .value.get is safe
26 | case Success(t) => t
27 | case Failure(ex) =>
28 | throw new RuntimeException( // rethrow so the awaiting caller's stack trace is kept alongside the original cause
29 | "Trying to await result of failed Future, see the cause for the original problem.",
30 | ex)
31 | }
32 | }
33 | }
34 | implicit class EnhancedByteStringTraversableOnce(val byteStrings: IterableOnce[ByteString]) extends AnyVal {
35 | def join: ByteString = byteStrings.iterator.foldLeft(ByteString.empty)(_ ++ _) // concatenate all chunks into one ByteString
36 | }
37 | implicit class EnhancedByteStringSource[Mat](val byteStringStream: Source[ByteString, Mat]) extends AnyVal {
38 | def join(implicit materializer: Materializer): Future[ByteString] = // drain the stream, concatenating every chunk
39 | byteStringStream.runFold(ByteString.empty)(_ ++ _)
40 | def utf8String(implicit materializer: Materializer, ec: ExecutionContext): Future[String] =
41 | join.map(_.utf8String)
42 | }
43 |
44 | implicit class EnhancedThrowable(val throwable: Throwable) extends AnyVal {
45 | def ultimateCause: Throwable = { // walk getCause links down to the root cause
46 | @tailrec def rec(ex: Throwable): Throwable =
47 | if (ex.getCause == null) ex
48 | else rec(ex.getCause)
49 |
50 | rec(throwable)
51 | }
52 | }
53 | }
--------------------------------------------------------------------------------
/pekko/src/test/scala/org.apache.pekko.stream.io.compression.brotli/CompressionTestingTools.scala:
--------------------------------------------------------------------------------
1 | /*
2 |  * Copyright (C) 2018-2023 Lightbend Inc.
3 |  */
4 |
5 | package org.apache.pekko.stream.io.compression.brotli
6 |
7 | import scala.annotation.tailrec
8 | import scala.concurrent.{ Await, ExecutionContext, Future }
9 | import scala.concurrent.duration.Duration
10 | import scala.util.{ Failure, Success }
11 |
12 | import org.apache.pekko.stream.Materializer
13 | import org.apache.pekko.stream.scaladsl.Source
14 | import org.apache.pekko.util.ByteString
15 | import org.apache.pekko.util.ccompat._
16 |
17 | // a few useful helpers copied over from pekko-http
18 | @ccompatUsedUntil213
19 | object CompressionTestingTools {
20 | implicit class AddFutureAwaitResult[T](val future: Future[T]) extends AnyVal {
21 |
22 | /** "Safe" Await.result that doesn't throw away half of the stacktrace */
23 | def awaitResult(atMost: Duration): T = {
24 | Await.ready(future, atMost)
25 | future.value.get match { // Await.ready above guarantees the future is completed, so .value.get is safe
26 | case Success(t) => t
27 | case Failure(ex) =>
28 | throw new RuntimeException( // rethrow so the awaiting caller's stack trace is kept alongside the original cause
29 | "Trying to await result of failed Future, see the cause for the original problem.",
30 | ex)
31 | }
32 | }
33 | }
34 | implicit class EnhancedByteStringTraversableOnce(val byteStrings: IterableOnce[ByteString]) extends AnyVal {
35 | def join: ByteString = byteStrings.iterator.foldLeft(ByteString.empty)(_ ++ _) // concatenate all chunks into one ByteString
36 | }
37 | implicit class EnhancedByteStringSource[Mat](val byteStringStream: Source[ByteString, Mat]) extends AnyVal {
38 | def join(implicit materializer: Materializer): Future[ByteString] = // drain the stream, concatenating every chunk
39 | byteStringStream.runFold(ByteString.empty)(_ ++ _)
40 | def utf8String(implicit materializer: Materializer, ec: ExecutionContext): Future[String] =
41 | join.map(_.utf8String)
42 | }
43 |
44 | implicit class EnhancedThrowable(val throwable: Throwable) extends AnyVal {
45 | def ultimateCause: Throwable = { // walk getCause links down to the root cause
46 | @tailrec def rec(ex: Throwable): Throwable =
47 | if (ex.getCause == null) ex
48 | else rec(ex.getCause)
49 |
50 | rec(throwable)
51 | }
52 | }
53 | }
--------------------------------------------------------------------------------
/akka/src/main/scala/akka.stream.io.compression.brotli/BrotliDecompressor.scala:
--------------------------------------------------------------------------------
1 | /*
2 |  * Copyright (C) 2009-2022 Lightbend Inc.
3 |  * - Mariot Chauvin
4 |  */
5 |
6 | package akka.stream.io.compression.brotli
7 |
8 | import java.nio.ByteBuffer // NOTE(review): appears unused — candidate for removal
9 | import java.util.zip.Inflater // NOTE(review): appears unused — candidate for removal
10 | import java.util.zip.ZipException
11 |
12 | import com.aayushatharva.brotli4j.decoder.DirectDecompress
13 | import com.aayushatharva.brotli4j.decoder.DecoderJNI
14 |
15 | import akka.annotation.InternalApi // NOTE(review): imported but not applied to any member — confirm intent
16 | import akka.stream.Attributes
17 | import akka.stream.impl.io.ByteStringParser
18 | import akka.stream.impl.io.ByteStringParser.{ ParseResult, ParseStep }
19 | import akka.util.ByteString
20 |
21 | class BrotliDecompressor extends ByteStringParser[ByteString] { // streaming stage decompressing brotli input chunk by chunk
22 |
23 | class DecompressorParsingLogic extends ParsingLogic {
24 |
25 | case object DecompressStep extends ParseStep[ByteString] {
26 |
27 | private def fail(msg: String) = throw new ZipException(msg)
28 |
29 | override def onTruncation(): Unit = failStage(new ZipException("Truncated Brotli stream"))
30 |
31 | override def parse(reader: ByteStringParser.ByteReader): ParseResult[ByteString] = {
32 | if (!reader.hasRemaining) {
33 | ParseResult(None, ByteStringParser.FinishedParser, true) // no buffered input left: emit nothing and finish
34 | } else {
35 | val data = reader.remainingData.toArrayUnsafe() // take everything buffered so far
36 | val directDecompress = DirectDecompress.decompress(data) // one-shot JNI decompression of the whole buffer
37 | reader.skip(reader.remainingSize) // mark the buffer as consumed
38 |
39 | val status = directDecompress.getResultStatus()
40 | if (status == DecoderJNI.Status.DONE) {
41 | val outcome = directDecompress.getDecompressedData()
42 | ParseResult(Some(ByteString(outcome)), this, true)
43 | } else if (status == DecoderJNI.Status.NEEDS_MORE_INPUT) {
44 | throw ByteStringParser.NeedMoreData // NOTE(review): no ParseResult was returned, so presumably the parser replays the uncommitted bytes with more input — confirm
45 | } else {
46 | fail(" Brotli decompression failed - status: " + status)
47 | }
48 | }
49 | }
50 | }
51 |
52 | override def postStop(): Unit = {} // no state is held between parse calls, so nothing to release here
53 |
54 | }
55 |
56 | override def createLogic(attr: Attributes) = new DecompressorParsingLogic {
57 | startWith(DecompressStep)
58 | }
59 |
60 | }
--------------------------------------------------------------------------------
/akka/src/main/scala/akka.stream.scaladsl/BrotliCompression.scala:
--------------------------------------------------------------------------------
1 | /*
2 |  * Copyright 2023 Mariot Chauvin
3 |  *
4 |  * Mariot Chauvin licenses this file to you under the Apache License,
5 |  * version 2.0 (the "License"); you may not use this file except in compliance
6 |  * with the License. You may obtain a copy of the License at:
7 |  *
8 |  * https://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 |  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 |  * License for the specific language governing permissions and limitations
14 |  * under the License.
15 |  */
16 |
17 | package akka.stream.scaladsl
18 |
19 | import akka.NotUsed
20 | import akka.stream.impl.io.compression._
21 | import akka.stream.scaladsl.Flow
22 | import akka.util.ByteString
23 |
24 | import akka.stream.io.compression.brotli.{BrotliCompressor, BrotliDecompressor}
25 |
26 | object BrotliCompression {
27 |
28 | val DefaultQuality: Int = BrotliCompressor.DefaultQuality
29 |
30 | /** Brotli-compresses a stream of ByteStrings at DefaultQuality. NOTE(review): a no-arg method next to brotli(level: Int = ...) mixes overloading with default arguments — confirm callers can resolve brotli() unambiguously. */
31 |
32 | def brotli: Flow[ByteString, ByteString, NotUsed] =
33 | CompressionUtils.compressorFlow(() => new BrotliCompressor())
34 |
35 | /**
36 |  * Creates a flow that brotli-compresses a stream of ByteStrings.
37 |  * NOTE(review): this scaladoc was copied from the gzip flow — whether the compressor
38 |  * flushes after every [[ByteString]] (gzip's SYNC_FLUSH guarantee) depends on
39 |  * BrotliCompressor's behaviour; confirm before relying on per-chunk decompressability.
40 |  *
41 |  * FIXME: should strategy / flush mode be configurable? See https://github.com/akka/akka/issues/21849
42 |  *
43 |  * @param level the brotli quality, 0 (fastest) to 11 (densest)
44 |  */
45 | def brotli(level: Int = BrotliCompressor.DefaultQuality): Flow[ByteString, ByteString, NotUsed] =
46 | CompressionUtils.compressorFlow(() => new BrotliCompressor(level))
47 |
48 | /**
49 |  * Creates a Flow that decompresses a brotli-compressed stream of data.
50 |  */
51 | def unbrotli(): Flow[ByteString, ByteString, NotUsed] =
52 | Flow[ByteString].via(new BrotliDecompressor()).named("unbrotli")
53 |
54 | }
--------------------------------------------------------------------------------
/pekko/src/main/scala/org.apache.pekko.stream.io.compression.brotli/BrotliDecompressor.scala:
--------------------------------------------------------------------------------
1 | /*
2 |  * Copyright (C) 2009-2023 Lightbend Inc.
3 |  * - Mariot Chauvin
4 |  */
5 |
6 | package org.apache.pekko.stream.io.compression.brotli
7 |
8 | import java.nio.ByteBuffer // NOTE(review): appears unused — candidate for removal
9 | import java.util.zip.Inflater // NOTE(review): appears unused — candidate for removal
10 | import java.util.zip.ZipException
11 |
12 | import com.aayushatharva.brotli4j.decoder.DirectDecompress
13 | import com.aayushatharva.brotli4j.decoder.DecoderJNI
14 |
15 | import org.apache.pekko.annotation.InternalApi // NOTE(review): imported but not applied to any member — confirm intent
16 | import org.apache.pekko.stream.Attributes
17 | import org.apache.pekko.stream.impl.io.ByteStringParser
18 | import org.apache.pekko.stream.impl.io.ByteStringParser.{ ParseResult, ParseStep }
19 | import org.apache.pekko.util.ByteString
20 |
21 | class BrotliDecompressor extends ByteStringParser[ByteString] { // streaming stage decompressing brotli input chunk by chunk
22 |
23 | class DecompressorParsingLogic extends ParsingLogic {
24 |
25 | case object DecompressStep extends ParseStep[ByteString] {
26 |
27 | private def fail(msg: String) = throw new ZipException(msg)
28 |
29 | override def onTruncation(): Unit = failStage(new ZipException("Truncated Brotli stream"))
30 |
31 | override def parse(reader: ByteStringParser.ByteReader): ParseResult[ByteString] = {
32 | if (!reader.hasRemaining) {
33 | ParseResult(None, ByteStringParser.FinishedParser, true) // no buffered input left: emit nothing and finish
34 | } else {
35 | val data = reader.remainingData.toArrayUnsafe() // take everything buffered so far
36 | val directDecompress = DirectDecompress.decompress(data) // one-shot JNI decompression of the whole buffer
37 | reader.skip(reader.remainingSize) // mark the buffer as consumed
38 |
39 | val status = directDecompress.getResultStatus()
40 | if (status == DecoderJNI.Status.DONE) {
41 | val outcome = directDecompress.getDecompressedData()
42 | ParseResult(Some(ByteString(outcome)), this, true)
43 | } else if (status == DecoderJNI.Status.NEEDS_MORE_INPUT) {
44 | throw ByteStringParser.NeedMoreData // NOTE(review): no ParseResult was returned, so presumably the parser replays the uncommitted bytes with more input — confirm
45 | } else {
46 | fail(" Brotli decompression failed - status: " + status)
47 | }
48 | }
49 | }
50 | }
51 |
52 | override def postStop(): Unit = {} // no state is held between parse calls, so nothing to release here
53 |
54 | }
55 |
56 | override def createLogic(attr: Attributes) = new DecompressorParsingLogic {
57 | startWith(DecompressStep)
58 | }
59 |
60 | }
--------------------------------------------------------------------------------
/akka/src/test/scala/akka.stream.io.compression.brotli/BrotliSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 |  * Copyright (C) 2009-2016 Lightbend Inc. and Mariot Chauvin
3 |  */
4 |
5 | package akka.stream.io.compression.brotli
6 |
7 | import java.io.{ InputStream, OutputStream }
8 |
9 | import com.aayushatharva.brotli4j.Brotli4jLoader
10 | import com.aayushatharva.brotli4j.encoder.BrotliOutputStream;
11 | import com.aayushatharva.brotli4j.decoder.BrotliInputStream;
12 | import java.util.zip.ZipException
13 |
14 | import akka.stream.io.compression.brotli.CoderSpec // NOTE(review): redundant — CoderSpec is in this same package
15 | import akka.stream.impl.io.compression.{ Compressor, GzipCompressor } // NOTE(review): GzipCompressor appears unused in this spec
16 | import akka.stream.scaladsl.{Flow, BrotliCompression}
17 | import akka.util.ByteString
18 |
19 | class BrotliSpec extends CoderSpec("brotli") {
20 | import CompressionTestingTools._
21 |
22 |
23 | {
24 | Brotli4jLoader.ensureAvailability(); // load the native brotli library once, before any test runs
25 | }
26 |
27 | override protected def newCompressor(): Compressor = new BrotliCompressor
28 | override protected val encoderFlow: Flow[ByteString, ByteString, Any] = BrotliCompression.brotli
29 | override protected def decoderFlow(maxBytesPerChunk: Int): Flow[ByteString, ByteString, Any] = BrotliCompression.unbrotli() // NOTE(review): maxBytesPerChunk is ignored by unbrotli() — confirm intended
30 |
31 | protected def newDecodedInputStream(underlying: InputStream): InputStream =
32 | new BrotliInputStream(underlying)
33 |
34 | override protected def newEncodedOutputStream(underlying: OutputStream): OutputStream =
35 | new BrotliOutputStream(underlying)
36 |
37 | /* There is no CRC in Brotli, so corrupted input cannot be detected via a checksum */
38 | override def corruptInputCheck = false
39 |
40 | override def extraTests(): Unit = {
41 | "decode concatenated compressions" in {
42 | pending //TODO is that something we could support?
43 | ourDecode(Seq(encode("Hello, "), encode("dear "), encode("User!")).join) should readAs("Hello, dear User!")
44 | }
45 | "provide a similar compression ratio than the standard Brotli/Unbortli streams" in { // [sic] spec name has "Unbortli"/grammar typos; runtime string left unchanged
46 | ourEncode(largeTextBytes).length should be equals streamEncode(largeTextBytes).length // NOTE(review): ScalaTest's `should be equals x` does not assert equality — likely intended shouldEqual; confirm
47 | }
48 | "throw an error on truncated input" in {
49 | val ex = the[RuntimeException] thrownBy ourDecode(streamEncode(smallTextBytes).dropRight(5))
50 | ex.ultimateCause.getMessage should equal("Truncated Brotli stream")
51 | }
52 | }
53 | }
--------------------------------------------------------------------------------
/pekko/src/main/scala/org.apache.pekko.stream.scaladsl/BrotliCompression.scala:
--------------------------------------------------------------------------------
1 | /*
2 |  * Copyright 2023 Mariot Chauvin
3 |  *
4 |  * Mariot Chauvin licenses this file to you under the Apache License,
5 |  * version 2.0 (the "License"); you may not use this file except in compliance
6 |  * with the License. You may obtain a copy of the License at:
7 |  *
8 |  * https://www.apache.org/licenses/LICENSE-2.0
9 |  *
10 |  * Unless required by applicable law or agreed to in writing, software
11 |  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 |  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 |  * License for the specific language governing permissions and limitations
14 |  * under the License.
15 |  */
16 |
17 | package org.apache.pekko.stream.scaladsl
18 |
19 | import org.apache.pekko.NotUsed
20 | import org.apache.pekko.stream.impl.io.compression._
21 | import org.apache.pekko.stream.scaladsl.Flow
22 | import org.apache.pekko.util.ByteString
23 |
24 | import org.apache.pekko.stream.io.compression.brotli.{BrotliCompressor, BrotliDecompressor}
25 |
26 | object BrotliCompression {
27 |
28 | val DefaultQuality: Int = BrotliCompressor.DefaultQuality
29 |
30 | /** Brotli-compresses a stream of ByteStrings at DefaultQuality. NOTE(review): a no-arg method next to brotli(level: Int = ...) mixes overloading with default arguments — confirm callers can resolve brotli() unambiguously. */
31 |
32 | def brotli: Flow[ByteString, ByteString, NotUsed] =
33 | CompressionUtils.compressorFlow(() => new BrotliCompressor())
34 |
35 | /**
36 |  * Creates a flow that brotli-compresses a stream of ByteStrings.
37 |  * NOTE(review): this scaladoc was copied from the gzip flow — whether the compressor
38 |  * flushes after every [[ByteString]] (gzip's SYNC_FLUSH guarantee) depends on
39 |  * BrotliCompressor's behaviour; confirm before relying on per-chunk decompressability.
40 |  *
41 |  * FIXME: should strategy / flush mode be configurable? See https://github.com/akka/akka/issues/21849
42 |  *
43 |  * @param level the brotli quality, 0 (fastest) to 11 (densest)
44 |  */
45 | def brotli(level: Int = BrotliCompressor.DefaultQuality): Flow[ByteString, ByteString, NotUsed] =
46 | CompressionUtils.compressorFlow(() => new BrotliCompressor(level))
47 |
48 | /**
49 |  * Creates a Flow that decompresses a brotli-compressed stream of data.
50 |  */
51 | def unbrotli(): Flow[ByteString, ByteString, NotUsed] =
52 | Flow[ByteString].via(new BrotliDecompressor()).named("unbrotli")
53 |
54 | }
--------------------------------------------------------------------------------
/pekko/src/test/scala/org.apache.pekko.stream.io.compression.brotli/BrotliSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 |  * Copyright (C) 2009-2023 Lightbend Inc. and Mariot Chauvin
3 |  */
4 |
5 | package org.apache.pekko.stream.io.compression.brotli
6 |
7 | import java.io.{ InputStream, OutputStream }
8 |
9 | import com.aayushatharva.brotli4j.Brotli4jLoader
10 | import com.aayushatharva.brotli4j.encoder.BrotliOutputStream;
11 | import com.aayushatharva.brotli4j.decoder.BrotliInputStream;
12 | import java.util.zip.ZipException
13 |
14 | import org.apache.pekko.stream.io.compression.brotli.CoderSpec // NOTE(review): redundant — CoderSpec is in this same package
15 | import org.apache.pekko.stream.impl.io.compression.{ Compressor, GzipCompressor } // NOTE(review): GzipCompressor appears unused in this spec
16 | import org.apache.pekko.stream.scaladsl.{Flow, BrotliCompression}
17 | import org.apache.pekko.util.ByteString
18 |
19 | class BrotliSpec extends CoderSpec("brotli") {
20 | import CompressionTestingTools._
21 |
22 |
23 | {
24 | Brotli4jLoader.ensureAvailability(); // load the native brotli library once, before any test runs
25 | }
26 |
27 | override protected def newCompressor(): Compressor = new BrotliCompressor
28 | override protected val encoderFlow: Flow[ByteString, ByteString, Any] = BrotliCompression.brotli
29 | override protected def decoderFlow(maxBytesPerChunk: Int): Flow[ByteString, ByteString, Any] = BrotliCompression.unbrotli() // NOTE(review): maxBytesPerChunk is ignored by unbrotli() — confirm intended
30 |
31 | protected def newDecodedInputStream(underlying: InputStream): InputStream =
32 | new BrotliInputStream(underlying)
33 |
34 | override protected def newEncodedOutputStream(underlying: OutputStream): OutputStream =
35 | new BrotliOutputStream(underlying)
36 |
37 | /* There is no CRC in Brotli, so corrupted input cannot be detected via a checksum */
38 | override def corruptInputCheck = false
39 |
40 | override def extraTests(): Unit = {
41 | "decode concatenated compressions" in {
42 | pending //TODO is that something we could support?
43 | ourDecode(Seq(encode("Hello, "), encode("dear "), encode("User!")).join) should readAs("Hello, dear User!")
44 | }
45 | "provide a similar compression ratio than the standard Brotli/Unbortli streams" in { // [sic] spec name has "Unbortli"/grammar typos; runtime string left unchanged
46 | ourEncode(largeTextBytes).length should be equals streamEncode(largeTextBytes).length // NOTE(review): ScalaTest's `should be equals x` does not assert equality — likely intended shouldEqual; confirm
47 | }
48 | "throw an error on truncated input" in {
49 | val ex = the[RuntimeException] thrownBy ourDecode(streamEncode(smallTextBytes).dropRight(5))
50 | ex.ultimateCause.getMessage should equal("Truncated Brotli stream")
51 | }
52 | }
53 | }
--------------------------------------------------------------------------------
/akka/src/test/scala/akka.stream.io.compression.brotli/GzipSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 |  * Copyright (C) 2009-2016 Lightbend Inc. and Mariot Chauvin
3 |  */
4 |
5 | package akka.stream.io.compression.brotli
6 |
7 | import java.io.{ InputStream, OutputStream }
8 | import java.util.zip.{ GZIPInputStream, GZIPOutputStream, ZipException }
9 |
10 | import akka.stream.io.compression.brotli.CoderSpec // NOTE(review): redundant — CoderSpec is in this same package
11 | import akka.stream.impl.io.compression.{ Compressor, GzipCompressor }
12 | import akka.stream.scaladsl.{ Compression, Flow }
13 | import akka.util.ByteString
14 |
15 | class GzipSpec extends CoderSpec("gzip") { // runs the shared CoderSpec harness against gzip (presumably a sanity baseline for the brotli specs)
16 | import CompressionTestingTools._
17 |
18 | protected def newCompressor(): Compressor = new GzipCompressor
19 | protected val encoderFlow: Flow[ByteString, ByteString, Any] = Compression.gzip
20 | protected def decoderFlow(maxBytesPerChunk: Int): Flow[ByteString, ByteString, Any] =
21 | Compression.gunzip(maxBytesPerChunk)
22 |
23 | protected def newDecodedInputStream(underlying: InputStream): InputStream =
24 | new GZIPInputStream(underlying)
25 |
26 | protected def newEncodedOutputStream(underlying: OutputStream): OutputStream =
27 | new GZIPOutputStream(underlying)
28 |
29 | override def extraTests(): Unit = {
30 | "decode concatenated compressions" in {
31 | ourDecode(Seq(encode("Hello, "), encode("dear "), encode("User!")).join) should readAs("Hello, dear User!")
32 | }
33 | "provide a better compression ratio than the standard Gzip/Gunzip streams" in {
34 | ourEncode(largeTextBytes).length should be < streamEncode(largeTextBytes).length
35 | }
36 | "throw an error on truncated input" in {
37 | val ex = the[RuntimeException] thrownBy ourDecode(streamEncode(smallTextBytes).dropRight(5))
38 | ex.ultimateCause.getMessage should equal("Truncated GZIP stream")
39 | }
40 | "throw an error if compressed data is just missing the trailer at the end" in {
41 | def brokenCompress(payload: String) = newCompressor().compress(ByteString(payload, "UTF-8")) // compress without finish(), so the gzip trailer is never written
42 | val ex = the[RuntimeException] thrownBy ourDecode(brokenCompress("abcdefghijkl"))
43 | ex.ultimateCause.getMessage should equal("Truncated GZIP stream")
44 | }
45 | "throw early if header is corrupt" in {
46 | val cause = (the[RuntimeException] thrownBy ourDecode(ByteString(0, 1, 2, 3, 4))).ultimateCause
47 | cause should ((be(a[ZipException]) and have).message("Not in GZIP format"))
48 | }
49 | }
50 | }
--------------------------------------------------------------------------------
/pekko/src/test/scala/org.apache.pekko.stream.io.compression.brotli/GzipSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2009-2023 Lightbend Inc. and Mariot Chauvin
3 | */
4 |
5 | package org.apache.pekko.stream.io.compression.brotli
6 |
7 | import java.io.{ InputStream, OutputStream }
8 | import java.util.zip.{ GZIPInputStream, GZIPOutputStream, ZipException }
9 |
10 | import org.apache.pekko.stream.io.compression.brotli.CoderSpec
11 | import org.apache.pekko.stream.impl.io.compression.{ Compressor, GzipCompressor }
12 | import org.apache.pekko.stream.scaladsl.{ Compression, Flow }
13 | import org.apache.pekko.util.ByteString
14 |
/**
 * Runs the shared [[CoderSpec]] harness against the JDK gzip codec, plus
 * gzip-specific extra tests (concatenated streams, truncation, corrupt header).
 * Lives in the brotli test package so both codecs share one harness.
 */
class GzipSpec extends CoderSpec("gzip") {
  import CompressionTestingTools._

  // Incremental compressor used by CoderSpec's chunked-compression tests.
  protected def newCompressor(): Compressor = new GzipCompressor
  // Streaming encoder/decoder flows under test.
  protected val encoderFlow: Flow[ByteString, ByteString, Any] = Compression.gzip
  protected def decoderFlow(maxBytesPerChunk: Int): Flow[ByteString, ByteString, Any] =
    Compression.gunzip(maxBytesPerChunk)

  // Reference JDK streams that CoderSpec compares the flows against.
  protected def newDecodedInputStream(underlying: InputStream): InputStream =
    new GZIPInputStream(underlying)

  protected def newEncodedOutputStream(underlying: OutputStream): OutputStream =
    new GZIPOutputStream(underlying)

  override def extraTests(): Unit = {
    // gunzip supports multiple concatenated gzip members in one stream.
    "decode concatenated compressions" in {
      ourDecode(Seq(encode("Hello, "), encode("dear "), encode("User!")).join) should readAs("Hello, dear User!")
    }
    "provide a better compression ratio than the standard Gzip/Gunzip streams" in {
      ourEncode(largeTextBytes).length should be < streamEncode(largeTextBytes).length
    }
    "throw an error on truncated input" in {
      val ex = the[RuntimeException] thrownBy ourDecode(streamEncode(smallTextBytes).dropRight(5))
      ex.ultimateCause.getMessage should equal("Truncated GZIP stream")
    }
    // A compressor that is never finish()ed produces data without the gzip trailer.
    "throw an error if compressed data is just missing the trailer at the end" in {
      def brokenCompress(payload: String) = newCompressor().compress(ByteString(payload, "UTF-8"))
      val ex = the[RuntimeException] thrownBy ourDecode(brokenCompress("abcdefghijkl"))
      ex.ultimateCause.getMessage should equal("Truncated GZIP stream")
    }
    // Corrupt magic bytes must fail immediately, not after buffering more input.
    "throw early if header is corrupt" in {
      val cause = (the[RuntimeException] thrownBy ourDecode(ByteString(0, 1, 2, 3, 4))).ultimateCause
      cause should ((be(a[ZipException]) and have).message("Not in GZIP format"))
    }
  }
}
--------------------------------------------------------------------------------
/akka/src/main/scala/akka.stream.io.compression.brotli/BrotliCompressor.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2023 Mariot Chauvin
3 | *
4 | * Mariot Chauvin licenses this file to you under the Apache License,
5 | * version 2.0 (the "License"); you may not use this file except in compliance
6 | * with the License. You may obtain a copy of the License at:
7 | *
8 | * https://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 | * License for the specific language governing permissions and limitations
14 | * under the License.
15 | */
16 |
17 | package akka.stream.io.compression.brotli
18 |
19 |
20 | import akka.util.ByteString
21 | import akka.stream.impl.io.compression.Compressor
22 | import com.aayushatharva.brotli4j.encoder.Encoder
23 |
24 |
/** Brotli quality (compression level) bounds accepted by the encoder. */
object BrotliCompressor {
  // Fastest compression, lowest density.
  val MinQuality: Int = 0
  // Slowest compression, highest density.
  val MaxQuality: Int = 11
  // Default favours compression density over speed.
  val DefaultQuality: Int = MaxQuality
}
30 |
/**
 * A [[Compressor]] backed by brotli4j. brotli4j only exposes a one-shot
 * `Encoder.compress`, so this implementation buffers all input and performs the
 * actual compression in `finish()`; `flush()` can never emit partial output.
 *
 * @param level Brotli quality, `BrotliCompressor.MinQuality` to `MaxQuality`.
 */
class BrotliCompressor(level: Int = BrotliCompressor.DefaultQuality) extends Compressor {

  // Encoder settings; quality selects the speed-vs-density trade-off.
  val params = new Encoder.Parameters().setQuality(level)
  // Accumulated, not-yet-compressed input; drained by finish()/close().
  val buffer = scala.collection.mutable.ListBuffer.empty[ByteString]

  /**
   * Compresses the given input and returns compressed data. The implementation
   * can and will choose to buffer output data to improve compression. Use
   * `flush` or `compressAndFlush` to make sure that all input data has been
   * compressed and pending output data has been returned.
   */
  override final def compress(input: ByteString): ByteString = {
    buffer += input // only buffered; compression is deferred to finish()
    ByteString.empty
  }

  /**
   * Flushes any output data and returns the currently remaining compressed data.
   *
   * Always empty here: the one-shot brotli4j encoder cannot emit partial
   * output before `finish()`.
   */
  override final def flush(): ByteString = ByteString.empty

  /**
   * Closes this compressed stream and return the remaining compressed data. After
   * calling this method, this Compressor cannot be used any further.
   */
  override final def finish(): ByteString = {
    // Concatenate everything buffered and compress it in a single pass.
    val input: ByteString = buffer.foldLeft(ByteString.empty)(_ ++ _)
    buffer.clear() // release the buffered input; this instance must not be reused
    ByteString(Encoder.compress(input.toArray, params))
  }

  /** Combines `compress` + `flush`; like `flush`, emits nothing before `finish()`. */
  override final def compressAndFlush(input: ByteString): ByteString =
    compress(input)

  /** Combines `compress` + `finish` */
  override final def compressAndFinish(input: ByteString): ByteString =
    compress(input) ++ finish()

  /** Make sure any resources have been released */
  override final def close(): Unit = buffer.clear() // drop any un-finished input
}
--------------------------------------------------------------------------------
/pekko/src/main/scala/org.apache.pekko.stream.io.compression.brotli/BrotliCompressor.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2023 Mariot Chauvin
3 | *
4 | * Mariot Chauvin licenses this file to you under the Apache License,
5 | * version 2.0 (the "License"); you may not use this file except in compliance
6 | * with the License. You may obtain a copy of the License at:
7 | *
8 | * https://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 | * License for the specific language governing permissions and limitations
14 | * under the License.
15 | */
16 |
17 | package org.apache.pekko.stream.io.compression.brotli
18 |
19 |
20 | import org.apache.pekko.util.ByteString
21 | import org.apache.pekko.stream.impl.io.compression.Compressor
22 | import com.aayushatharva.brotli4j.encoder.Encoder
23 |
24 |
/** Brotli quality (compression level) bounds accepted by the encoder. */
object BrotliCompressor {
  // Fastest compression, lowest density.
  val MinQuality: Int = 0
  // Slowest compression, highest density.
  val MaxQuality: Int = 11
  // Default favours compression density over speed.
  val DefaultQuality: Int = MaxQuality
}
30 |
/**
 * A [[Compressor]] backed by brotli4j. brotli4j only exposes a one-shot
 * `Encoder.compress`, so this implementation buffers all input and performs the
 * actual compression in `finish()`; `flush()` can never emit partial output.
 *
 * @param level Brotli quality, `BrotliCompressor.MinQuality` to `MaxQuality`.
 */
class BrotliCompressor(level: Int = BrotliCompressor.DefaultQuality) extends Compressor {

  // Encoder settings; quality selects the speed-vs-density trade-off.
  val params = new Encoder.Parameters().setQuality(level)
  // Accumulated, not-yet-compressed input; drained by finish()/close().
  val buffer = scala.collection.mutable.ListBuffer.empty[ByteString]

  /**
   * Compresses the given input and returns compressed data. The implementation
   * can and will choose to buffer output data to improve compression. Use
   * `flush` or `compressAndFlush` to make sure that all input data has been
   * compressed and pending output data has been returned.
   */
  override final def compress(input: ByteString): ByteString = {
    buffer += input // only buffered; compression is deferred to finish()
    ByteString.empty
  }

  /**
   * Flushes any output data and returns the currently remaining compressed data.
   *
   * Always empty here: the one-shot brotli4j encoder cannot emit partial
   * output before `finish()`.
   */
  override final def flush(): ByteString = ByteString.empty

  /**
   * Closes this compressed stream and return the remaining compressed data. After
   * calling this method, this Compressor cannot be used any further.
   */
  override final def finish(): ByteString = {
    // Concatenate everything buffered and compress it in a single pass.
    val input: ByteString = buffer.foldLeft(ByteString.empty)(_ ++ _)
    buffer.clear() // release the buffered input; this instance must not be reused
    ByteString(Encoder.compress(input.toArray, params))
  }

  /** Combines `compress` + `flush`; like `flush`, emits nothing before `finish()`. */
  override final def compressAndFlush(input: ByteString): ByteString =
    compress(input)

  /** Combines `compress` + `finish` */
  override final def compressAndFinish(input: ByteString): ByteString =
    compress(input) ++ finish()

  /** Make sure any resources have been released */
  override final def close(): Unit = buffer.clear() // drop any un-finished input
}
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Brotli filter for play
2 |
3 | [](http://www.apache.org/licenses/LICENSE-2.0)
4 |
5 |
6 | * A [Brotli](https://opensource.googleblog.com/2015/09/introducing-brotli-new-compression.html) filter for the [Play framework](https://www.playframework.com/)
7 | * A [Brotli](https://opensource.googleblog.com/2015/09/introducing-brotli-new-compression.html) compression `Operator` and a decompression `Operator` for [Akka streams](https://doc.akka.io/docs/akka/current/stream/stream-flows-and-basics.html)
8 | * A [Brotli](https://opensource.googleblog.com/2015/09/introducing-brotli-new-compression.html) compression `Operator` and a decompression `Operator` for [Apache Pekko](https://pekko.apache.org/) streams
9 |
10 |
11 | ## Install
12 |
13 | Add as a dependency:
14 |
15 | * Play **3.0** use [](https://index.scala-lang.org/guardian/play-brotli-filter/play-v30-brotli-filter/)
16 | ```scala
17 | libraryDependencies += "com.gu" %% "play-v30-brotli-filter" % "[latest version number]"
18 | ```
19 | * Play **2.9** use [](https://index.scala-lang.org/guardian/play-brotli-filter/play-v29-brotli-filter/)
20 | ```scala
21 | libraryDependencies += "com.gu" %% "play-v29-brotli-filter" % "[latest version number]"
22 | ```
23 | * Play **2.8** use [](https://index.scala-lang.org/guardian/play-brotli-filter/play-v28-brotli-filter/)
24 | ```scala
25 | libraryDependencies += "com.gu" %% "play-v28-brotli-filter" % "[latest version number]"
26 | ```
27 |
28 | * Akka use [](https://index.scala-lang.org/guardian/play-brotli-filter/akka-stream-brotli/)
29 | ```scala
30 | libraryDependencies += "com.gu" %% "akka-stream-brotli" % "[latest version number]"
31 | ```
32 |
33 | * Pekko use [](https://index.scala-lang.org/guardian/play-brotli-filter/pekko-stream-brotli/)
34 | ```scala
35 | libraryDependencies += "com.gu" %% "pekko-stream-brotli" % "[latest version number]"
36 | ```
37 |
38 | ## Configure
39 |
40 | Currently the only parameter you can configure is quality, which defaults to `11`.
41 |
42 | ```
43 | play.filters {
44 |
45 | # Brotli filter configuration
46 | brotli {
47 |
48 |         # The compression-speed vs compression-density trade-off. The higher the quality, the slower the compression. Range is 0 to 11
49 | quality = 11
50 |
51 | }
52 | }
53 | ```
54 |
--------------------------------------------------------------------------------
/common/src/main/java/Brotli.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2021 The Netty Project
3 | *
4 | * The Netty Project licenses this file to you under the Apache License,
5 | * version 2.0 (the "License"); you may not use this file except in compliance
6 | * with the License. You may obtain a copy of the License at:
7 | *
8 | * https://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 | * License for the specific language governing permissions and limitations
14 | * under the License.
15 | */
16 |
17 | package brotli4s;
18 |
19 | import com.aayushatharva.brotli4j.Brotli4jLoader;
20 | import java.security.AccessController;
21 | import java.security.PrivilegedAction;
22 |
23 | public final class Brotli {
24 |
25 | private static final ClassNotFoundException CNFE;
26 | private static Throwable cause;
27 |
28 | static {
29 | ClassNotFoundException cnfe = null;
30 |
31 | try {
32 | Class.forName("com.aayushatharva.brotli4j.Brotli4jLoader", false, Brotli.getClassLoader(Brotli.class));
33 | } catch (ClassNotFoundException t) {
34 | cnfe = t;
35 | System.out.println(
36 | "brotli4j not in the classpath; Brotli support will be unavailable.");
37 | }
38 |
39 | CNFE = cnfe;
40 |
41 | // If in the classpath, try to load the native library and initialize brotli4j.
42 | if (cnfe == null) {
43 | cause = Brotli4jLoader.getUnavailabilityCause();
44 | if (cause != null) {
45 | System.out.println("Failed to load brotli4j; Brotli support will be unavailable");
46 | cause.printStackTrace();
47 | }
48 | }
49 | }
50 |
51 | /**
52 | *
53 | * @return true when brotli4j is in the classpath
54 | * and native library is available on this platform and could be loaded
55 | */
56 | public static boolean isAvailable() {
57 | return CNFE == null && Brotli4jLoader.isAvailable();
58 | }
59 |
60 | /**
61 | * Throws when brotli support is missing from the classpath or is unavailable on this platform
62 | * @throws Throwable a ClassNotFoundException if brotli4j is missing
63 | * or a UnsatisfiedLinkError if brotli4j native lib can't be loaded
64 | */
65 | public static void ensureAvailability() throws Throwable {
66 | if (CNFE != null) {
67 | throw CNFE;
68 | }
69 | Brotli4jLoader.ensureAvailability();
70 | }
71 |
72 | /**
73 | * Returns {@link Throwable} of unavailability cause
74 | */
75 | public static Throwable cause() {
76 | return cause;
77 | }
78 |
79 | private static ClassLoader getClassLoader(final Class> clazz) {
80 | if (System.getSecurityManager() == null) {
81 | return clazz.getClassLoader();
82 | } else {
83 | return AccessController.doPrivileged(new PrivilegedAction() {
84 | @Override
85 | public ClassLoader run() {
86 | return clazz.getClassLoader();
87 | }
88 | });
89 | }
90 | }
91 |
92 | private Brotli() {
93 | }
94 | }
--------------------------------------------------------------------------------
/play-v29/src/main/scala/play.api.libs.streams/BrotliFlow.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2023 Mariot Chauvin
3 | *
4 | * Mariot Chauvin licenses this file to you under the Apache License,
5 | * version 2.0 (the "License"); you may not use this file except in compliance
6 | * with the License. You may obtain a copy of the License at:
7 | *
8 | * https://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 | * License for the specific language governing permissions and limitations
14 | * under the License.
15 | */
16 |
17 | package play.api.libs.streams
18 |
19 | import java.util.zip.Deflater
20 |
21 | import akka.stream.scaladsl.BrotliCompression
22 | import akka.stream.scaladsl.Flow
23 | import akka.stream.stage._
24 | import akka.stream._
25 | import akka.util.ByteString
26 |
27 | /**
28 | * A simple Brotli Flow
29 | *
30 | * Brotlis each chunk separately.
31 | */
/**
 * A simple Brotli Flow: optionally re-chunks incoming ByteStrings, then
 * compresses them with [[BrotliCompression.brotli]].
 */
object BrotliFlow {
  /**
   * Create a Brotli Flow with the given buffer size. The bufferSize controls how much data is sent to the Brotli compressor in
   * one go. You can use `0` or `Int.MaxValue` to disable the buffer completely.
   *
   * In general, it is recommended to turn off the buffer and prevent generation of overlong chunks at the source.
   */
  def brotli(
      bufferSize: Int = 512,
      compressionLevel: Int = 9 // TODO use constants
  ): Flow[ByteString, ByteString, _] = {
    Flow[ByteString]
      .via(chunkerIfNeeded(bufferSize))
      .via(BrotliCompression.brotli(compressionLevel))
  }

  // Insert a chunker only for a real, finite buffer size. BUGFIX: the previous
  // `||` condition was true for every Int, so the buffer could never be disabled
  // and bufferSize = 0 built a Chunker(0) that emitted empty chunks forever.
  private def chunkerIfNeeded(bufferSize: Int): Flow[ByteString, ByteString, Any] =
    if (bufferSize > 0 && bufferSize < Int.MaxValue) Flow.fromGraph(new Chunker(bufferSize))
    else Flow[ByteString]

  // http://doc.akka.io/docs/akka/2.4.14/scala/stream/stream-cookbook.html#Chunking_up_a_stream_of_ByteStrings_into_limited_size_ByteStrings
  private class Chunker(val chunkSize: Int) extends GraphStage[FlowShape[ByteString, ByteString]] {
    private val in = Inlet[ByteString]("Chunker.in")
    private val out = Outlet[ByteString]("Chunker.out")

    override val shape: FlowShape[ByteString, ByteString] = FlowShape.of(in, out)
    override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) {
      // Bytes received but not yet emitted downstream.
      private var buffer = ByteString.empty

      setHandler(out, new OutHandler {
        override def onPull(): Unit = emitChunk()
      })
      setHandler(
        in,
        new InHandler {
          override def onPush(): Unit = {
            val elem = grab(in)
            buffer ++= elem
            emitChunk()
          }

          override def onUpstreamFinish(): Unit = {
            if (buffer.isEmpty) completeStage()
            else {
              // There are elements left in buffer, so
              // we keep accepting downstream pulls and push from buffer until emptied.
              //
              // It might be though, that the upstream finished while it was pulled, in which
              // case we will not get an onPull from the downstream, because we already had one.
              // In that case we need to emit from the buffer.
              if (isAvailable(out)) emitChunk()
            }
          }
        }
      )

      // Push at most chunkSize bytes, pulling more input when the buffer runs dry.
      private def emitChunk(): Unit = {
        if (buffer.isEmpty) {
          if (isClosed(in)) completeStage()
          else pull(in)
        } else {
          val (chunk, nextBuffer) = buffer.splitAt(chunkSize)
          buffer = nextBuffer
          push(out, chunk)
        }
      }
    }
  }
}
--------------------------------------------------------------------------------
/play-v30/src/main/scala/play.api.libs.streams/BrotliFlow.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2023 Mariot Chauvin
3 | *
4 | * Mariot Chauvin licenses this file to you under the Apache License,
5 | * version 2.0 (the "License"); you may not use this file except in compliance
6 | * with the License. You may obtain a copy of the License at:
7 | *
8 | * https://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 | * License for the specific language governing permissions and limitations
14 | * under the License.
15 | */
16 |
17 | package play.api.libs.streams
18 |
19 | import java.util.zip.Deflater
20 |
21 | import org.apache.pekko.stream.scaladsl.BrotliCompression
22 | import org.apache.pekko.stream.scaladsl.Flow
23 | import org.apache.pekko.stream.stage._
24 | import org.apache.pekko.stream._
25 | import org.apache.pekko.util.ByteString
26 |
27 | /**
28 | * A simple Brotli Flow
29 | *
30 | * Brotlis each chunk separately.
31 | */
/**
 * A simple Brotli Flow: optionally re-chunks incoming ByteStrings, then
 * compresses them with [[BrotliCompression.brotli]].
 */
object BrotliFlow {
  /**
   * Create a Brotli Flow with the given buffer size. The bufferSize controls how much data is sent to the Brotli compressor in
   * one go. You can use `0` or `Int.MaxValue` to disable the buffer completely.
   *
   * In general, it is recommended to turn off the buffer and prevent generation of overlong chunks at the source.
   */
  def brotli(
      bufferSize: Int = 512,
      compressionLevel: Int = 9 // TODO use constants
  ): Flow[ByteString, ByteString, _] = {
    Flow[ByteString]
      .via(chunkerIfNeeded(bufferSize))
      .via(BrotliCompression.brotli(compressionLevel))
  }

  // Insert a chunker only for a real, finite buffer size. BUGFIX: the previous
  // `||` condition was true for every Int, so the buffer could never be disabled
  // and bufferSize = 0 built a Chunker(0) that emitted empty chunks forever.
  private def chunkerIfNeeded(bufferSize: Int): Flow[ByteString, ByteString, Any] =
    if (bufferSize > 0 && bufferSize < Int.MaxValue) Flow.fromGraph(new Chunker(bufferSize))
    else Flow[ByteString]

  // http://doc.akka.io/docs/akka/2.4.14/scala/stream/stream-cookbook.html#Chunking_up_a_stream_of_ByteStrings_into_limited_size_ByteStrings
  private class Chunker(val chunkSize: Int) extends GraphStage[FlowShape[ByteString, ByteString]] {
    private val in = Inlet[ByteString]("Chunker.in")
    private val out = Outlet[ByteString]("Chunker.out")

    override val shape: FlowShape[ByteString, ByteString] = FlowShape.of(in, out)
    override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) {
      // Bytes received but not yet emitted downstream.
      private var buffer = ByteString.empty

      setHandler(out, new OutHandler {
        override def onPull(): Unit = emitChunk()
      })
      setHandler(
        in,
        new InHandler {
          override def onPush(): Unit = {
            val elem = grab(in)
            buffer ++= elem
            emitChunk()
          }

          override def onUpstreamFinish(): Unit = {
            if (buffer.isEmpty) completeStage()
            else {
              // There are elements left in buffer, so
              // we keep accepting downstream pulls and push from buffer until emptied.
              //
              // It might be though, that the upstream finished while it was pulled, in which
              // case we will not get an onPull from the downstream, because we already had one.
              // In that case we need to emit from the buffer.
              if (isAvailable(out)) emitChunk()
            }
          }
        }
      )

      // Push at most chunkSize bytes, pulling more input when the buffer runs dry.
      private def emitChunk(): Unit = {
        if (buffer.isEmpty) {
          if (isClosed(in)) completeStage()
          else pull(in)
        } else {
          val (chunk, nextBuffer) = buffer.splitAt(chunkSize)
          buffer = nextBuffer
          push(out, chunk)
        }
      }
    }
  }
}
--------------------------------------------------------------------------------
/akka/src/test/scala/akka.stream.io.compression.brotli/CodecSpecSupport.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2009-2022 Lightbend Inc.
3 | */
4 |
5 | package akka.stream.io.compression.brotli
6 |
7 | import org.scalatest.BeforeAndAfterAll
8 | import org.scalatest.Suite
9 | import org.scalatest.matchers.should.Matchers
10 |
11 | import akka.actor.ActorSystem
12 | import akka.testkit.TestKit
13 | import akka.util.ByteString
14 |
/**
 * Shared fixtures for codec specs: ByteString matchers and hex helpers,
 * sample payloads of several sizes, and the ActorSystem lifecycle used to
 * materialize the streams under test.
 */
trait CodecSpecSupport extends Matchers with BeforeAndAfterAll { self: Suite =>

  // Matcher asserting that a ByteString decodes (with `charset`) to `string`.
  def readAs(string: String, charset: String = "UTF8") =
    equal(string).matcher[String].compose { (_: ByteString).decodeString(charset) }
  // Hex representation of a ByteString, e.g. ByteString(1, -1) => "01ff".
  def hexDump(bytes: ByteString) = bytes.map("%02x".format(_)).mkString
  // Inverse of hexDump: parse a hex string back into a byte array.
  def fromHexDump(dump: String) = dump.grouped(2).toArray.map(chars => Integer.parseInt(new String(chars), 16).toByte)

  // Debug helper: print the four bytes of `i` (low byte first) and return `i` unchanged.
  def printBytes(i: Int, id: String) = {
    def byte(i: Int) = (i & 0xFF).toHexString
    println(id + ": " + byte(i) + ":" + byte(i >> 8) + ":" + byte(i >> 16) + ":" + byte(i >> 24))
    i
  }

  // UTF-8 encodings of the payloads below (lazy: the source vals are defined later).
  lazy val emptyTextBytes = ByteString(emptyText, "UTF8")
  lazy val smallTextBytes = ByteString(smallText, "UTF8")
  lazy val largeTextBytes = ByteString(largeText, "UTF8")

  val emptyText = ""
  val smallText = "Yeah!"
  // Large compressible payload; line endings normalized so hashes/lengths are
  // stable across platforms.
  val largeText =
    """Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore
magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd
gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing
elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos
et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor
sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et
dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd
gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.
Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat
nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis
dolore te feugait nulla facilisi. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh
euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.
Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo
consequat. Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu
feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit
augue duis dolore te feugait nulla facilisi.
Nam liber tempor cum soluta nobis eleifend option congue nihil imperdiet doming id quod mazim placerat facer possim
assum. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet
dolore magna aliquam erat volutpat. Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit
lobortis nisl ut aliquip ex ea commodo consequat.
Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat
nulla facilisis.
At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem
ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt
ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum.
Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet,
consetetur sadipscing elitr, At accusam aliquyam diam diam dolore dolores duo eirmod eos erat, et nonumy sed tempor et
et invidunt justo labore Stet clita ea et gubergren, kasd magna no rebum. sanctus sea sed takimata ut vero voluptua.
est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor
invidunt ut labore et dolore magna aliquyam erat.
Consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam
voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus
est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy e""".replace(
      "\r\n",
      "\n")

  // One ActorSystem per spec class; materializes the flows under test.
  implicit val system: ActorSystem = ActorSystem(getClass.getSimpleName)

  // Tear the system down so test JVMs don't leak threads between suites.
  override def afterAll() = TestKit.shutdownActorSystem(system)
}
--------------------------------------------------------------------------------
/pekko/src/test/scala/org.apache.pekko.stream.io.compression.brotli/CodecSpecSupport.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2009-2022 Lightbend Inc.
3 | */
4 |
5 | package org.apache.pekko.stream.io.compression.brotli
6 |
7 | import org.scalatest.BeforeAndAfterAll
8 | import org.scalatest.Suite
9 | import org.scalatest.matchers.should.Matchers
10 |
11 | import org.apache.pekko.actor.ActorSystem
12 | import org.apache.pekko.testkit.TestKit
13 | import org.apache.pekko.util.ByteString
14 |
/**
 * Shared fixtures for codec specs: ByteString matchers and hex helpers,
 * sample payloads of several sizes, and the ActorSystem lifecycle used to
 * materialize the streams under test.
 */
trait CodecSpecSupport extends Matchers with BeforeAndAfterAll { self: Suite =>

  // Matcher asserting that a ByteString decodes (with `charset`) to `string`.
  def readAs(string: String, charset: String = "UTF8") =
    equal(string).matcher[String].compose { (_: ByteString).decodeString(charset) }
  // Hex representation of a ByteString, e.g. ByteString(1, -1) => "01ff".
  def hexDump(bytes: ByteString) = bytes.map("%02x".format(_)).mkString
  // Inverse of hexDump: parse a hex string back into a byte array.
  def fromHexDump(dump: String) = dump.grouped(2).toArray.map(chars => Integer.parseInt(new String(chars), 16).toByte)

  // Debug helper: print the four bytes of `i` (low byte first) and return `i` unchanged.
  def printBytes(i: Int, id: String) = {
    def byte(i: Int) = (i & 0xFF).toHexString
    println(id + ": " + byte(i) + ":" + byte(i >> 8) + ":" + byte(i >> 16) + ":" + byte(i >> 24))
    i
  }

  // UTF-8 encodings of the payloads below (lazy: the source vals are defined later).
  lazy val emptyTextBytes = ByteString(emptyText, "UTF8")
  lazy val smallTextBytes = ByteString(smallText, "UTF8")
  lazy val largeTextBytes = ByteString(largeText, "UTF8")

  val emptyText = ""
  val smallText = "Yeah!"
  // Large compressible payload; line endings normalized so hashes/lengths are
  // stable across platforms.
  val largeText =
    """Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore
magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd
gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing
elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos
et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor
sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et
dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd
gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.
Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat
nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit augue duis
dolore te feugait nulla facilisi. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh
euismod tincidunt ut laoreet dolore magna aliquam erat volutpat.
Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea commodo
consequat. Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu
feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit
augue duis dolore te feugait nulla facilisi.
Nam liber tempor cum soluta nobis eleifend option congue nihil imperdiet doming id quod mazim placerat facer possim
assum. Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet
dolore magna aliquam erat volutpat. Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit
lobortis nisl ut aliquip ex ea commodo consequat.
Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu feugiat
nulla facilisis.
At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem
ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt
ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum.
Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet,
consetetur sadipscing elitr, At accusam aliquyam diam diam dolore dolores duo eirmod eos erat, et nonumy sed tempor et
et invidunt justo labore Stet clita ea et gubergren, kasd magna no rebum. sanctus sea sed takimata ut vero voluptua.
est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor
invidunt ut labore et dolore magna aliquyam erat.
Consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam
voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus
est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy e""".replace(
      "\r\n",
      "\n")

  // One ActorSystem per spec class; materializes the flows under test.
  implicit val system: ActorSystem = ActorSystem(getClass.getSimpleName)

  // Tear the system down so test JVMs don't leak threads between suites.
  override def afterAll() = TestKit.shutdownActorSystem(system)
}
--------------------------------------------------------------------------------
/play-v29/src/test/scala/play.filters.brotli/BrotliFilterSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2009-2016 Lightbend Inc. and Mariot Chauvin
3 | */
4 | package play.filters.brotli
5 |
6 | import javax.inject.Inject
7 |
8 | import akka.stream.Materializer
9 | import akka.stream.scaladsl.Source
10 | import akka.util.ByteString
11 | import play.api.Application
12 | import play.api.http.{ HttpEntity, HttpFilters }
13 | import play.api.inject._
14 | import play.api.inject.guice.GuiceApplicationBuilder
15 | import play.api.routing.{Router, SimpleRouterImpl}
16 | import play.api.test._
17 | import play.api.mvc.{ AnyContentAsEmpty, Action, DefaultActionBuilder, Result }
18 | import play.api.mvc.Results._
19 | import java.io.ByteArrayInputStream
20 | import java.io.ByteArrayOutputStream
21 | import java.nio.file.Path
22 | import java.nio.file.Files
23 | import akka.stream.scaladsl.FileIO
24 | import org.apache.commons.io.IOUtils
25 | import scala.concurrent.Future
26 | import scala.util.Random
27 | import org.specs2.matcher.{DataTables, MatchResult}
28 |
29 | import com.aayushatharva.brotli4j.decoder.BrotliInputStream;
30 | import play.api.mvc.Cookie
31 |
32 | /** Specification for [[BrotliFilter]].
33 |  *
34 |  *  Covers Accept-Encoding negotiation (qvalues, wildcards, identity), the
35 |  *  strict/chunked/streamed entity cases around the chunked threshold, and
36 |  *  preservation of headers, cookies, session data and the Vary header.
37 |  */
38 | object BrotliFilterSpec extends PlaySpecification with DataTables {
39 |
40 |   sequential
41 |
42 |   "The BrotliFilter" should {
43 |
44 |     "compress responses with brotli" in withApplication(Ok("hello")) { implicit app =>
45 |       checkCompressedBody(makeBrotliRequest(app), "hello")(app.materializer)
46 |     }
47 |
48 |     """compress a response with brotli if (and only if) it is accepted and preferred by the request.
49 |       |Although not explicitly mentioned in RFC 2616 sect. 14.3, the default qvalue
50 |       |is assumed to be 1 for all mentioned codings. If no "*" is present, unmentioned
51 |       |codings are assigned a qvalue of 0, except the identity coding which gets q=0.001,
52 |       |which is the lowest possible acceptable qvalue.
53 |       |This seems to be the most consistent behaviour with respect to the other "accept"
54 |       |header fields described in sect 14.1-5.""".stripMargin in withApplication(Ok("meep")) { implicit app =>
55 |
56 |       val (plain, bred) = (None, Some("br"))
57 |
58 |       "Accept-Encoding of request" || "Response" |
59 |         //------------------------------------++------------+
60 |         "br" !! bred |
61 |         "compress,br" !! bred |
62 |         "compress, br" !! bred |
63 |         "br,compress" !! bred |
64 |         "deflate, br,compress" !! bred |
65 |         "br, compress" !! bred |
66 |         "identity, br, compress" !! bred |
67 |         "*" !! bred |
68 |         "*;q=0" !! plain |
69 |         "*; q=0" !! plain |
70 |         "*;q=0.000" !! plain |
71 |         "br;q=0" !! plain |
72 |         "br; q=0.00" !! plain |
73 |         "*;q=0, br" !! bred |
74 |         "compress;q=0.1, *;q=0, br" !! bred |
75 |         "compress;q=0.1, *;q=0, br;q=0.005" !! bred |
76 |         "compress, br;q=0.001" !! bred |
77 |         "compress, br;q=0.002" !! bred |
78 |         "compress;q=1, *;q=0, br;q=0.000" !! plain |
79 |         "compress;q=1, *;q=0" !! plain |
80 |         "identity" !! plain |
81 |         "br;q=0.5, identity" !! plain |
82 |         "br;q=0.5, identity;q=1" !! plain |
83 |         "br;q=0.6, identity;q=0.5" !! bred |
84 |         "*;q=0.7, br;q=0.6, identity;q=0.4" !! bred |
85 |         "" !! plain |> {
86 |
87 |         (codings, expectedEncoding) =>
88 |           header(CONTENT_ENCODING, requestAccepting(app, codings)) must be equalTo (expectedEncoding)
89 |       }
90 |     }
91 |
92 |     "not brotli empty responses" in withApplication(Ok) { implicit app =>
93 |       checkNotCompressed(makeBrotliRequest(app), "")(app.materializer)
94 |     }
95 |
96 |     "not brotli responses when not requested" in withApplication(Ok("hello")) { implicit app =>
97 |       checkNotCompressed(route(app, FakeRequest()).get, "hello")(app.materializer)
98 |     }
99 |
100 |     "not brotli HEAD requests" in withApplication(Ok) { implicit app =>
101 |       checkNotCompressed(route(app, FakeRequest("HEAD", "/").withHeaders(ACCEPT_ENCODING -> "br")).get, "")(app.materializer)
102 |     }
103 |
104 |     "not brotli no content responses" in withApplication(NoContent) { implicit app =>
105 |       checkNotCompressed(makeBrotliRequest(app), "")(app.materializer)
106 |     }
107 |
108 |     "not brotli not modified responses" in withApplication(NotModified) { implicit app =>
109 |       checkNotCompressed(makeBrotliRequest(app), "")(app.materializer)
110 |     }
111 |
112 |     "brotli chunked responses" in withApplication(Ok.chunked(Source(List("foo", "bar")))) { implicit app =>
113 |       val result = makeBrotliRequest(app)
114 |
115 |       checkCompressedBody(result, "foobar")(app.materializer)
116 |       await(result).body must beAnInstanceOf[HttpEntity.Chunked]
117 |     }
118 |
119 |     val body = Random.nextString(1000)
120 |
121 |     "not buffer more than the configured threshold" in withApplication(
122 |       Ok.sendEntity(HttpEntity.Streamed(Source.single(ByteString(body)), Some(1000), None)), chunkedThreshold = 512) { implicit app =>
123 |       val result = makeBrotliRequest(app)
124 |       checkCompressedBody(result, body)(app.materializer)
125 |       await(result).body must beAnInstanceOf[HttpEntity.Chunked]
126 |     }
127 |
128 |     "brotli a strict body even if it exceeds the threshold" in withApplication(Ok(body), chunkedThreshold = 512) { implicit app =>
129 |       val result = makeBrotliRequest(app)
130 |       checkCompressedBody(result, body)(app.materializer)
131 |       await(result).body must beAnInstanceOf[HttpEntity.Strict]
132 |     }
133 |
134 |     val path: Path = Path.of(getClass.getResource("/bootstrap.min.css").toURI())
135 |     val source = FileIO.fromPath(path) // use the imported FileIO, consistent with the play-v30 spec
136 |     val contentLength = Files.size(path)
137 |
138 |     "brotli entire content for large files" in withApplication(
139 |       Ok.sendEntity(HttpEntity.Streamed(source, Some(contentLength), Some("text/css"))), chunkedThreshold = 512) { implicit app =>
140 |       val result = makeBrotliRequest(app)
141 |       checkCompressedBodyLength(result, contentLength)(app.materializer)
142 |       await(result).body must beAnInstanceOf[HttpEntity.Chunked]
143 |     }
144 |
145 |     "preserve original headers" in withApplication(Ok("hello").withHeaders(SERVER -> "Play")) { implicit app =>
146 |       val result = makeBrotliRequest(app)
147 |       checkCompressed(result)
148 |       header(SERVER, result) must beSome("Play")
149 |     }
150 |
151 |     "preserve original cookies" in withApplication(Ok("hello").withCookies(Cookie("foo", "bar"))) { implicit app =>
152 |       val result = makeBrotliRequest(app)
153 |       checkCompressed(result)
154 |       cookies(result).get("foo") must beSome(Cookie("foo", "bar"))
155 |     }
156 |
157 |     "preserve original session" in withApplication(Ok("hello").withSession("foo" -> "bar")) { implicit app =>
158 |       val result = makeBrotliRequest(app)
159 |       checkCompressed(result)
160 |       session(result).get("foo") must beSome("bar")
161 |     }
162 |
163 |     "preserve original Vary header values" in withApplication(Ok("hello").withHeaders(VARY -> "original")) { implicit app =>
164 |       val result = makeBrotliRequest(app)
165 |       checkCompressed(result)
166 |       header(VARY, result) must beSome[String].which(header => header.contains("original,"))
167 |     }
168 |
169 |     "preserve original Vary header values and not duplicate case-insensitive ACCEPT-ENCODING" in withApplication(Ok("hello").withHeaders(VARY -> "original,ACCEPT-encoding")) { implicit app =>
170 |       val result = makeBrotliRequest(app)
171 |       checkCompressed(result)
172 |       header(VARY, result) must beSome[String].which(header => header.split(",").filter(_.toLowerCase(java.util.Locale.ENGLISH) == ACCEPT_ENCODING.toLowerCase(java.util.Locale.ENGLISH)).size == 1)
173 |     }
174 |   }
175 |
176 |   /** Filter chain containing only the filter under test. */
177 |   class Filters @Inject() (brotliFilter: BrotliFilter) extends HttpFilters {
178 |     def filters = Seq(brotliFilter)
179 |   }
180 |
181 |   /** Router answering every request with the injected fixed [[Result]]. */
182 |   class ResultRouter @Inject() (action: DefaultActionBuilder, result: Result) extends SimpleRouterImpl({ case _ => action(result) })
183 |
184 |   /** Runs `block` against an application serving `result` on every route, with the
185 |    *  brotli filter configured at the given quality and chunked threshold.
186 |    */
187 |   def withApplication[T](result: Result, quality: Int = 5, chunkedThreshold: Int = 1024)(block: Application => T): T = {
188 |     val application = new GuiceApplicationBuilder()
189 |       .configure(
190 |         "akka.loglevel" -> "OFF",
191 |         "play.filters.brotli.quality" -> quality,
192 |         "play.filters.brotli.chunkedThreshold" -> chunkedThreshold
193 |       ).overrides(
194 |         bind[Result].to(result),
195 |         bind[Router].to[ResultRouter],
196 |         bind[HttpFilters].to[Filters]
197 |       ).build()
198 |     running(application)(block(application))
199 |   }
200 |
201 |   /** A request advertising brotli support via Accept-Encoding. */
202 |   def brotliRequest: FakeRequest[AnyContentAsEmpty.type] = FakeRequest().withHeaders(ACCEPT_ENCODING -> "br")
203 |
204 |   def makeBrotliRequest(app: Application): Future[Result] = route(app, brotliRequest).get
205 |
206 |   /** Routes a request carrying an arbitrary Accept-Encoding value. */
207 |   def requestAccepting(app: Application, codings: String): Future[Result] = route(app, FakeRequest().withHeaders(ACCEPT_ENCODING -> codings)).get
208 |
209 |   /** Decompresses a brotli-encoded body to a UTF-8 string. The stream is closed
210 |    *  on all paths (try/finally), not only on successful decoding.
211 |    */
212 |   def uncompress(bytes: ByteString): String = {
213 |     val is = new BrotliInputStream(new ByteArrayInputStream(bytes.toArray))
214 |     try IOUtils.toString(is, "UTF-8")
215 |     finally is.close()
216 |   }
217 |
218 |   /** Decompresses a brotli-encoded body to raw bytes, closing the stream on all paths. */
219 |   def uncompressBytes(bytes: ByteString): Array[Byte] = {
220 |     val is = new BrotliInputStream(new ByteArrayInputStream(bytes.toArray))
221 |     try IOUtils.toByteArray(is)
222 |     finally is.close()
223 |   }
224 |
225 |   /** Asserts that the response declares Content-Encoding: br. */
226 |   def checkCompressed(result: Future[Result]): MatchResult[Option[String]] = {
227 |     header(CONTENT_ENCODING, result) aka "Content encoding header" must beSome("br")
228 |   }
229 |
230 |   /** Asserts brotli encoding and that the decompressed body has the expected length. */
231 |   def checkCompressedBodyLength(result: Future[Result], contentLength: Long)(implicit mat: Materializer): MatchResult[Any] = {
232 |     checkCompressed(result)
233 |     val resultBody = contentAsBytes(result)
234 |     await(result).body.contentLength.foreach { cl =>
235 |       resultBody.length must_== cl
236 |     }
237 |     uncompressBytes(resultBody).length must_== contentLength
238 |   }
239 |
240 |   /** Asserts brotli encoding and that the decompressed body equals `body`. */
241 |   def checkCompressedBody(result: Future[Result], body: String)(implicit mat: Materializer): MatchResult[Any] = {
242 |     checkCompressed(result)
243 |     val resultBody = contentAsBytes(result)
244 |     await(result).body.contentLength.foreach { cl =>
245 |       resultBody.length must_== cl
246 |     }
247 |     uncompress(resultBody) must_== body
248 |   }
249 |
250 |   /** Asserts the response is uncompressed and its body equals `body` verbatim. */
251 |   def checkNotCompressed(result: Future[Result], body: String)(implicit mat: Materializer): MatchResult[Any] = {
252 |     header(CONTENT_ENCODING, result) must beNone
253 |     contentAsString(result) must_== body
254 |   }
255 | }
256 |
--------------------------------------------------------------------------------
/play-v30/src/test/scala/play.filters.brotli/BrotliFilterSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2009-2023 Lightbend Inc. and Mariot Chauvin
3 | */
4 | package play.filters.brotli
5 |
6 | import javax.inject.Inject
7 |
8 | import org.apache.pekko.stream.Materializer
9 | import org.apache.pekko.stream.scaladsl.Source
10 | import org.apache.pekko.util.ByteString
11 | import play.api.Application
12 | import play.api.http.{ HttpEntity, HttpFilters }
13 | import play.api.inject._
14 | import play.api.inject.guice.GuiceApplicationBuilder
15 | import play.api.routing.{Router, SimpleRouterImpl}
16 | import play.api.test._
17 | import play.api.mvc.{ AnyContentAsEmpty, Action, DefaultActionBuilder, Result }
18 | import play.api.mvc.Results._
19 | import java.io.ByteArrayInputStream
20 | import java.io.ByteArrayOutputStream
21 | import java.nio.file.Path
22 | import java.nio.file.Files
23 | import org.apache.pekko.stream.scaladsl.FileIO
24 | import org.apache.commons.io.IOUtils
25 | import scala.concurrent.Future
26 | import scala.util.Random
27 | import org.specs2.matcher.{DataTables, MatchResult}
28 |
29 | import com.aayushatharva.brotli4j.decoder.BrotliInputStream;
30 | import play.api.mvc.Cookie
31 |
32 | /** Specification for [[BrotliFilter]].
33 |  *
34 |  *  Covers Accept-Encoding negotiation (qvalues, wildcards, identity), the
35 |  *  strict/chunked/streamed entity cases around the chunked threshold, and
36 |  *  preservation of headers, cookies, session data and the Vary header.
37 |  */
38 | object BrotliFilterSpec extends PlaySpecification with DataTables {
39 |
40 |   sequential
41 |
42 |   "The BrotliFilter" should {
43 |
44 |     "compress responses with brotli" in withApplication(Ok("hello")) { implicit app =>
45 |       checkCompressedBody(makeBrotliRequest(app), "hello")(app.materializer)
46 |     }
47 |
48 |     """compress a response with brotli if (and only if) it is accepted and preferred by the request.
49 |       |Although not explicitly mentioned in RFC 2616 sect. 14.3, the default qvalue
50 |       |is assumed to be 1 for all mentioned codings. If no "*" is present, unmentioned
51 |       |codings are assigned a qvalue of 0, except the identity coding which gets q=0.001,
52 |       |which is the lowest possible acceptable qvalue.
53 |       |This seems to be the most consistent behaviour with respect to the other "accept"
54 |       |header fields described in sect 14.1-5.""".stripMargin in withApplication(Ok("meep")) { implicit app =>
55 |
56 |       val (plain, bred) = (None, Some("br"))
57 |
58 |       "Accept-Encoding of request" || "Response" |
59 |         //------------------------------------++------------+
60 |         "br" !! bred |
61 |         "compress,br" !! bred |
62 |         "compress, br" !! bred |
63 |         "br,compress" !! bred |
64 |         "deflate, br,compress" !! bred |
65 |         "br, compress" !! bred |
66 |         "identity, br, compress" !! bred |
67 |         "*" !! bred |
68 |         "*;q=0" !! plain |
69 |         "*; q=0" !! plain |
70 |         "*;q=0.000" !! plain |
71 |         "br;q=0" !! plain |
72 |         "br; q=0.00" !! plain |
73 |         "*;q=0, br" !! bred |
74 |         "compress;q=0.1, *;q=0, br" !! bred |
75 |         "compress;q=0.1, *;q=0, br;q=0.005" !! bred |
76 |         "compress, br;q=0.001" !! bred |
77 |         "compress, br;q=0.002" !! bred |
78 |         "compress;q=1, *;q=0, br;q=0.000" !! plain |
79 |         "compress;q=1, *;q=0" !! plain |
80 |         "identity" !! plain |
81 |         "br;q=0.5, identity" !! plain |
82 |         "br;q=0.5, identity;q=1" !! plain |
83 |         "br;q=0.6, identity;q=0.5" !! bred |
84 |         "*;q=0.7, br;q=0.6, identity;q=0.4" !! bred |
85 |         "" !! plain |> {
86 |
87 |         (codings, expectedEncoding) =>
88 |           header(CONTENT_ENCODING, requestAccepting(app, codings)) must be equalTo (expectedEncoding)
89 |       }
90 |     }
91 |
92 |     "not brotli empty responses" in withApplication(Ok) { implicit app =>
93 |       checkNotCompressed(makeBrotliRequest(app), "")(app.materializer)
94 |     }
95 |
96 |     "not brotli responses when not requested" in withApplication(Ok("hello")) { implicit app =>
97 |       checkNotCompressed(route(app, FakeRequest()).get, "hello")(app.materializer)
98 |     }
99 |
100 |     "not brotli HEAD requests" in withApplication(Ok) { implicit app =>
101 |       checkNotCompressed(route(app, FakeRequest("HEAD", "/").withHeaders(ACCEPT_ENCODING -> "br")).get, "")(app.materializer)
102 |     }
103 |
104 |     "not brotli no content responses" in withApplication(NoContent) { implicit app =>
105 |       checkNotCompressed(makeBrotliRequest(app), "")(app.materializer)
106 |     }
107 |
108 |     "not brotli not modified responses" in withApplication(NotModified) { implicit app =>
109 |       checkNotCompressed(makeBrotliRequest(app), "")(app.materializer)
110 |     }
111 |
112 |     "brotli chunked responses" in withApplication(Ok.chunked(Source(List("foo", "bar")))) { implicit app =>
113 |       val result = makeBrotliRequest(app)
114 |
115 |       checkCompressedBody(result, "foobar")(app.materializer)
116 |       await(result).body must beAnInstanceOf[HttpEntity.Chunked]
117 |     }
118 |
119 |     val body = Random.nextString(1000)
120 |
121 |     "not buffer more than the configured threshold" in withApplication(
122 |       Ok.sendEntity(HttpEntity.Streamed(Source.single(ByteString(body)), Some(1000), None)), chunkedThreshold = 512) { implicit app =>
123 |       val result = makeBrotliRequest(app)
124 |       checkCompressedBody(result, body)(app.materializer)
125 |       await(result).body must beAnInstanceOf[HttpEntity.Chunked]
126 |     }
127 |
128 |     "brotli a strict body even if it exceeds the threshold" in withApplication(Ok(body), chunkedThreshold = 512) { implicit app =>
129 |       val result = makeBrotliRequest(app)
130 |       checkCompressedBody(result, body)(app.materializer)
131 |       await(result).body must beAnInstanceOf[HttpEntity.Strict]
132 |     }
133 |
134 |     val path: Path = Path.of(getClass.getResource("/bootstrap.min.css").toURI())
135 |     val source = FileIO.fromPath(path)
136 |     val contentLength = Files.size(path)
137 |
138 |     "brotli entire content for large files" in withApplication(
139 |       Ok.sendEntity(HttpEntity.Streamed(source, Some(contentLength), Some("text/css"))), chunkedThreshold = 512) { implicit app =>
140 |       val result = makeBrotliRequest(app)
141 |       checkCompressedBodyLength(result, contentLength)(app.materializer)
142 |       await(result).body must beAnInstanceOf[HttpEntity.Chunked]
143 |     }
144 |
145 |     "preserve original headers" in withApplication(Ok("hello").withHeaders(SERVER -> "Play")) { implicit app =>
146 |       val result = makeBrotliRequest(app)
147 |       checkCompressed(result)
148 |       header(SERVER, result) must beSome("Play")
149 |     }
150 |
151 |     "preserve original cookies" in withApplication(Ok("hello").withCookies(Cookie("foo", "bar"))) { implicit app =>
152 |       val result = makeBrotliRequest(app)
153 |       checkCompressed(result)
154 |       cookies(result).get("foo") must beSome(Cookie("foo", "bar"))
155 |     }
156 |
157 |     "preserve original session" in withApplication(Ok("hello").withSession("foo" -> "bar")) { implicit app =>
158 |       val result = makeBrotliRequest(app)
159 |       checkCompressed(result)
160 |       session(result).get("foo") must beSome("bar")
161 |     }
162 |
163 |     "preserve original Vary header values" in withApplication(Ok("hello").withHeaders(VARY -> "original")) { implicit app =>
164 |       val result = makeBrotliRequest(app)
165 |       checkCompressed(result)
166 |       header(VARY, result) must beSome[String].which(header => header.contains("original,"))
167 |     }
168 |
169 |     "preserve original Vary header values and not duplicate case-insensitive ACCEPT-ENCODING" in withApplication(Ok("hello").withHeaders(VARY -> "original,ACCEPT-encoding")) { implicit app =>
170 |       val result = makeBrotliRequest(app)
171 |       checkCompressed(result)
172 |       header(VARY, result) must beSome[String].which(header => header.split(",").filter(_.toLowerCase(java.util.Locale.ENGLISH) == ACCEPT_ENCODING.toLowerCase(java.util.Locale.ENGLISH)).size == 1)
173 |     }
174 |   }
175 |
176 |   /** Filter chain containing only the filter under test. */
177 |   class Filters @Inject() (brotliFilter: BrotliFilter) extends HttpFilters {
178 |     def filters = Seq(brotliFilter)
179 |   }
180 |
181 |   /** Router answering every request with the injected fixed [[Result]]. */
182 |   class ResultRouter @Inject() (action: DefaultActionBuilder, result: Result) extends SimpleRouterImpl({ case _ => action(result) })
183 |
184 |   /** Runs `block` against an application serving `result` on every route, with the
185 |    *  brotli filter configured at the given quality and chunked threshold.
186 |    */
187 |   def withApplication[T](result: Result, quality: Int = 5, chunkedThreshold: Int = 1024)(block: Application => T): T = {
188 |     val application = new GuiceApplicationBuilder()
189 |       .configure(
190 |         "play.filters.brotli.quality" -> quality,
191 |         "play.filters.brotli.chunkedThreshold" -> chunkedThreshold
192 |       ).overrides(
193 |         bind[Result].to(result),
194 |         bind[Router].to[ResultRouter],
195 |         bind[HttpFilters].to[Filters]
196 |       ).build()
197 |     running(application)(block(application))
198 |   }
199 |
200 |   /** A request advertising brotli support via Accept-Encoding. */
201 |   def brotliRequest: FakeRequest[AnyContentAsEmpty.type] = FakeRequest().withHeaders(ACCEPT_ENCODING -> "br")
202 |
203 |   def makeBrotliRequest(app: Application): Future[Result] = route(app, brotliRequest).get
204 |
205 |   /** Routes a request carrying an arbitrary Accept-Encoding value. */
206 |   def requestAccepting(app: Application, codings: String): Future[Result] = route(app, FakeRequest().withHeaders(ACCEPT_ENCODING -> codings)).get
207 |
208 |   /** Decompresses a brotli-encoded body to a UTF-8 string. The stream is closed
209 |    *  on all paths (try/finally), not only on successful decoding.
210 |    */
211 |   def uncompress(bytes: ByteString): String = {
212 |     val is = new BrotliInputStream(new ByteArrayInputStream(bytes.toArray))
213 |     try IOUtils.toString(is, "UTF-8")
214 |     finally is.close()
215 |   }
216 |
217 |   /** Decompresses a brotli-encoded body to raw bytes, closing the stream on all paths. */
218 |   def uncompressBytes(bytes: ByteString): Array[Byte] = {
219 |     val is = new BrotliInputStream(new ByteArrayInputStream(bytes.toArray))
220 |     try IOUtils.toByteArray(is)
221 |     finally is.close()
222 |   }
223 |
224 |   /** Asserts that the response declares Content-Encoding: br. */
225 |   def checkCompressed(result: Future[Result]): MatchResult[Option[String]] = {
226 |     header(CONTENT_ENCODING, result) aka "Content encoding header" must beSome("br")
227 |   }
228 |
229 |   /** Asserts brotli encoding and that the decompressed body has the expected length. */
230 |   def checkCompressedBodyLength(result: Future[Result], contentLength: Long)(implicit mat: Materializer): MatchResult[Any] = {
231 |     checkCompressed(result)
232 |     val resultBody = contentAsBytes(result)
233 |     await(result).body.contentLength.foreach { cl =>
234 |       resultBody.length must_== cl
235 |     }
236 |     uncompressBytes(resultBody).length must_== contentLength
237 |   }
238 |
239 |   /** Asserts brotli encoding and that the decompressed body equals `body`. */
240 |   def checkCompressedBody(result: Future[Result], body: String)(implicit mat: Materializer): MatchResult[Any] = {
241 |     checkCompressed(result)
242 |     val resultBody = contentAsBytes(result)
243 |     await(result).body.contentLength.foreach { cl =>
244 |       resultBody.length must_== cl
245 |     }
246 |     uncompress(resultBody) must_== body
247 |   }
248 |
249 |   /** Asserts the response is uncompressed and its body equals `body` verbatim. */
250 |   def checkNotCompressed(result: Future[Result], body: String)(implicit mat: Materializer): MatchResult[Any] = {
251 |     header(CONTENT_ENCODING, result) must beNone
252 |     contentAsString(result) must_== body
253 |   }
254 | }
255 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/play-v29/src/main/scala/play.filters.brotli/BrotliFilter.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2023 Mariot Chauvin
3 | *
4 | * Mariot Chauvin licenses this file to you under the Apache License,
5 | * version 2.0 (the "License"); you may not use this file except in compliance
6 | * with the License. You may obtain a copy of the License at:
7 | *
8 | * https://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 | * License for the specific language governing permissions and limitations
14 | * under the License.
15 | */
16 |
17 | package play.filters.brotli
18 |
19 | import java.util.function.BiFunction
20 | import javax.inject.{ Provider, Inject, Singleton }
21 |
22 | import akka.stream.{ OverflowStrategy, FlowShape, Materializer }
23 | import akka.stream.scaladsl._
24 | import akka.util.ByteString
25 | import com.typesafe.config.ConfigMemorySize
26 | import play.api.http.HttpProtocol
27 | import play.api.inject.Module
28 | import play.api.{ Environment, Configuration }
29 | import play.api.mvc._
30 | import play.api.libs.streams.BrotliFlow
31 | import scala.concurrent.{ Future, ExecutionContextExecutor}
32 | import play.api.http.{ HttpChunk, HttpEntity, Status }
33 | import scala.compat.java8.FunctionConverters._
34 |
35 | import com.aayushatharva.brotli4j.encoder.BrotliOutputStream;
36 | import com.aayushatharva.brotli4j.encoder.Encoder;
37 |
/**
 * A brotli filter.
 *
 * This filter may compress with brotli the responses for any requests that aren't HEAD requests and specify an accept encoding of brotli.
 *
 * It won't compress under the following conditions:
 *
 * - The response code is 204 or 304 (these codes MUST NOT contain a body, and an empty compressed response is still
 *   several bytes long)
 * - The response already defines a Content-Encoding header
 * - A custom shouldBrotli function is supplied and it returns false
 *
 * Since compressing changes the content length of the response, this filter may do some buffering - it will buffer any
 * streamed responses that define a content length less than the configured chunked threshold. Responses that are
 * greater in length, or that don't define a content length, will not be buffered, but will be sent as chunked
 * responses.
 */
@Singleton
class BrotliFilter @Inject() (config: BrotliFilterConfig)(implicit mat: Materializer) extends EssentialFilter {

  // Fail fast at construction time if the brotli4j native library cannot be loaded on this platform.
  {
    brotli4s.Brotli.ensureAvailability()
  }

  import play.api.http.HeaderNames._

  /** Convenience constructor for manual (non-DI) instantiation. */
  def this(quality: Int = 5,
      bufferSize: Int = 8192,
      chunkedThreshold: Int = 102400,
      shouldBrotli: (RequestHeader, Result) => Boolean = (_, _) => true)(implicit mat: Materializer) = {
    this(BrotliFilterConfig(quality, bufferSize, chunkedThreshold, shouldBrotli))
  }

  def apply(next: EssentialAction) = new EssentialAction {
    implicit val ec: ExecutionContextExecutor = mat.executionContext
    def apply(request: RequestHeader) = {
      if (mayCompress(request)) {
        next(request).mapFuture(result => handleResult(request, result))
      } else {
        next(request)
      }
    }
  }

  /**
   * Compresses the result body, choosing a strategy based on the entity type:
   * strict entities (and streamed entities below the chunked threshold) are compressed in memory,
   * larger streamed entities are compressed through a streaming flow, and chunked entities are
   * re-chunked after compression.
   */
  private def handleResult(request: RequestHeader, result: Result): Future[Result] = {
    if (shouldCompress(result) && config.shouldBrotli(request, result)) {
      implicit val ec: ExecutionContextExecutor = mat.executionContext
      val header = result.header.copy(headers = setupHeader(result.header.headers))

      result.body match {

        case HttpEntity.Strict(data, contentType) =>
          Future.successful(result.copy(header = header, body = compressStrictEntity(data, contentType)))

        case entity @ HttpEntity.Streamed(_, Some(contentLength), contentType) if contentLength <= config.chunkedThreshold =>
          // It's below the chunked threshold, so buffer then compress and send
          entity.consumeData.map { data =>
            result.copy(header = header, body = compressStrictEntity(data, contentType))
          }

        case HttpEntity.Streamed(data, _, contentType) if request.version == HttpProtocol.HTTP_1_0 =>
          // It's above the chunked threshold, but we can't chunk it because we're using HTTP 1.0.
          // Instead, we use a close delimited body (ie, regular body with no content length)
          val compressed = data.via(createBrotliFlow)
          Future.successful(
            result.copy(header = header, body = HttpEntity.Streamed(compressed, None, contentType))
          )

        case HttpEntity.Streamed(data, _, contentType) =>
          // It's above the chunked threshold, compress through the brotli flow, and send as chunked
          val compressed = data.via(createBrotliFlow).map(d => HttpChunk.Chunk(d))
          Future.successful(result.copy(header = header, body = HttpEntity.Chunked(compressed, contentType)))

        case HttpEntity.Chunked(chunks, contentType) =>
          val wrappedFlow = Flow.fromGraph(GraphDSL.create[FlowShape[HttpChunk, HttpChunk]]() { implicit builder =>
            import GraphDSL.Implicits._

            val extractChunks = Flow[HttpChunk] collect { case HttpChunk.Chunk(data) => data }

            val foldData = Flow[ByteString].fold(ByteString.empty)((acc, x) => acc ++ x)

            val createChunks = Flow[ByteString].map[HttpChunk](HttpChunk.Chunk.apply)
            val filterLastChunk = Flow[HttpChunk]
              .filter(_.isInstanceOf[HttpChunk.LastChunk])
              // Since we're doing a merge by concatenating, the filter last chunk won't receive demand until the brotli
              // flow is finished. But the broadcast won't start broadcasting until both flows start demanding. So we
              // put a buffer of one in to ensure the filter last chunk flow demands from the broadcast.
              .buffer(1, OverflowStrategy.backpressure)

            val broadcast = builder.add(Broadcast[HttpChunk](2))
            val concat = builder.add(Concat[HttpChunk]())

            // Broadcast the stream through two separate flows, one that collects chunks and turns them into
            // ByteStrings, folds into one ByteString, sends that ByteString through the brotli flow, and then turns
            // the result back into chunks; the other just allows the last chunk through. Then concat the two flows.
            broadcast.out(0) ~> extractChunks ~> foldData ~> createBrotliFlow ~> createChunks ~> concat.in(0)
            broadcast.out(1) ~> filterLastChunk ~> concat.in(1)

            new FlowShape(broadcast.in, concat.out)
          })
          Future.successful(result.copy(header = header, body = HttpEntity.Chunked(chunks via wrappedFlow, contentType)))
      }
    } else {
      Future.successful(result)
    }
  }

  /** A fresh streaming brotli compression flow, configured from [[config]]. */
  private def createBrotliFlow: Flow[ByteString, ByteString, _] =
    BrotliFlow.brotli(config.bufferSize, config.quality)

  /**
   * Compresses a complete in-memory body and returns it as a strict entity, so that a
   * correct Content-Length can be computed for the compressed bytes.
   */
  private def compressStrictEntity(data: ByteString, contentType: Option[String]) = {
    val builder = ByteString.newBuilder
    val brotliParameters = new Encoder.Parameters().setQuality(config.quality)
    val brotliStream = new BrotliOutputStream(builder.asOutputStream, brotliParameters)
    // Close in a finally block so the encoder's native resources are released even if the write fails.
    try brotliStream.write(data.toArray)
    finally brotliStream.close()
    HttpEntity.Strict(builder.result(), contentType)
  }

  /**
   * Whether this request may be compressed.
   */
  private def mayCompress(request: RequestHeader) =
    request.method != "HEAD" && brotliIsAcceptedAndPreferredBy(request)

  /**
   * Whether the client's Accept-Encoding header accepts "br" (q > 0) and prefers it
   * at least as much as the identity encoding.
   */
  private def brotliIsAcceptedAndPreferredBy(request: RequestHeader) = {
    val codings = play.api.mvc.RequestHeader.acceptHeader(request.headers, ACCEPT_ENCODING)
    def explicitQValue(coding: String) = codings collectFirst { case (q, c) if c equalsIgnoreCase coding => q }
    // identity defaults to a tiny positive q-value so that an absent Accept-Encoding header
    // does not cause compression to be selected
    def defaultQValue(coding: String) = if (coding == "identity") 0.001d else 0d
    def qvalue(coding: String) = explicitQValue(coding) orElse explicitQValue("*") getOrElse defaultQValue(coding)

    val brQValue = qvalue("br")
    brQValue > 0d && brQValue >= qvalue("identity")
  }

  /**
   * Whether this response should be compressed. Responses that may not contain content won't be compressed, nor will
   * responses that already define a content encoding. Empty responses also shouldn't be compressed, as they will
   * actually always get bigger.
   */
  private def shouldCompress(result: Result) = isAllowedContent(result.header) &&
    isNotAlreadyCompressed(result.header) &&
    !result.body.isKnownEmpty

  /**
   * Certain response codes are forbidden by the HTTP spec to contain content, but a brotli-compressed response
   * always contains a minimum number of bytes, even for empty responses.
   */
  private def isAllowedContent(header: ResponseHeader) = header.status != Status.NO_CONTENT && header.status != Status.NOT_MODIFIED

  /**
   * Of course, we don't want to double compress responses
   */
  private def isNotAlreadyCompressed(header: ResponseHeader) = header.headers.get(CONTENT_ENCODING).isEmpty

  /** Sets `Content-Encoding: br` and adds Accept-Encoding to the Vary header. */
  private def setupHeader(header: Map[String, String]): Map[String, String] = {
    header + (CONTENT_ENCODING -> "br") + addToVaryHeader(header, VARY, ACCEPT_ENCODING)
  }

  /**
   * There may be an existing Vary value, which we must add to (comma separated)
   */
  private def addToVaryHeader(existingHeaders: Map[String, String], headerName: String, headerValue: String): (String, String) = {
    existingHeaders.get(headerName) match {
      case None => (headerName, headerValue)
      case Some(existing) if existing.split(",").exists(_.trim.equalsIgnoreCase(headerValue)) => (headerName, existing)
      case Some(existing) => (headerName, s"$existing,$headerValue")
    }
  }
}
213 |
/**
 * Configuration for the brotli filter
 *
 * @param quality The compression-speed vs compression-density tradeoffs. The higher the quality, the slower the compression. Range is 0 to 11
 * @param bufferSize The buffer size used by the streaming brotli flow.
 * @param chunkedThreshold The content length threshold, after which the filter will switch to chunking the result.
 * @param shouldBrotli Whether the given request/result should be compressed with brotli. This can be used, for example, to implement
 *                     black/white lists for compressing by content type.
 */
case class BrotliFilterConfig(quality: Int = 5, // TODO: confirm this is the right default quality
    bufferSize: Int = 8192,
    chunkedThreshold: Int = 102400,
    shouldBrotli: (RequestHeader, Result) => Boolean = (_, _) => true) {

  // Alternate constructor for Java callers.
  def this() = this(shouldBrotli = (_, _) => true)

  /** Returns a copy using the given Scala predicate to decide whether to compress. */
  def withShouldBrotli(shouldBrotli: (RequestHeader, Result) => Boolean): BrotliFilterConfig =
    copy(shouldBrotli = shouldBrotli)

  /** Returns a copy using the given Java predicate to decide whether to compress. */
  def withShouldBrotli(shouldBrotli: BiFunction[play.mvc.Http.RequestHeader, play.mvc.Result, Boolean]): BrotliFilterConfig =
    withShouldBrotli((req: RequestHeader, res: Result) => shouldBrotli(req.asJava, res.asJava))

  /** Returns a copy with the given chunked threshold. */
  def withChunkedThreshold(threshold: Int): BrotliFilterConfig =
    copy(chunkedThreshold = threshold)

  /** Returns a copy with the given buffer size. */
  def withBufferSize(size: Int): BrotliFilterConfig =
    copy(bufferSize = size)

  /** Returns a copy with the given compression quality. */
  def withQuality(q: Int): BrotliFilterConfig =
    copy(quality = q)
}
241 |
object BrotliFilterConfig {

  /** Builds a [[BrotliFilterConfig]] from the `play.filters.brotli` configuration block. */
  def fromConfiguration(conf: Configuration) = {
    val brotliConf = conf.get[Configuration]("play.filters.brotli")
    // NOTE(review): bufferSize and shouldBrotli keep their defaults here — confirm whether
    // they should also be configurable via configuration keys.
    BrotliFilterConfig(
      quality = brotliConf.get[Int]("quality"),
      chunkedThreshold = brotliConf.get[ConfigMemorySize]("chunkedThreshold").toBytes.toInt
    )
  }
}
254 |
/**
 * The brotli filter configuration provider.
 */
@Singleton
class BrotliFilterConfigProvider @Inject() (config: Configuration) extends Provider[BrotliFilterConfig] {
  // Parsed lazily, once, on first access.
  override lazy val get: BrotliFilterConfig = BrotliFilterConfig.fromConfiguration(config)
}
262 |
263 |
/**
 * The brotli filter module, wiring the filter and its configuration into the DI container.
 */
class BrotliFilterModule extends Module {

  def bindings(environment: Environment, configuration: Configuration) =
    List(
      bind[BrotliFilterConfig].toProvider[BrotliFilterConfigProvider],
      bind[BrotliFilter].toSelf
    )
}
276 |
/**
 * The brotli filter components, for compile-time dependency injection.
 */
trait BrotliFilterComponents {
  def configuration: Configuration
  def materializer: Materializer

  /** Filter configuration, parsed from `play.filters.brotli`. */
  lazy val brotliFilterConfig: BrotliFilterConfig =
    BrotliFilterConfig.fromConfiguration(configuration)

  /** The filter instance, materializing its streams with [[materializer]]. */
  lazy val brotliFilter: BrotliFilter = {
    implicit val mat: Materializer = materializer
    new BrotliFilter(brotliFilterConfig)
  }
}
287 |
--------------------------------------------------------------------------------
/play-v30/src/main/scala/play.filters.brotli/BrotliFilter.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2023 Mariot Chauvin
3 | *
4 | * Mariot Chauvin licenses this file to you under the Apache License,
5 | * version 2.0 (the "License"); you may not use this file except in compliance
6 | * with the License. You may obtain a copy of the License at:
7 | *
8 | * https://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 | * License for the specific language governing permissions and limitations
14 | * under the License.
15 | */
16 |
17 | package play.filters.brotli
18 |
19 | import java.util.function.BiFunction
20 | import javax.inject.{ Provider, Inject, Singleton }
21 |
22 | import org.apache.pekko.stream.{ OverflowStrategy, FlowShape, Materializer }
23 | import org.apache.pekko.stream.scaladsl._
24 | import org.apache.pekko.util.ByteString
25 | import com.typesafe.config.ConfigMemorySize
26 | import play.api.http.HttpProtocol
27 | import play.api.inject.Module
28 | import play.api.{ Environment, Configuration }
29 | import play.api.mvc._
30 | import play.api.libs.streams.BrotliFlow
31 | import scala.concurrent.{ Future, ExecutionContextExecutor}
32 | import play.api.http.{ HttpChunk, HttpEntity, Status }
33 | import scala.jdk.FunctionConverters._
34 |
35 | import com.aayushatharva.brotli4j.encoder.BrotliOutputStream;
36 | import com.aayushatharva.brotli4j.encoder.Encoder;
37 |
/**
 * A brotli filter.
 *
 * This filter may compress with brotli the responses for any requests that aren't HEAD requests and specify an accept encoding of brotli.
 *
 * It won't compress under the following conditions:
 *
 * - The response code is 204 or 304 (these codes MUST NOT contain a body, and an empty compressed response is still
 *   several bytes long)
 * - The response already defines a Content-Encoding header
 * - A custom shouldBrotli function is supplied and it returns false
 *
 * Since compressing changes the content length of the response, this filter may do some buffering - it will buffer any
 * streamed responses that define a content length less than the configured chunked threshold. Responses that are
 * greater in length, or that don't define a content length, will not be buffered, but will be sent as chunked
 * responses.
 */
@Singleton
class BrotliFilter @Inject() (config: BrotliFilterConfig)(implicit mat: Materializer) extends EssentialFilter {

  // Fail fast at construction time if the brotli4j native library cannot be loaded on this platform.
  {
    brotli4s.Brotli.ensureAvailability()
  }

  import play.api.http.HeaderNames._

  /** Convenience constructor for manual (non-DI) instantiation. */
  def this(quality: Int = 5,
      bufferSize: Int = 8192,
      chunkedThreshold: Int = 102400,
      shouldBrotli: (RequestHeader, Result) => Boolean = (_, _) => true)(implicit mat: Materializer) = {
    this(BrotliFilterConfig(quality, bufferSize, chunkedThreshold, shouldBrotli))
  }

  def apply(next: EssentialAction) = new EssentialAction {
    implicit val ec: ExecutionContextExecutor = mat.executionContext
    def apply(request: RequestHeader) = {
      if (mayCompress(request)) {
        next(request).mapFuture(result => handleResult(request, result))
      } else {
        next(request)
      }
    }
  }

  /**
   * Compresses the result body, choosing a strategy based on the entity type:
   * strict entities (and streamed entities below the chunked threshold) are compressed in memory,
   * larger streamed entities are compressed through a streaming flow, and chunked entities are
   * re-chunked after compression.
   */
  private def handleResult(request: RequestHeader, result: Result): Future[Result] = {
    if (shouldCompress(result) && config.shouldBrotli(request, result)) {
      implicit val ec: ExecutionContextExecutor = mat.executionContext
      val header = result.header.copy(headers = setupHeader(result.header.headers))

      result.body match {

        case HttpEntity.Strict(data, contentType) =>
          Future.successful(result.copy(header = header, body = compressStrictEntity(data, contentType)))

        case entity @ HttpEntity.Streamed(_, Some(contentLength), contentType) if contentLength <= config.chunkedThreshold =>
          // It's below the chunked threshold, so buffer then compress and send
          entity.consumeData.map { data =>
            result.copy(header = header, body = compressStrictEntity(data, contentType))
          }

        case HttpEntity.Streamed(data, _, contentType) if request.version == HttpProtocol.HTTP_1_0 =>
          // It's above the chunked threshold, but we can't chunk it because we're using HTTP 1.0.
          // Instead, we use a close delimited body (ie, regular body with no content length)
          val compressed = data.via(createBrotliFlow)
          Future.successful(
            result.copy(header = header, body = HttpEntity.Streamed(compressed, None, contentType))
          )

        case HttpEntity.Streamed(data, _, contentType) =>
          // It's above the chunked threshold, compress through the brotli flow, and send as chunked
          val compressed = data.via(createBrotliFlow).map(d => HttpChunk.Chunk(d))
          Future.successful(result.copy(header = header, body = HttpEntity.Chunked(compressed, contentType)))

        case HttpEntity.Chunked(chunks, contentType) =>
          val wrappedFlow = Flow.fromGraph(GraphDSL.create[FlowShape[HttpChunk, HttpChunk]]() { implicit builder =>
            import GraphDSL.Implicits._

            val extractChunks = Flow[HttpChunk] collect { case HttpChunk.Chunk(data) => data }

            val foldData = Flow[ByteString].fold(ByteString.empty)((acc, x) => acc ++ x)

            val createChunks = Flow[ByteString].map[HttpChunk](HttpChunk.Chunk.apply)
            val filterLastChunk = Flow[HttpChunk]
              .filter(_.isInstanceOf[HttpChunk.LastChunk])
              // Since we're doing a merge by concatenating, the filter last chunk won't receive demand until the brotli
              // flow is finished. But the broadcast won't start broadcasting until both flows start demanding. So we
              // put a buffer of one in to ensure the filter last chunk flow demands from the broadcast.
              .buffer(1, OverflowStrategy.backpressure)

            val broadcast = builder.add(Broadcast[HttpChunk](2))
            val concat = builder.add(Concat[HttpChunk]())

            // Broadcast the stream through two separate flows, one that collects chunks and turns them into
            // ByteStrings, folds into one ByteString, sends that ByteString through the brotli flow, and then turns
            // the result back into chunks; the other just allows the last chunk through. Then concat the two flows.
            broadcast.out(0) ~> extractChunks ~> foldData ~> createBrotliFlow ~> createChunks ~> concat.in(0)
            broadcast.out(1) ~> filterLastChunk ~> concat.in(1)

            new FlowShape(broadcast.in, concat.out)
          })
          Future.successful(result.copy(header = header, body = HttpEntity.Chunked(chunks via wrappedFlow, contentType)))
      }
    } else {
      Future.successful(result)
    }
  }

  /** A fresh streaming brotli compression flow, configured from [[config]]. */
  private def createBrotliFlow: Flow[ByteString, ByteString, _] =
    BrotliFlow.brotli(config.bufferSize, config.quality)

  /**
   * Compresses a complete in-memory body and returns it as a strict entity, so that a
   * correct Content-Length can be computed for the compressed bytes.
   */
  private def compressStrictEntity(data: ByteString, contentType: Option[String]) = {
    val builder = ByteString.newBuilder
    val brotliParameters = new Encoder.Parameters().setQuality(config.quality)
    val brotliStream = new BrotliOutputStream(builder.asOutputStream, brotliParameters)
    // Close in a finally block so the encoder's native resources are released even if the write fails.
    try brotliStream.write(data.toArray)
    finally brotliStream.close()
    HttpEntity.Strict(builder.result(), contentType)
  }

  /**
   * Whether this request may be compressed.
   */
  private def mayCompress(request: RequestHeader) =
    request.method != "HEAD" && brotliIsAcceptedAndPreferredBy(request)

  /**
   * Whether the client's Accept-Encoding header accepts "br" (q > 0) and prefers it
   * at least as much as the identity encoding.
   */
  private def brotliIsAcceptedAndPreferredBy(request: RequestHeader) = {
    val codings = play.api.mvc.RequestHeader.acceptHeader(request.headers, ACCEPT_ENCODING)
    def explicitQValue(coding: String) = codings collectFirst { case (q, c) if c equalsIgnoreCase coding => q }
    // identity defaults to a tiny positive q-value so that an absent Accept-Encoding header
    // does not cause compression to be selected
    def defaultQValue(coding: String) = if (coding == "identity") 0.001d else 0d
    def qvalue(coding: String) = explicitQValue(coding) orElse explicitQValue("*") getOrElse defaultQValue(coding)

    val brQValue = qvalue("br")
    brQValue > 0d && brQValue >= qvalue("identity")
  }

  /**
   * Whether this response should be compressed. Responses that may not contain content won't be compressed, nor will
   * responses that already define a content encoding. Empty responses also shouldn't be compressed, as they will
   * actually always get bigger.
   */
  private def shouldCompress(result: Result) = isAllowedContent(result.header) &&
    isNotAlreadyCompressed(result.header) &&
    !result.body.isKnownEmpty

  /**
   * Certain response codes are forbidden by the HTTP spec to contain content, but a brotli-compressed response
   * always contains a minimum number of bytes, even for empty responses.
   */
  private def isAllowedContent(header: ResponseHeader) = header.status != Status.NO_CONTENT && header.status != Status.NOT_MODIFIED

  /**
   * Of course, we don't want to double compress responses
   */
  private def isNotAlreadyCompressed(header: ResponseHeader) = header.headers.get(CONTENT_ENCODING).isEmpty

  /** Sets `Content-Encoding: br` and adds Accept-Encoding to the Vary header. */
  private def setupHeader(header: Map[String, String]): Map[String, String] = {
    header + (CONTENT_ENCODING -> "br") + addToVaryHeader(header, VARY, ACCEPT_ENCODING)
  }

  /**
   * There may be an existing Vary value, which we must add to (comma separated)
   */
  private def addToVaryHeader(existingHeaders: Map[String, String], headerName: String, headerValue: String): (String, String) = {
    existingHeaders.get(headerName) match {
      case None => (headerName, headerValue)
      case Some(existing) if existing.split(",").exists(_.trim.equalsIgnoreCase(headerValue)) => (headerName, existing)
      case Some(existing) => (headerName, s"$existing,$headerValue")
    }
  }
}
213 |
/**
 * Configuration for the brotli filter
 *
 * @param quality The compression-speed vs compression-density tradeoffs. The higher the quality, the slower the compression. Range is 0 to 11
 * @param bufferSize The buffer size used by the streaming brotli flow.
 * @param chunkedThreshold The content length threshold, after which the filter will switch to chunking the result.
 * @param shouldBrotli Whether the given request/result should be compressed with brotli. This can be used, for example, to implement
 *                     black/white lists for compressing by content type.
 */
case class BrotliFilterConfig(quality: Int = 5, // TODO: confirm this is the right default quality
    bufferSize: Int = 8192,
    chunkedThreshold: Int = 102400,
    shouldBrotli: (RequestHeader, Result) => Boolean = (_, _) => true) {

  // Alternate constructor for Java callers.
  def this() = this(shouldBrotli = (_, _) => true)

  /** Returns a copy using the given Scala predicate to decide whether to compress. */
  def withShouldBrotli(shouldBrotli: (RequestHeader, Result) => Boolean): BrotliFilterConfig =
    copy(shouldBrotli = shouldBrotli)

  /** Returns a copy using the given Java predicate to decide whether to compress. */
  def withShouldBrotli(shouldBrotli: BiFunction[play.mvc.Http.RequestHeader, play.mvc.Result, Boolean]): BrotliFilterConfig =
    withShouldBrotli((req: RequestHeader, res: Result) => shouldBrotli(req.asJava, res.asJava))

  /** Returns a copy with the given chunked threshold. */
  def withChunkedThreshold(threshold: Int): BrotliFilterConfig =
    copy(chunkedThreshold = threshold)

  /** Returns a copy with the given buffer size. */
  def withBufferSize(size: Int): BrotliFilterConfig =
    copy(bufferSize = size)

  /** Returns a copy with the given compression quality. */
  def withQuality(q: Int): BrotliFilterConfig =
    copy(quality = q)
}
241 |
object BrotliFilterConfig {

  /** Builds a [[BrotliFilterConfig]] from the `play.filters.brotli` configuration block. */
  def fromConfiguration(conf: Configuration) = {
    val brotliConf = conf.get[Configuration]("play.filters.brotli")
    // NOTE(review): bufferSize and shouldBrotli keep their defaults here — confirm whether
    // they should also be configurable via configuration keys.
    BrotliFilterConfig(
      quality = brotliConf.get[Int]("quality"),
      chunkedThreshold = brotliConf.get[ConfigMemorySize]("chunkedThreshold").toBytes.toInt
    )
  }
}
254 |
/**
 * The brotli filter configuration provider.
 */
@Singleton
class BrotliFilterConfigProvider @Inject() (config: Configuration) extends Provider[BrotliFilterConfig] {
  // Parsed lazily, once, on first access.
  override lazy val get: BrotliFilterConfig = BrotliFilterConfig.fromConfiguration(config)
}
262 |
263 |
/**
 * The brotli filter module, wiring the filter and its configuration into the DI container.
 */
class BrotliFilterModule extends Module {

  def bindings(environment: Environment, configuration: Configuration) =
    List(
      bind[BrotliFilterConfig].toProvider[BrotliFilterConfigProvider],
      bind[BrotliFilter].toSelf
    )
}
276 |
/**
 * The brotli filter components, for compile-time dependency injection.
 */
trait BrotliFilterComponents {
  def configuration: Configuration
  def materializer: Materializer

  /** Filter configuration, parsed from `play.filters.brotli`. */
  lazy val brotliFilterConfig: BrotliFilterConfig =
    BrotliFilterConfig.fromConfiguration(configuration)

  /** The filter instance, materializing its streams with [[materializer]]. */
  lazy val brotliFilter: BrotliFilter = {
    implicit val mat: Materializer = materializer
    new BrotliFilter(brotliFilterConfig)
  }
}
287 |
--------------------------------------------------------------------------------
/akka/src/test/scala/akka.stream.io.compression.brotli/CoderSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2009-2022 Lightbend Inc.
3 | */
4 |
5 | package akka.stream.io.compression.brotli
6 |
7 | import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream }
8 | import java.util.concurrent.ThreadLocalRandom
9 | import java.util.zip.DataFormatException
10 |
11 | import scala.annotation.tailrec
12 | import scala.concurrent.Await
13 | import scala.concurrent.duration._
14 | import scala.util.control.NoStackTrace
15 |
16 | import org.scalatest.Inspectors
17 | import org.scalatest.wordspec.AnyWordSpec
18 |
19 | import akka.NotUsed
20 | import akka.stream.impl.io.compression.Compressor
21 | import akka.stream.scaladsl.{ Compression, Flow, Sink, Source }
22 | import akka.util.ByteString
23 |
24 | abstract class CoderSpec(codecName: String) extends AnyWordSpec with CodecSpecSupport with Inspectors {
25 | import CompressionTestingTools._
26 |
27 | protected def newCompressor(): Compressor
28 | protected def encoderFlow: Flow[ByteString, ByteString, Any]
29 | protected def decoderFlow(
30 | maxBytesPerChunk: Int = Compression.MaxBytesPerChunkDefault): Flow[ByteString, ByteString, Any]
31 |
32 | protected def newDecodedInputStream(underlying: InputStream): InputStream
33 | protected def newEncodedOutputStream(underlying: OutputStream): OutputStream
34 |
35 | case object AllDataAllowed extends Exception with NoStackTrace
36 | protected def corruptInputCheck: Boolean = true
37 |
38 | def extraTests(): Unit = {}
39 |
40 | s"The $codecName codec" should {
41 | /*"produce valid data on immediate finish" in {
42 | streamDecode(newCompressor().finish()) should readAs(emptyText)
43 | }
44 | "properly encode an empty string" in {
45 | streamDecode(ourEncode(emptyTextBytes)) should readAs(emptyText)
46 | }
47 | "properly decode an empty string" in {
48 | ourDecode(streamEncode(emptyTextBytes)) should readAs(emptyText)
49 | }
50 | "properly round-trip encode/decode an empty string" in {
51 | ourDecode(ourEncode(emptyTextBytes)) should readAs(emptyText)
52 | }*/
53 | "properly encode a small string" in {
54 | streamDecode(ourEncode(smallTextBytes)) should readAs(smallText)
55 | }
56 | "properly decode a small string" in {
57 | ourDecode(streamEncode(smallTextBytes)) should readAs(smallText)
58 | }
59 | "properly round-trip encode/decode a small string" in {
60 | ourDecode(ourEncode(smallTextBytes)) should readAs(smallText)
61 | }
62 | "properly encode a large string" in {
63 | streamDecode(ourEncode(largeTextBytes)) should readAs(largeText)
64 | }
65 | "properly decode a large string" in {
66 | ourDecode(streamEncode(largeTextBytes)) should readAs(largeText)
67 | }
68 | "properly round-trip encode/decode a large string" in {
69 | ourDecode(ourEncode(largeTextBytes)) should readAs(largeText)
70 | }
71 |
72 | if (corruptInputCheck) {
73 | "throw an error on corrupt input" in {
74 | (the[RuntimeException] thrownBy {
75 | ourDecode(corruptContent)
76 | }).ultimateCause should be(a[DataFormatException])
77 | }
78 | }
79 |
80 | "decompress in very small chunks" in {
81 | val compressed = encode("Hello")
82 |
83 | decodeChunks(Source(Vector(compressed.take(10), compressed.drop(10)))) should readAs("Hello")
84 | }
85 | "support chunked round-trip encoding/decoding" in {
86 | val chunks = largeTextBytes.grouped(512).toVector
87 | val comp = newCompressor()
88 | val compressedChunks = chunks.map { chunk =>
89 | comp.compressAndFlush(chunk)
90 | } :+ comp.finish()
91 | val uncompressed = decodeFromIterator(() => compressedChunks.iterator)
92 |
93 | uncompressed should readAs(largeText)
94 | }
95 | "works for any split in prefix + suffix" in {
96 | val compressed = streamEncode(smallTextBytes)
97 | def tryWithPrefixOfSize(prefixSize: Int): Unit = {
98 | val prefix = compressed.take(prefixSize)
99 | val suffix = compressed.drop(prefixSize)
100 |
101 | decodeChunks(Source(prefix :: suffix :: Nil)) should readAs(smallText)
102 | }
103 | (0 to compressed.size).foreach(tryWithPrefixOfSize)
104 | }
105 | "works for chunked compressed data of sizes just above 1024" in {
106 | val comp = newCompressor()
107 | val inputBytes = ByteString(
108 | """{"baseServiceURL":"http://www.acme.com","endpoints":{"assetSearchURL":"/search","showsURL":"/shows","mediaContainerDetailURL":"/container","featuredTapeURL":"/tape","assetDetailURL":"/asset","moviesURL":"/movies","recentlyAddedURL":"/recent","topicsURL":"/topics","scheduleURL":"/schedule"},"urls":{"aboutAweURL":"www.foobar.com"},"channelName":"Cool Stuff","networkId":"netId","slotProfile":"slot_1","brag":{"launchesUntilPrompt":10,"daysUntilPrompt":5,"launchesUntilReminder":5,"daysUntilReminder":2},"feedbackEmailAddress":"feedback@acme.com","feedbackEmailSubject":"Commends from User","splashSponsor":[],"adProvider":{"adProviderProfile":"","adProviderProfileAndroid":"","adProviderNetworkID":0,"adProviderSiteSectionNetworkID":0,"adProviderVideoAssetNetworkID":0,"adProviderSiteSectionCustomID":{},"adProviderServerURL":"","adProviderLiveVideoAssetID":""},"update":[{"forPlatform":"ios","store":{"iTunes":"www.something.com"},"minVer":"1.2.3","notificationVer":"1.2.5"},{"forPlatform":"android","store":{"amazon":"www.something.com","play":"www.something.com"},"minVer":"1.2.3","notificationVer":"1.2.5"}],"tvRatingPolicies":[{"type":"sometype","imageKey":"tv_rating_small","durationMS":15000,"precedence":1},{"type":"someothertype","imageKey":"tv_rating_big","durationMS":15000,"precedence":2}],"exts":{"adConfig":{"globals":{"#{adNetworkID}":"2620","#{ssid}":"usa_tveapp"},"iPad":{"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_ipad/shows","adSize":[{"#{height}":90,"#{width}":728}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_ipad&sz=1x1&t=&c=#{doubleclickrandom}"},"watchwithshowtile":{"adMobAdUnitID":"/2620/usa_tveapp_ipad/watchwithshowtile","adSize":[{"#{height}":120,"#{width}":240}]},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_ipad/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}},"iPadRetina":{"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_ipad/shows","adSiz
e":[{"#{height}":90,"#{width}":728}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_ipad&sz=1x1&t=&c=#{doubleclickrandom}"},"watchwithshowtile":{"adMobAdUnitID":"/2620/usa_tveapp_ipad/watchwithshowtile","adSize":[{"#{height}":120,"#{width}":240}]},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_ipad/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}},"iPhone":{"home":{"adMobAdUnitID":"/2620/usa_tveapp_iphone/home","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_iphone/shows","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"episodepage":{"adMobAdUnitID":"/2620/usa_tveapp_iphone/shows/#{SHOW_NAME}","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_iphone&sz=1x1&t=&c=#{doubleclickrandom}"},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_iphone/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}},"iPhoneRetina":{"home":{"adMobAdUnitID":"/2620/usa_tveapp_iphone/home","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_iphone/shows","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"episodepage":{"adMobAdUnitID":"/2620/usa_tveapp_iphone/shows/#{SHOW_NAME}","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_iphone&sz=1x1&t=&c=#{doubleclickrandom}"},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_iphone/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}},"Tablet":{"home":{"adMobAdUnitID":"/2620/usa_tveapp_androidtab/home","ad
Size":[{"#{height}":90,"#{width}":728},{"#{height}":50,"#{width}":320},{"#{height}":50,"#{width}":300}]},"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_androidtab/shows","adSize":[{"#{height}":90,"#{width}":728},{"#{height}":50,"#{width}":320},{"#{height}":50,"#{width}":300}]},"episodepage":{"adMobAdUnitID":"/2620/usa_tveapp_androidtab/shows/#{SHOW_NAME}","adSize":[{"#{height}":90,"#{width}":728},{"#{height}":50,"#{width}":320},{"#{height}":50,"#{width}":300}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_androidtab&sz=1x1&t=&c=#{doubleclickrandom}"},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_androidtab/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}},"TabletHD":{"home":{"adMobAdUnitID":"/2620/usa_tveapp_androidtab/home","adSize":[{"#{height}":90,"#{width}":728},{"#{height}":50,"#{width}":320},{"#{height}":50,"#{width}":300}]},"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_androidtab/shows","adSize":[{"#{height}":90,"#{width}":728},{"#{height}":50,"#{width}":320},{"#{height}":50,"#{width}":300}]},"episodepage":{"adMobAdUnitID":"/2620/usa_tveapp_androidtab/shows/#{SHOW_NAME}","adSize":[{"#{height}":90,"#{width}":728},{"#{height}":50,"#{width}":320},{"#{height}":50,"#{width}":300}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_androidtab&sz=1x1&t=&c=#{doubleclickrandom}"},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_androidtab/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}},"Phone":{"home":{"adMobAdUnitID":"/2620/usa_tveapp_android/home","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_android/shows","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"episodepage":{"adMobAdUnitID":"/2620/usa_tveapp_android/shows/#{SHOW_NAME}","adSize":[{"#{height}
":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_android&sz=1x1&t=&c=#{doubleclickrandom}"},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_android/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}},"PhoneHD":{"home":{"adMobAdUnitID":"/2620/usa_tveapp_android/home","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_android/shows","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"episodepage":{"adMobAdUnitID":"/2620/usa_tveapp_android/shows/#{SHOW_NAME}","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_android&sz=1x1&t=&c=#{doubleclickrandom}"},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_android/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}}}}}""",
109 | "utf8")
110 | val compressed = comp.compressAndFinish(inputBytes)
111 |
112 | ourDecode(compressed) should equal(inputBytes)
113 | }
114 |
115 | "shouldn't produce huge ByteStrings for some input" in {
116 | val array = Array.fill(10)(1.toByte)
117 | val compressed = streamEncode(ByteString(array))
118 | val limit = 10000
119 | val resultBs =
120 | Source
121 | .single(compressed)
122 | .via(decoderFlow(maxBytesPerChunk = limit))
123 | .limit(4200)
124 | .runWith(Sink.seq)
125 | .awaitResult(3.seconds)
126 |
127 | forAll(resultBs) { bs =>
128 | bs.length should be < limit
129 | bs.forall(_ == 1) should equal(true)
130 | }
131 | }
132 |
133 | /**
134 | * The below test assume a Flow will preserve chunking input which is not a property.
135 | * BrotliFlow would generate a single ByteString instance
136 | *
137 | "be able to decode chunk-by-chunk (depending on input chunks)" in {
138 | val minLength = 100
139 | val maxLength = 1000
140 | val numElements = 1000
141 |
142 | val random = ThreadLocalRandom.current()
143 | val sizes = Seq.fill(numElements)(random.nextInt(minLength, maxLength))
144 | def createByteString(size: Int): ByteString =
145 | ByteString(Array.fill(size)(1.toByte))
146 |
147 | val sizesAfterRoundtrip =
148 | Source
149 | .fromIterator(() => sizes.iterator.map(createByteString))
150 | .via(encoderFlow)
151 | .via(decoderFlow())
152 | .runFold(Seq.empty[Int])(_ :+ _.size)
153 |
154 | sizesAfterRoundtrip.awaitResult(3.seconds) shouldEqual sizes
155 | }*/
156 |
157 | extraTests()
158 | }
159 |
160 | def encode(s: String) = ourEncode(ByteString(s, "UTF8"))
161 | def ourEncode(bytes: ByteString): ByteString = newCompressor().compressAndFinish(bytes)
162 | def ourDecode(bytes: ByteString): ByteString =
163 | Source.single(bytes).via(decoderFlow()).join.awaitResult(3.seconds)
164 |
165 | lazy val corruptContent = {
166 | val content = encode(largeText).toArray
167 | content(14) = 36.toByte
168 | ByteString(content)
169 | }
170 |
171 | def streamEncode(bytes: ByteString): ByteString = {
172 | val output = new ByteArrayOutputStream()
173 | val gos = newEncodedOutputStream(output); gos.write(bytes.toArray); gos.close()
174 | ByteString(output.toByteArray)
175 | }
176 |
177 | def streamDecode(bytes: ByteString): ByteString = {
178 | val output = new ByteArrayOutputStream()
179 | val input = newDecodedInputStream(new ByteArrayInputStream(bytes.toArray))
180 |
181 | val buffer = new Array[Byte](500)
182 | @tailrec def copy(from: InputStream, to: OutputStream): Unit = {
183 | val read = from.read(buffer)
184 | if (read >= 0) {
185 | to.write(buffer, 0, read)
186 | copy(from, to)
187 | }
188 | }
189 |
190 | copy(input, output)
191 | ByteString(output.toByteArray)
192 | }
193 |
194 | def decodeChunks(input: Source[ByteString, NotUsed]): ByteString =
195 | input.via(decoderFlow()).join.awaitResult(3.seconds) // TODO make it use remaining?
196 |
197 | def decodeFromIterator(iterator: () => Iterator[ByteString]): ByteString =
198 | Await.result(Source.fromIterator(iterator).via(decoderFlow()).join, 3.seconds)
199 | }
--------------------------------------------------------------------------------
/pekko/src/test/scala/org.apache.pekko.stream.io.compression.brotli/CoderSpec.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (C) 2009-2023 Lightbend Inc.
3 | */
4 |
5 | package org.apache.pekko.stream.io.compression.brotli
6 |
7 | import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream }
8 | import java.util.concurrent.ThreadLocalRandom
9 | import java.util.zip.DataFormatException
10 |
11 | import scala.annotation.tailrec
12 | import scala.concurrent.Await
13 | import scala.concurrent.duration._
14 | import scala.util.control.NoStackTrace
15 |
16 | import org.scalatest.Inspectors
17 | import org.scalatest.wordspec.AnyWordSpec
18 |
19 | import org.apache.pekko.NotUsed
20 | import org.apache.pekko.stream.impl.io.compression.Compressor
21 | import org.apache.pekko.stream.scaladsl.{ Compression, Flow, Sink, Source }
22 | import org.apache.pekko.util.ByteString
23 |
/**
 * Reusable compression-codec test suite (shared by the Brotli specs).
 *
 * A concrete subclass plugs in the codec under test via the abstract factory
 * members below. The suite then cross-checks two implementations against each
 * other: "our" streaming implementation (`newCompressor` / `decoderFlow`) and
 * the reference `InputStream`/`OutputStream` implementation
 * (`newDecodedInputStream` / `newEncodedOutputStream`), covering round-trips,
 * chunked decoding, corrupt input, and output chunk-size limiting.
 */
abstract class CoderSpec(codecName: String) extends AnyWordSpec with CodecSpecSupport with Inspectors {
  import CompressionTestingTools._ // test helpers, e.g. awaitResult / ultimateCause

  /** A fresh compressor instance; each test creates its own. */
  protected def newCompressor(): Compressor

  /** Streaming flow that compresses its input chunks. */
  protected def encoderFlow: Flow[ByteString, ByteString, Any]

  /** Streaming flow that decompresses its input, emitting elements of at most `maxBytesPerChunk` bytes. */
  protected def decoderFlow(
      maxBytesPerChunk: Int = Compression.MaxBytesPerChunkDefault): Flow[ByteString, ByteString, Any]

  /** Reference decoding stream wrapping `underlying`. */
  protected def newDecodedInputStream(underlying: InputStream): InputStream

  /** Reference encoding stream wrapping `underlying`. */
  protected def newEncodedOutputStream(underlying: OutputStream): OutputStream

  case object AllDataAllowed extends Exception with NoStackTrace

  /** Subclasses may override with `false` if the codec cannot reliably detect corrupt input. */
  protected def corruptInputCheck: Boolean = true

  /** Hook for codec-specific additional tests; invoked at the end of the shared suite. */
  def extraTests(): Unit = {}

  s"The $codecName codec" should {
    // NOTE(review): the empty-input cases below are disabled; the reason is not
    // recorded here — confirm whether the Brotli flow mishandles empty input.
    /*"produce valid data on immediate finish" in {
      streamDecode(newCompressor().finish()) should readAs(emptyText)
    }
    "properly encode an empty string" in {
      streamDecode(ourEncode(emptyTextBytes)) should readAs(emptyText)
    }
    "properly decode an empty string" in {
      ourDecode(streamEncode(emptyTextBytes)) should readAs(emptyText)
    }
    "properly round-trip encode/decode an empty string" in {
      ourDecode(ourEncode(emptyTextBytes)) should readAs(emptyText)
    }*/
    "properly encode a small string" in {
      streamDecode(ourEncode(smallTextBytes)) should readAs(smallText)
    }
    "properly decode a small string" in {
      ourDecode(streamEncode(smallTextBytes)) should readAs(smallText)
    }
    "properly round-trip encode/decode a small string" in {
      ourDecode(ourEncode(smallTextBytes)) should readAs(smallText)
    }
    "properly encode a large string" in {
      streamDecode(ourEncode(largeTextBytes)) should readAs(largeText)
    }
    "properly decode a large string" in {
      ourDecode(streamEncode(largeTextBytes)) should readAs(largeText)
    }
    "properly round-trip encode/decode a large string" in {
      ourDecode(ourEncode(largeTextBytes)) should readAs(largeText)
    }

    if (corruptInputCheck) {
      "throw an error on corrupt input" in {
        // ultimateCause is provided by CompressionTestingTools — presumably it
        // unwraps nested exception causes; verify the exact semantics there.
        (the[RuntimeException] thrownBy {
          ourDecode(corruptContent)
        }).ultimateCause should be(a[DataFormatException])
      }
    }

    "decompress in very small chunks" in {
      val compressed = encode("Hello")

      decodeChunks(Source(Vector(compressed.take(10), compressed.drop(10)))) should readAs("Hello")
    }
    "support chunked round-trip encoding/decoding" in {
      // Compress 512-byte slices incrementally, then finish the stream.
      val chunks = largeTextBytes.grouped(512).toVector
      val comp = newCompressor()
      val compressedChunks = chunks.map { chunk =>
        comp.compressAndFlush(chunk)
      } :+ comp.finish()
      val uncompressed = decodeFromIterator(() => compressedChunks.iterator)

      uncompressed should readAs(largeText)
    }
    "works for any split in prefix + suffix" in {
      // Exhaustively split the compressed bytes at every possible boundary to
      // ensure the decoder handles chunk breaks anywhere in the stream.
      val compressed = streamEncode(smallTextBytes)
      def tryWithPrefixOfSize(prefixSize: Int): Unit = {
        val prefix = compressed.take(prefixSize)
        val suffix = compressed.drop(prefixSize)

        decodeChunks(Source(prefix :: suffix :: Nil)) should readAs(smallText)
      }
      (0 to compressed.size).foreach(tryWithPrefixOfSize)
    }
    "works for chunked compressed data of sizes just above 1024" in {
      val comp = newCompressor()
      // Realistic JSON payload sized to cross internal buffer boundaries
      // around 1024 bytes.
      val inputBytes = ByteString(
        """{"baseServiceURL":"http://www.acme.com","endpoints":{"assetSearchURL":"/search","showsURL":"/shows","mediaContainerDetailURL":"/container","featuredTapeURL":"/tape","assetDetailURL":"/asset","moviesURL":"/movies","recentlyAddedURL":"/recent","topicsURL":"/topics","scheduleURL":"/schedule"},"urls":{"aboutAweURL":"www.foobar.com"},"channelName":"Cool Stuff","networkId":"netId","slotProfile":"slot_1","brag":{"launchesUntilPrompt":10,"daysUntilPrompt":5,"launchesUntilReminder":5,"daysUntilReminder":2},"feedbackEmailAddress":"feedback@acme.com","feedbackEmailSubject":"Commends from User","splashSponsor":[],"adProvider":{"adProviderProfile":"","adProviderProfileAndroid":"","adProviderNetworkID":0,"adProviderSiteSectionNetworkID":0,"adProviderVideoAssetNetworkID":0,"adProviderSiteSectionCustomID":{},"adProviderServerURL":"","adProviderLiveVideoAssetID":""},"update":[{"forPlatform":"ios","store":{"iTunes":"www.something.com"},"minVer":"1.2.3","notificationVer":"1.2.5"},{"forPlatform":"android","store":{"amazon":"www.something.com","play":"www.something.com"},"minVer":"1.2.3","notificationVer":"1.2.5"}],"tvRatingPolicies":[{"type":"sometype","imageKey":"tv_rating_small","durationMS":15000,"precedence":1},{"type":"someothertype","imageKey":"tv_rating_big","durationMS":15000,"precedence":2}],"exts":{"adConfig":{"globals":{"#{adNetworkID}":"2620","#{ssid}":"usa_tveapp"},"iPad":{"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_ipad/shows","adSize":[{"#{height}":90,"#{width}":728}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_ipad&sz=1x1&t=&c=#{doubleclickrandom}"},"watchwithshowtile":{"adMobAdUnitID":"/2620/usa_tveapp_ipad/watchwithshowtile","adSize":[{"#{height}":120,"#{width}":240}]},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_ipad/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}},"iPadRetina":{"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_ipad/shows","adSize":[{"#{height}":90,"#{width}":728}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_ipad&sz=1x1&t=&c=#{doubleclickrandom}"},"watchwithshowtile":{"adMobAdUnitID":"/2620/usa_tveapp_ipad/watchwithshowtile","adSize":[{"#{height}":120,"#{width}":240}]},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_ipad/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}},"iPhone":{"home":{"adMobAdUnitID":"/2620/usa_tveapp_iphone/home","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_iphone/shows","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"episodepage":{"adMobAdUnitID":"/2620/usa_tveapp_iphone/shows/#{SHOW_NAME}","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_iphone&sz=1x1&t=&c=#{doubleclickrandom}"},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_iphone/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}},"iPhoneRetina":{"home":{"adMobAdUnitID":"/2620/usa_tveapp_iphone/home","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_iphone/shows","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"episodepage":{"adMobAdUnitID":"/2620/usa_tveapp_iphone/shows/#{SHOW_NAME}","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_iphone&sz=1x1&t=&c=#{doubleclickrandom}"},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_iphone/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}},"Tablet":{"home":{"adMobAdUnitID":"/2620/usa_tveapp_androidtab/home","adSize":[{"#{height}":90,"#{width}":728},{"#{height}":50,"#{width}":320},{"#{height}":50,"#{width}":300}]},"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_androidtab/shows","adSize":[{"#{height}":90,"#{width}":728},{"#{height}":50,"#{width}":320},{"#{height}":50,"#{width}":300}]},"episodepage":{"adMobAdUnitID":"/2620/usa_tveapp_androidtab/shows/#{SHOW_NAME}","adSize":[{"#{height}":90,"#{width}":728},{"#{height}":50,"#{width}":320},{"#{height}":50,"#{width}":300}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_androidtab&sz=1x1&t=&c=#{doubleclickrandom}"},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_androidtab/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}},"TabletHD":{"home":{"adMobAdUnitID":"/2620/usa_tveapp_androidtab/home","adSize":[{"#{height}":90,"#{width}":728},{"#{height}":50,"#{width}":320},{"#{height}":50,"#{width}":300}]},"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_androidtab/shows","adSize":[{"#{height}":90,"#{width}":728},{"#{height}":50,"#{width}":320},{"#{height}":50,"#{width}":300}]},"episodepage":{"adMobAdUnitID":"/2620/usa_tveapp_androidtab/shows/#{SHOW_NAME}","adSize":[{"#{height}":90,"#{width}":728},{"#{height}":50,"#{width}":320},{"#{height}":50,"#{width}":300}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_androidtab&sz=1x1&t=&c=#{doubleclickrandom}"},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_androidtab/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}},"Phone":{"home":{"adMobAdUnitID":"/2620/usa_tveapp_android/home","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_android/shows","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"episodepage":{"adMobAdUnitID":"/2620/usa_tveapp_android/shows/#{SHOW_NAME}","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_android&sz=1x1&t=&c=#{doubleclickrandom}"},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_android/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}},"PhoneHD":{"home":{"adMobAdUnitID":"/2620/usa_tveapp_android/home","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"showlist":{"adMobAdUnitID":"/2620/usa_tveapp_android/shows","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"episodepage":{"adMobAdUnitID":"/2620/usa_tveapp_android/shows/#{SHOW_NAME}","adSize":[{"#{height}":50,"#{width}":300},{"#{height}":50,"#{width}":320}]},"launch":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_android&sz=1x1&t=&c=#{doubleclickrandom}"},"showpage":{"doubleClickCallbackURL":"http://pubads.g.doubleclick.net/gampad/ad?iu=/2620/usa_tveapp_android/shows/#{SHOW_NAME}&sz=1x1&t=&c=#{doubleclickrandom}"}}}}}""",
        "utf8")
      val compressed = comp.compressAndFinish(inputBytes)

      ourDecode(compressed) should equal(inputBytes)
    }

    "shouldn't produce huge ByteStrings for some input" in {
      // A tiny highly-compressible input can expand into a huge decoded chunk;
      // maxBytesPerChunk must cap each emitted element's size.
      val array = Array.fill(10)(1.toByte)
      val compressed = streamEncode(ByteString(array))
      val limit = 10000
      val resultBs =
        Source
          .single(compressed)
          .via(decoderFlow(maxBytesPerChunk = limit))
          .limit(4200)
          .runWith(Sink.seq)
          .awaitResult(3.seconds)

      forAll(resultBs) { bs =>
        bs.length should be < limit
        bs.forall(_ == 1) should equal(true)
      }
    }

    /**
     * The below test assumes a Flow will preserve input chunking, which is not a guaranteed property.
     * BrotliFlow would generate a single ByteString instance
     *
    "be able to decode chunk-by-chunk (depending on input chunks)" in {
      val minLength = 100
      val maxLength = 1000
      val numElements = 1000

      val random = ThreadLocalRandom.current()
      val sizes = Seq.fill(numElements)(random.nextInt(minLength, maxLength))
      def createByteString(size: Int): ByteString =
        ByteString(Array.fill(size)(1.toByte))

      val sizesAfterRoundtrip =
        Source
          .fromIterator(() => sizes.iterator.map(createByteString))
          .via(encoderFlow)
          .via(decoderFlow())
          .runFold(Seq.empty[Int])(_ :+ _.size)

      sizesAfterRoundtrip.awaitResult(3.seconds) shouldEqual sizes
    }*/

    extraTests()
  }

  /** Compresses a UTF-8 string using this spec's compressor. */
  def encode(s: String) = ourEncode(ByteString(s, "UTF8"))
  /** One-shot compression of `bytes` with a fresh Compressor. */
  def ourEncode(bytes: ByteString): ByteString = newCompressor().compressAndFinish(bytes)
  /** One-shot decompression of `bytes` through the decoder flow. */
  def ourDecode(bytes: ByteString): ByteString =
    Source.single(bytes).via(decoderFlow()).join.awaitResult(3.seconds)

  // Valid compressed data with one byte inside the stream overwritten, used to
  // verify that decoding corrupt input fails loudly.
  lazy val corruptContent = {
    val content = encode(largeText).toArray
    content(14) = 36.toByte
    ByteString(content)
  }

  /** Compresses `bytes` through the reference encoder OutputStream implementation. */
  def streamEncode(bytes: ByteString): ByteString = {
    val output = new ByteArrayOutputStream()
    // close() finishes/flushes the compressed stream into `output`.
    val gos = newEncodedOutputStream(output); gos.write(bytes.toArray); gos.close()
    ByteString(output.toByteArray)
  }

  /** Decompresses `bytes` through the reference decoder InputStream implementation. */
  def streamDecode(bytes: ByteString): ByteString = {
    val output = new ByteArrayOutputStream()
    val input = newDecodedInputStream(new ByteArrayInputStream(bytes.toArray))

    val buffer = new Array[Byte](500)
    // Copy until read() returns a negative count (end of stream).
    @tailrec def copy(from: InputStream, to: OutputStream): Unit = {
      val read = from.read(buffer)
      if (read >= 0) {
        to.write(buffer, 0, read)
        copy(from, to)
      }
    }

    copy(input, output)
    ByteString(output.toByteArray)
  }

  /** Decodes an arbitrarily-chunked stream of compressed input. */
  def decodeChunks(input: Source[ByteString, NotUsed]): ByteString =
    input.via(decoderFlow()).join.awaitResult(3.seconds) // TODO make it use remaining?

  /** Decodes compressed chunks produced by an iterator; mirrors decodeChunks. */
  // NOTE(review): uses bare Await.result while the other helpers use
  // awaitResult from CompressionTestingTools — consider aligning.
  def decodeFromIterator(iterator: () => Iterator[ByteString]): ByteString =
    Await.result(Source.fromIterator(iterator).via(decoderFlow()).join, 3.seconds)
}
--------------------------------------------------------------------------------