├── .gitignore
├── legacy-processor-1
│   ├── project
│   │   └── build.properties
│   ├── .scalafmt.conf
│   ├── src
│   │   └── main
│   │       ├── resources
│   │       │   └── application.conf
│   │       └── scala
│   │           └── io
│   │               └── ticofab
│   │                   └── legacyprocessor1
│   │                       ├── LegacyProcessor1App.scala
│   │                       ├── RSocketSink.scala
│   │                       └── RSocketSource.scala
│   ├── README.md
│   ├── build.sbt
│   └── .gitignore
├── sources-producer
│   ├── project
│   │   └── build.properties
│   ├── src
│   │   └── main
│   │       ├── resources
│   │       │   └── application.conf
│   │       └── scala
│   │           └── io
│   │               └── ticofab
│   │                   └── sourcesproducer
│   │                       └── SourcesProducer.scala
│   ├── README.md
│   ├── build.sbt
│   └── .gitignore
├── reactive-pipeline.png
├── legacy-processor-2
│   ├── legacy-processor-2.iml
│   ├── README.md
│   ├── .gitignore
│   ├── pom.xml
│   └── src
│       └── main
│           └── java
│               └── io
│                   └── ticofab
│                       └── legacyprocessor2
│                           └── LegacyProcessor2.java
├── infra
│   ├── README.md
│   └── docker-compose.yaml
└── README.md
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 |
--------------------------------------------------------------------------------
/legacy-processor-1/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version = 1.4.5
--------------------------------------------------------------------------------
/sources-producer/project/build.properties:
--------------------------------------------------------------------------------
1 | sbt.version = 1.4.5
--------------------------------------------------------------------------------
/legacy-processor-1/.scalafmt.conf:
--------------------------------------------------------------------------------
1 | version = 2.5.0
2 | maxColumn = 100
3 | align.preset = more
--------------------------------------------------------------------------------
/reactive-pipeline.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ticofab/reactive-pipeline-rsocket/HEAD/reactive-pipeline.png
--------------------------------------------------------------------------------
/legacy-processor-2/legacy-processor-2.iml:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/sources-producer/src/main/resources/application.conf:
--------------------------------------------------------------------------------
1 | our-kafka-consumer: ${akka.kafka.consumer} {
2 |   kafka-clients {
3 |     bootstrap.servers = "localhost:9092"
4 |   }
5 | }
6 |
--------------------------------------------------------------------------------
/legacy-processor-1/src/main/resources/application.conf:
--------------------------------------------------------------------------------
1 | our-kafka-consumer: ${akka.kafka.consumer} {
2 |   kafka-clients {
3 |     bootstrap.servers = "localhost:9092"
4 |   }
5 | }
6 |
--------------------------------------------------------------------------------
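A note on the two `application.conf` files above: `${akka.kafka.consumer}` is a HOCON substitution, so `our-kafka-consumer` inherits all of Alpakka Kafka's reference consumer settings and only overrides `bootstrap.servers`. A minimal sketch of how such a section becomes consumer settings (`ConfigSketch` is hypothetical, not a file in the repo; `LegacyProcessor1App` further down does the same thing):

```
import akka.actor.ActorSystem
import akka.kafka.ConsumerSettings
import org.apache.kafka.common.serialization.StringDeserializer

object ConfigSketch extends App {
  implicit val system: ActorSystem = ActorSystem("configSketch")

  // resolves the merged section: Alpakka defaults + our bootstrap.servers override
  val config = system.settings.config.getConfig("our-kafka-consumer")
  val settings = ConsumerSettings(config, new StringDeserializer, new StringDeserializer)
    .withGroupId("config-sketch-group")

  println(settings)
}
```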
/sources-producer/README.md:
--------------------------------------------------------------------------------
1 | # sources-producer
2 |
3 | Once the infrastructure from the Infra project is up, this project pushes data into the first Kafka topic.
4 | Run with
5 |
6 | ```
7 | sbt run
8 | ```
9 |
--------------------------------------------------------------------------------
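To check that messages are actually arriving, a throwaway console consumer helps. `FirstTopicTap` below is hypothetical (not part of the repo) and uses only the plain `kafka-clients` API; point it at `SecondTopic` instead to inspect the far end of the pipeline:

```
import java.time.Duration
import java.util.Properties

import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer}
import org.apache.kafka.common.serialization.StringDeserializer

import scala.jdk.CollectionConverters._

object FirstTopicTap extends App {
  val props = new Properties()
  props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
  props.put(ConsumerConfig.GROUP_ID_CONFIG, "first-topic-tap")
  props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

  val consumer = new KafkaConsumer(props, new StringDeserializer, new StringDeserializer)
  consumer.subscribe(List("FirstTopic").asJava)

  // poll forever, printing whatever sources-producer has pushed so far
  while (true)
    consumer
      .poll(Duration.ofSeconds(1))
      .asScala
      .foreach(r => println(s"offset ${r.offset}: ${r.value}"))
}
```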
/legacy-processor-2/README.md:
--------------------------------------------------------------------------------
1 | # legacy-processor-2
2 |
3 | This app receives data from an upstream RSocket connection and, after processing, publishes items to a Kafka topic.
4 |
5 | ```
6 | RSocket --> internal logic --> Kafka
7 | ```
8 |
9 | Launch with
10 |
11 | ```
12 | mvn clean compile exec:java
13 | ```
14 |
--------------------------------------------------------------------------------
/infra/README.md:
--------------------------------------------------------------------------------
1 | # Infra
2 |
3 | To spin up the infrastructure, make sure Docker is installed and, from the project root, launch
4 |
5 | ```
6 | docker-compose up
7 | ```
8 |
9 | This starts a Kafka broker (with the topics `FirstTopic` and `SecondTopic` pre-created) and the Kafdrop UI at http://localhost:9093/.
10 |
11 | To stop execution, first stop everything with CTRL-C and then run
12 |
13 | ```
14 | docker-compose down
15 | ```
16 |
17 | to dismantle everything.
18 |
--------------------------------------------------------------------------------
/legacy-processor-1/README.md:
--------------------------------------------------------------------------------
1 | # legacy-processor-1
2 |
3 | The purpose of this project is to link an external data source to an external data consumer after some internal processing.
4 | Currently:
5 |
6 | ```
7 | Kafka --> internal logic --> RSocket
8 | ```
9 |
10 | Launch with
11 |
12 | ```
13 | sbt run
14 | ```
15 |
--------------------------------------------------------------------------------
/sources-producer/build.sbt:
--------------------------------------------------------------------------------
1 | name := "sources-producer"
2 |
3 | version := "0.1"
4 |
5 | scalaVersion := "2.13.4"
6 |
7 | val AkkaVersion = "2.6.10"
8 | val AlpakkaVersion = "2.0.5"
9 | libraryDependencies ++= Seq(
10 | "com.typesafe.akka" %% "akka-stream-kafka" % AlpakkaVersion,
11 | "com.typesafe.akka" %% "akka-stream" % AkkaVersion,
12 | "org.slf4j" % "slf4j-simple" % "1.7.28" // TODO use nop
13 | )
--------------------------------------------------------------------------------
/legacy-processor-1/build.sbt:
--------------------------------------------------------------------------------
1 | name := "legacy-processor-1"
2 |
3 | version := "0.1"
4 |
5 | scalaVersion := "2.13.4"
6 |
7 | val AkkaVersion = "2.6.10"
8 | val AlpakkaVersion = "2.0.5"
9 | val RSocketVersion = "1.1.0"
10 |
11 | libraryDependencies ++= Seq(
12 | "com.typesafe.akka" %% "akka-stream-kafka" % AlpakkaVersion,
13 | "com.typesafe.akka" %% "akka-stream" % AkkaVersion,
14 | "org.slf4j" % "slf4j-simple" % "1.7.28", // TODO use nop
15 | "io.rsocket" % "rsocket-core" % RSocketVersion,
16 | "io.rsocket" % "rsocket-transport-netty" % RSocketVersion
17 | )
--------------------------------------------------------------------------------
/sources-producer/.gitignore:
--------------------------------------------------------------------------------
1 | # sbt
2 | lib_managed
3 | project/project
4 | target
5 | .bsp/sbt.json
6 |
7 | # Worksheets (Eclipse or IntelliJ)
8 | *.sc
9 |
10 | # Eclipse
11 | .cache*
12 | .classpath
13 | .project
14 | .scala_dependencies
15 | .settings
16 | .target
17 | .worksheet
18 |
19 | # IntelliJ
20 | .idea
21 |
22 | # ENSIME
23 | .ensime
24 | .ensime_lucene
25 | .ensime_cache
26 |
27 | # Mac
28 | .DS_Store
29 |
30 | # Metrics
31 | native/
32 |
33 | # Log files
34 | *.log
35 | *.log.gz
36 |
37 | # Akka Persistence
38 | snapshots
39 |
40 | # jenv
41 | .java-version
42 |
43 | # nohup
44 | nohup.out
45 |
--------------------------------------------------------------------------------
/legacy-processor-1/.gitignore:
--------------------------------------------------------------------------------
1 | # sbt
2 | lib_managed
3 | project/project
4 | target
5 | .bsp/sbt.json
6 |
7 | # Worksheets (Eclipse or IntelliJ)
8 | *.sc
9 |
10 | # Eclipse
11 | .cache*
12 | .classpath
13 | .project
14 | .scala_dependencies
15 | .settings
16 | .target
17 | .worksheet
18 |
19 | # IntelliJ
20 | .idea
21 |
22 | # ENSIME
23 | .ensime
24 | .ensime_lucene
25 | .ensime_cache
26 |
27 | # Mac
28 | .DS_Store
29 |
30 | # Metrics
31 | native/
32 |
33 | # Log files
34 | *.log
35 | *.log.gz
36 |
37 | # Akka Persistence
38 | snapshots
39 |
40 | # jenv
41 | .java-version
42 |
43 | # nohup
44 | nohup.out
45 |
--------------------------------------------------------------------------------
/legacy-processor-2/.gitignore:
--------------------------------------------------------------------------------
1 | # sbt
2 | lib_managed
3 | project/project
4 | target
5 | .bsp/sbt.json
6 |
7 | # Worksheets (Eclipse or IntelliJ)
8 | *.sc
9 |
10 | # Eclipse
11 | .cache*
12 | .classpath
13 | .project
14 | .scala_dependencies
15 | .settings
16 | .target
17 | .worksheet
18 |
19 | # IntelliJ
20 | .idea
21 |
22 | # ENSIME
23 | .ensime
24 | .ensime_lucene
25 | .ensime_cache
26 |
27 | # Mac
28 | .DS_Store
29 |
30 | # Metrics
31 | native/
32 |
33 | # Log files
34 | *.log
35 | *.log.gz
36 |
37 | # Akka Persistence
38 | snapshots
39 |
40 | # jenv
41 | .java-version
42 |
43 | # nohup
44 | nohup.out
45 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Reactive Pipeline w/ Kafka, Akka Streams, RSocket
2 |
3 | This is the repository for the talk "Retrofit your Java App with a Reactive Flow Pipeline" by [Mary Grygleski](https://twitter.com/mgrygles) and [Fabio Tiriticco](https://twitter.com/ticofab).
4 |
5 | These projects implement the pipeline shown in the image below:
6 |
7 | 
8 |
9 | Data is ingested from a Kafka topic by a Scala application, processed, and passed on via [RSocket](http://rsocket.io) to the next application, which is written in Java. The purpose is to show some techniques for implementing a pipeline that adheres to the Reactive Streams specification throughout the flow.
10 |
11 |
--------------------------------------------------------------------------------
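For readers new to Reactive Streams, the property the pipeline relies on is that demand flows upstream: a fast producer is slowed down to the pace of its consumer instead of flooding it. A tiny self-contained sketch (not part of the repo) that makes this visible with Akka Streams:

```
import akka.actor.ActorSystem
import akka.stream.scaladsl.{Sink, Source}

import scala.concurrent.duration.DurationInt

object BackpressureDemo extends App {
  implicit val system: ActorSystem = ActorSystem("demo")

  Source(1 to 100)
    .map { n => println(s"produced $n"); n }
    .throttle(1, 1.second) // simulate a slow downstream
    .runWith(Sink.foreach(n => println(s"consumed $n")))

  // "produced" only runs a small, bounded distance ahead of "consumed":
  // demand propagates upstream instead of elements piling up in memory.
}
```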
/infra/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: "3.4"
2 |
3 | # To start this file -> $ docker-compose up
4 |
5 | services:
6 |   #
7 |   # Kafka
8 |   #
9 |   kafka:
10 |     image: krisgeus/docker-kafka
11 |     ports:
12 |       - "2181:2181"
13 |       - "9092:9092"
14 |     restart: "no"
15 |     environment:
16 |       LISTENERS: "INTERNAL://:29092,EXTERNAL://:9092"
17 |       ADVERTISED_LISTENERS: "INTERNAL://kafka:29092,EXTERNAL://localhost:9092"
18 |       SECURITY_PROTOCOL_MAP: "INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT"
19 |       INTER_BROKER: "INTERNAL"
20 |       KAFKA_CREATE_TOPICS: "FirstTopic:1:1,SecondTopic:1:1"
21 |
22 |   #
23 |   # Kafka UI
24 |   # kafdrop accessible via: http://localhost:9093/
25 |   #
26 |   kafdrop:
27 |     image: obsidiandynamics/kafdrop
28 |     restart: "no"
29 |     ports:
30 |       - "9093:9000"
31 |     environment:
32 |       KAFKA_BROKERCONNECT: "kafka:29092"
33 |       JVM_OPTS: "-Xms16M -Xmx48M -Xss180K -XX:-TieredCompilation -XX:+UseStringDeduplication -noverify"
34 |     depends_on:
35 |       - "kafka"
--------------------------------------------------------------------------------
/sources-producer/src/main/scala/io/ticofab/sourcesproducer/SourcesProducer.scala:
--------------------------------------------------------------------------------
1 | package io.ticofab.sourcesproducer
2 |
3 | import akka.actor.ActorSystem
4 | import akka.kafka.ProducerSettings
5 | import akka.kafka.scaladsl.Producer
6 | import akka.stream.scaladsl.Source
7 | import org.apache.kafka.clients.producer.ProducerRecord
8 | import org.apache.kafka.common.serialization.StringSerializer
9 |
10 | import java.time.LocalTime
11 | import java.time.format.DateTimeFormatter
12 | import scala.concurrent.duration.DurationInt
13 |
14 |
15 | object SourcesProducer extends App {
16 | implicit val system: ActorSystem = ActorSystem("SourcesProducer")
17 | val config = system.settings.config.getConfig("akka.kafka.producer")
18 | val producerSettings = ProducerSettings(config, new StringSerializer, new StringSerializer)
19 | .withBootstrapServers("localhost:9092")
20 |
21 | val formatter = DateTimeFormatter.ofPattern("HH:mm:ss")
22 |
23 | Source
24 | .tick(1.second, 1.second, ())
25 | .map(_ => LocalTime.now.format(formatter))
26 | .map(now => {
27 | val msg = "message-created-at-" + now
28 | println(msg)
29 | new ProducerRecord[String, String]("FirstTopic", msg)
30 | })
31 | .runWith(Producer.plainSink(producerSettings))
32 |
33 | }
34 |
--------------------------------------------------------------------------------
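The tick source above never completes, so the app runs until killed. If a self-terminating variant is preferred, a sketch (hypothetical, reusing the `producerSettings` already in scope) could produce a fixed number of records and shut the actor system down once the sink's `Future` completes:

```
// inside SourcesProducer, replacing the tick-based stream
Source(1 to 10)
  .map(n => new ProducerRecord[String, String]("FirstTopic", s"message-$n"))
  .runWith(Producer.plainSink(producerSettings))
  .onComplete(_ => system.terminate())(system.dispatcher)
```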
/legacy-processor-2/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <modelVersion>4.0.0</modelVersion>
6 |
7 |     <groupId>org.example</groupId>
8 |     <artifactId>legacy-processor-2</artifactId>
9 |     <version>1.0-SNAPSHOT</version>
10 |
11 |     <properties>
12 |         <maven.compiler.source>11</maven.compiler.source>
13 |         <maven.compiler.target>11</maven.compiler.target>
14 |         <exec.mainClass>io.ticofab.legacyprocessor2.LegacyProcessor2</exec.mainClass>
15 |     </properties>
16 |
17 |     <dependencies>
18 |
19 |         <!-- RSocket core -->
20 |         <dependency>
21 |             <groupId>io.rsocket</groupId>
22 |             <artifactId>rsocket-core</artifactId>
23 |             <version>1.1.0</version>
24 |         </dependency>
25 |
26 |         <!-- RSocket TCP transport (Netty) -->
27 |         <dependency>
28 |             <groupId>io.rsocket</groupId>
29 |             <artifactId>rsocket-transport-netty</artifactId>
30 |             <version>1.1.0</version>
31 |         </dependency>
32 |
33 |         <!-- simple SLF4J binding for console logging -->
34 |         <dependency>
35 |             <groupId>org.slf4j</groupId>
36 |             <artifactId>slf4j-simple</artifactId>
37 |             <version>1.7.30</version>
38 |         </dependency>
39 |
40 |         <!-- plain Kafka producer client -->
41 |         <dependency>
42 |             <groupId>org.apache.kafka</groupId>
43 |             <artifactId>kafka-clients</artifactId>
44 |             <version>2.7.0</version>
45 |         </dependency>
46 |
47 |     </dependencies>
48 |
49 | </project>
--------------------------------------------------------------------------------
/legacy-processor-1/src/main/scala/io/ticofab/legacyprocessor1/LegacyProcessor1App.scala:
--------------------------------------------------------------------------------
1 | package io.ticofab.legacyprocessor1
2 |
3 | import akka.actor.ActorSystem
4 | import akka.kafka.scaladsl.Consumer
5 | import akka.kafka.{ConsumerSettings, Subscriptions}
6 | import akka.stream.scaladsl.Sink
7 | import org.apache.kafka.clients.consumer.ConsumerRecord
8 | import org.apache.kafka.common.TopicPartition
9 | import org.apache.kafka.common.serialization.StringDeserializer
10 |
11 | object LegacyProcessor1App extends App {
12 |
13 | println("Legacy Processor 1 starting.")
14 | implicit val actorSystem: ActorSystem = ActorSystem("legacyProcessor1")
15 |
16 | // settings to consume from a kafka topic
17 | val config = actorSystem.settings.config.getConfig("our-kafka-consumer")
18 | val consumerSettings = ConsumerSettings(config, new StringDeserializer, new StringDeserializer)
19 | val kafkaSubscription = Subscriptions.assignmentWithOffset(new TopicPartition("FirstTopic", 0), 0)
20 |
21 | // the RSocket sink that will propagate items downstream
22 | val rSocketSink = Sink.fromGraph(new RSocketSink(7000))
23 |
24 | // connect to the running Kafka topic and consume from it
25 | Consumer
26 | .plainSource(consumerSettings, kafkaSubscription)
27 |
28 | // process one item (this encapsulates our legacy logic)
29 | .map(processItem)
30 |
31 | // push the outcome of the processing
32 | .to(rSocketSink)
33 |
34 | // triggers execution of the stream we built
35 | .run()
36 |
37 | // simulates processing of an item received from the topic
38 | def processItem(msg: ConsumerRecord[String, String]) = {
39 | println(s"read from kafka: '${msg.value}' at offset ${msg.offset}")
40 | receivedElements += 1
41 | "message-" + receivedElements
42 | }
43 |
44 | // keep track of received elements
45 | var receivedElements = 0
46 | }
47 |
--------------------------------------------------------------------------------
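The mutable `receivedElements` counter above works here because only the stream's own `map` stage touches it, but shared mutable state is easy to get wrong. An alternative sketch (same wiring, not the repo's actual code) keeps the count inside the stream with `zipWithIndex`:

```
Consumer
  .plainSource(consumerSettings, kafkaSubscription)
  .zipWithIndex
  .map { case (record, idx) =>
    println(s"read from kafka: '${record.value}' at offset ${record.offset}")
    s"message-${idx + 1}"
  }
  .to(rSocketSink)
  .run()
```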
/legacy-processor-1/src/main/scala/io/ticofab/legacyprocessor1/RSocketSink.scala:
--------------------------------------------------------------------------------
1 | package io.ticofab.legacyprocessor1
2 |
3 | import akka.stream._
4 | import akka.stream.stage._
5 | import io.rsocket._
6 | import io.rsocket.core._
7 | import io.rsocket.transport.netty.client.TcpClientTransport
8 | import io.rsocket.util._
9 | import reactor.util.retry.Retry
10 |
11 | import java.time.Duration
12 |
13 | class RSocketSink(port: Int, host: String = "localhost") extends GraphStage[SinkShape[String]] {
14 |
15 | val in: Inlet[String] = Inlet("RSocketSinkInput")
16 | var socket: RSocket = null // don't use null in prod!
17 |
18 | override def shape: SinkShape[String] = SinkShape(in)
19 |
20 | override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
21 | new GraphStageLogic(shape) {
22 |
23 | override def preStart(): Unit = {
24 | println(s"pre start, connecting to $host:$port")
25 | socket = RSocketConnector
26 | .create()
27 | .reconnect(Retry.backoff(50, Duration.ofMillis(500)))
28 | .connect(TcpClientTransport.create(host, port))
29 | .block
30 | println("socket connected")
31 |
32 | // Akka Streams semantics: ask an element as soon as you start.
33 | // 'pull' tells upstream that we have capacity to handle the next item.
34 | pull(in)
35 | }
36 |
37 | setHandler(
38 | in,
39 | new InHandler {
40 | override def onPush(): Unit = {
41 |
42 | // Akka Streams semantics: 'grab' returns the last item that has
43 | // been pulled (see above) so that we can do something with it.
44 | val payload = grab(in)
45 |
46 | // do operation
47 | socket
48 | .requestStream(DefaultPayload.create(payload))
49 | .doOnRequest(_ => pull(in))
50 | .doOnError(e => println(s"error ${e.getCause}"))
51 | .onErrorStop()
52 | .blockFirst()
53 | }
54 |
55 | override def onUpstreamFinish(): Unit = {
56 | println("RSocketSink onUpstreamFinish")
57 | super.onUpstreamFinish()
58 | socket.dispose()
59 | }
60 |
61 | override def onUpstreamFailure(ex: Throwable): Unit = {
62 | println("RSocketSink, onUpstreamFailure")
63 | super.onUpstreamFailure(ex)
64 | socket.dispose()
65 | }
66 | }
67 | )
68 | }
69 | }
70 |
--------------------------------------------------------------------------------
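Standalone, the sink attaches to any `Source[String, _]`. A minimal hypothetical wiring (in package `io.ticofab.legacyprocessor1`, assuming an RSocket responder is already listening on port 7000, as `LegacyProcessor2` is below):

```
import akka.actor.ActorSystem
import akka.stream.scaladsl.{Sink, Source}

object RSocketSinkDemo extends App {
  implicit val system: ActorSystem = ActorSystem("sinkDemo")

  Source(List("one", "two", "three"))
    .runWith(Sink.fromGraph(new RSocketSink(7000)))
}
```

Because `onPush` blocks on `blockFirst()` until the responder replies, the stage cannot handle the next element before the remote side has answered; crude, but it keeps the backpressure chain intact end to end.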
/legacy-processor-1/src/main/scala/io/ticofab/legacyprocessor1/RSocketSource.scala:
--------------------------------------------------------------------------------
1 | package io.ticofab.legacyprocessor1
2 |
3 | import akka.stream.stage.{GraphStage, GraphStageLogic, OutHandler}
4 | import akka.stream.{Attributes, Outlet, SourceShape}
5 | import io.rsocket.core.RSocketServer
6 | import io.rsocket.frame.decoder.PayloadDecoder
7 | import io.rsocket.transport.netty.server.TcpServerTransport
8 | import io.rsocket.{Payload, SocketAcceptor}
9 | import reactor.core.publisher.Mono
10 |
11 | import java.util.concurrent.LinkedBlockingDeque
12 |
13 | /**
14 | * Implements a Source that listens for incoming RSocket fire-and-forget connections and
15 | * pushes the received elements downstream.
16 | * @param port The TCP port to open for incoming connections
17 | * @param host The host to bind to (defaults to 0.0.0.0, i.e. all interfaces)
18 | */
19 | class RSocketSource(port: Int, host: String = "0.0.0.0")
20 | extends GraphStage[SourceShape[Array[Byte]]] {
21 |
22 | // the outlet port of this stage, which produces byte arrays
23 | val out: akka.stream.Outlet[Array[Byte]] = Outlet("RsocketSourceOut")
24 |
25 | // Coordination queue
26 | private val blockingQueue = new LinkedBlockingDeque[Array[Byte]]()
27 |
28 | override val shape: SourceShape[Array[Byte]] = SourceShape(out)
29 |
30 | override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
31 | new GraphStageLogic(shape) {
32 |
33 | val fNFSocketAcceptor: SocketAcceptor =
34 | SocketAcceptor.forFireAndForget((payload: Payload) => {
35 | // Get data
36 | val buffer = payload.getData
37 | val data = new Array[Byte](buffer.remaining())
38 | buffer.get(data)
39 | // Queue it for processing
40 | blockingQueue.add(data)
41 | payload.release()
42 | Mono.empty()
43 | })
44 |
45 | // Pre start. create server
46 | override def preStart(): Unit = {
47 | RSocketServer
48 | .create(fNFSocketAcceptor)
49 | .payloadDecoder(PayloadDecoder.ZERO_COPY)
50 | .bind(TcpServerTransport.create(host, port))
51 | .subscribe
52 |
53 | println(s"Bound RSocket server to $host:$port")
54 | }
55 |
56 | // create a handler for the outlet
57 | setHandler(
58 | out,
59 | new OutHandler {
60 | // when downstream asks for data (pulls)
61 | override def onPull(): Unit = {
62 | // emit the next queued element; take() blocks until one is available
63 | push(out, blockingQueue.take())
64 | }
65 | }
66 | )
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
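Nothing in this repo actually wires `RSocketSource` up (`LegacyProcessor1App` only uses the sink), so here is a matching hypothetical wiring: listen for fire-and-forget payloads and print them as UTF-8.

```
import java.nio.charset.StandardCharsets

import akka.actor.ActorSystem
import akka.stream.scaladsl.Source

object RSocketSourceDemo extends App {
  implicit val system: ActorSystem = ActorSystem("sourceDemo")

  Source
    .fromGraph(new RSocketSource(7001))
    .map(bytes => new String(bytes, StandardCharsets.UTF_8))
    .runForeach(println)
}
```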
/legacy-processor-2/src/main/java/io/ticofab/legacyprocessor2/LegacyProcessor2.java:
--------------------------------------------------------------------------------
1 | package io.ticofab.legacyprocessor2;
2 |
3 | import io.rsocket.SocketAcceptor;
4 | import io.rsocket.core.RSocketServer;
5 | import io.rsocket.frame.decoder.PayloadDecoder;
6 | import io.rsocket.transport.netty.server.TcpServerTransport;
7 | import org.apache.kafka.clients.producer.KafkaProducer;
8 | import org.apache.kafka.clients.producer.ProducerConfig;
9 | import org.apache.kafka.clients.producer.ProducerRecord;
10 | import org.apache.kafka.common.serialization.StringSerializer;
11 | import reactor.core.publisher.Flux;
12 |
13 | import java.util.Properties;
14 | import java.util.Random;
15 |
16 | public class LegacyProcessor2 {
17 |
18 | public static void main(String[] args) {
19 | log("starting legacy processor 2");
20 | KafkaProducer<String, String> kafkaProducer = getKafkaProducer();
21 |
22 | // define the behaviour upon receiving data from the RSocket source
23 | var acceptorRS = SocketAcceptor.forRequestStream(
24 | payload -> {
25 | String data = payload.getDataUtf8();
26 |
27 | // encapsulates some legacy processing logic
28 | String processedData = doSomeProcessing(data);
29 |
30 | // publish to destination topic
31 | kafkaProducer.send(new ProducerRecord<>("SecondTopic", processedData));
32 |
33 | // when we return this, we signal that we are ready to process
34 | // the next item, conforming to the Reactive Streams standard.
35 | return Flux.empty();
36 | }
37 | );
38 |
39 | // create a RSocket server to accept items from upstream.
40 | var server = RSocketServer
41 | .create(acceptorRS)
42 | .payloadDecoder(PayloadDecoder.ZERO_COPY)
43 | .bind(TcpServerTransport.create("localhost", 7000))
44 | .subscribe(
45 | c -> log("connection established"),
46 | e -> log("error: " + e),
47 | () -> log("server creation completed.")
48 | );
49 |
50 | log("started server");
51 |
52 | // keep the JVM alive so the RSocket server can keep serving requests.
53 | try {
54 | Thread.sleep(1000000);
55 | log("Timer expired. I'm outta here!");
56 | server.dispose();
57 | } catch (InterruptedException e) {
58 | e.printStackTrace();
59 | }
60 |
61 | }
62 |
63 | /**
64 | * Configures and instantiates a KafkaProducer object.
65 | *
66 | * @return The KafkaProducer object.
67 | */
68 | private static KafkaProducer<String, String> getKafkaProducer() {
69 | // Set up client Java properties
70 | Properties props = new Properties();
71 | props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
72 | props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
73 | props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
74 | props.setProperty(ProducerConfig.ACKS_CONFIG, "1");
75 | return new KafkaProducer<>(props);
76 | }
77 |
78 | private static final Random random = new Random();
79 |
80 | /**
81 | * Keeps the machine busy: one second most of the time, five seconds roughly 20% of the time.
82 | */
83 | private static String doSomeProcessing(String data) {
84 | try {
85 | var seconds = random.nextDouble() > 0.8 ? 5 : 1;
86 | log("received " + data + " and processing for " + seconds + " seconds.");
87 | for (int i = 0; i < seconds - 1; i++ ) {
88 | Thread.sleep(1000);
89 | log("");
90 | }
91 | Thread.sleep(1000);
92 | return data + "-twice";
93 | } catch (InterruptedException e) {
94 | // something went wrong while processing, signal it downstream
95 | return data + "-then-failed";
96 | }
97 | }
98 |
99 | // a simple shortcut println method
100 | private static void log(String s) {
101 | System.out.println(s);
102 | }
103 | }
104 |
--------------------------------------------------------------------------------
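To exercise LegacyProcessor2 in isolation (without running legacy-processor-1), a hypothetical Scala probe can mimic what `RSocketSink` does: open a connection to port 7000 and fire a single request-stream.

```
import io.rsocket.core.RSocketConnector
import io.rsocket.transport.netty.client.TcpClientTransport
import io.rsocket.util.DefaultPayload

object LegacyProcessor2Probe extends App {
  // connect to the responder started by LegacyProcessor2
  val socket = RSocketConnector
    .create()
    .connect(TcpClientTransport.create("localhost", 7000))
    .block()

  // fire one request-stream, as RSocketSink does, and wait for it to complete
  // (the responder returns an empty Flux, so blockLast() simply returns)
  socket
    .requestStream(DefaultPayload.create("probe-message"))
    .blockLast()

  socket.dispose()
}
```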