This repository was archived by the owner on May 25, 2023. It is now read-only.

Commit 5107f95

Merge branch 'scalafmt' of https://github.com/joan38/kafka-streams-scala into joan38-scalafmt
2 parents 560dcc1 + e0c0f68 commit 5107f95

32 files changed: +501 -592 lines

.scalafmt.conf

+6
@@ -0,0 +1,6 @@
+maxColumn = 120
+continuationIndent.defnSite = 2
+assumeStandardLibraryStripMargin = true
+danglingParentheses = true
+align = more
+rewrite.rules = [SortImports, RedundantBraces, RedundantParens, SortModifiers]
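For context, a minimal illustration of what two of these rewrite rules do; the snippet is hypothetical and not taken from this repository. SortImports orders the selectors inside an import's braces, and RedundantBraces drops the braces around a single-expression method body.

// Hypothetical input before formatting:
//   import java.util.{UUID, Locale}
//   def double(i: Int): Int = { i * 2 }
// Output once scalafmt applies SortImports and RedundantBraces:
import java.util.{Locale, UUID}
def double(i: Int): Int = i * 2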

.travis.yml

+2 -2
@@ -3,10 +3,10 @@ sudo: false
 jdk: oraclejdk8
 scala:
 - 2.11.11
-- 2.12.4
+- 2.12.6
 sbt_args: -mem 2000
 script:
-- sbt "++ ${TRAVIS_SCALA_VERSION}!" test
+- sbt "++ ${TRAVIS_SCALA_VERSION}!" scalafmtCheck Test/scalafmtCheck scalafmtSbtCheck test
 cache:
   directories:
   - "$HOME/.ivy2/cache"

build.sbt

+22 -34
@@ -1,58 +1,46 @@
 import Dependencies._

 name := "kafka-streams-scala"
-
 organization := "com.lightbend"
-
 version := "0.2.1"
-
 scalaVersion := Versions.Scala_2_12_Version
-
 crossScalaVersions := Versions.CrossScalaVersions
-
 scalacOptions := Seq("-Xexperimental", "-unchecked", "-deprecation", "-Ywarn-unused-import")
-
-parallelExecution in Test := false
-
-libraryDependencies ++= Seq(
-  kafkaStreams excludeAll(ExclusionRule("org.slf4j", "slf4j-log4j12"), ExclusionRule("org.apache.zookeeper", "zookeeper")),
-  scalaLogging % "test",
-  logback % "test",
-  kafka % "test" excludeAll(ExclusionRule("org.slf4j", "slf4j-log4j12"), ExclusionRule("org.apache.zookeeper", "zookeeper")),
-  curator % "test",
-  minitest % "test",
-  minitestLaws % "test",
-  algebird % "test",
-  chill % "test",
-  avro4s % "test"
-)
-
-testFrameworks += new TestFramework("minitest.runner.Framework")
-
 licenses := Seq("Apache 2" -> new URL("http://www.apache.org/licenses/LICENSE-2.0.txt"))
-
 developers := List(
   Developer("debasishg", "Debasish Ghosh", "@debasishg", url("https://github.com/debasishg")),
   Developer("blublinsky", "Boris Lublinsky", "@blublinsky", url("https://github.com/blublinsky")),
   Developer("maasg", "Gerard Maas", "@maasg", url("https://github.com/maasg"))
 )
-
 organizationName := "lightbend"
-
-organizationHomepage := Some(url("http://lightbend.com/"))
-
+organizationHomepage := Option(url("http://lightbend.com/"))
 homepage := scmInfo.value map (_.browseUrl)
+scmInfo := Option(
+  ScmInfo(url("https://github.com/lightbend/kafka-streams-scala"), "git@github.com:lightbend/kafka-streams-scala.git")
+)

-scmInfo := Some(ScmInfo(url("https://github.com/lightbend/kafka-streams-scala"), "git@github.com:lightbend/kafka-streams-scala.git"))
+parallelExecution in Test := false
+testFrameworks += new TestFramework("minitest.runner.Framework")

-credentials += Credentials(Path.userHome / ".ivy2" / ".credentials")
+libraryDependencies ++= Seq(
+  kafkaStreams excludeAll (ExclusionRule("org.slf4j", "slf4j-log4j12"), ExclusionRule("org.apache.zookeeper",
+                                                                                      "zookeeper")),
+  scalaLogging % "test",
+  logback % "test",
+  kafka % "test" excludeAll (ExclusionRule("org.slf4j", "slf4j-log4j12"), ExclusionRule("org.apache.zookeeper",
+                                                                                        "zookeeper")),
+  curator % "test",
+  minitest % "test",
+  minitestLaws % "test",
+  algebird % "test",
+  chill % "test",
+  avro4s % "test"
+)

+credentials += Credentials(Path.userHome / ".ivy2" / ".credentials")
 publishTo := {
   val nexus = "https://oss.sonatype.org/"
   if (isSnapshot.value) Some("snapshots" at nexus + "content/repositories/snapshots")
-  else Some("releases" at nexus + "service/local/staging/deploy/maven2")
+  else Option("releases" at nexus + "service/local/staging/deploy/maven2")
 }
-
-publishMavenStyle := true
-
 publishArtifact in Test := true

project/Dependencies.scala

+13 -12
@@ -5,21 +5,22 @@ object Dependencies {

   implicit class Exclude(module: ModuleID) {
     def log4jExclude: ModuleID =
-      module excludeAll(ExclusionRule("log4j"))
+      module.excludeAll(ExclusionRule("log4j"))

     def driverExclusions: ModuleID =
-      module.log4jExclude.exclude("com.google.guava", "guava")
+      module.log4jExclude
+        .exclude("com.google.guava", "guava")
         .excludeAll(ExclusionRule("org.slf4j"))
   }

-  val kafkaStreams = "org.apache.kafka" % "kafka-streams" % KafkaVersion
-  val scalaLogging = "com.typesafe.scala-logging" %% "scala-logging" % ScalaLoggingVersion
-  val logback = "ch.qos.logback" % "logback-classic" % LogbackVersion
-  val kafka = "org.apache.kafka" %% "kafka" % KafkaVersion
-  val curator = "org.apache.curator" % "curator-test" % CuratorVersion
-  val minitest = "io.monix" %% "minitest" % MinitestVersion
-  val minitestLaws = "io.monix" %% "minitest-laws" % MinitestVersion
-  val algebird = "com.twitter" %% "algebird-core" % AlgebirdVersion
-  val chill = "com.twitter" %% "chill" % ChillVersion
-  val avro4s = "com.sksamuel.avro4s" %% "avro4s-core" % Avro4sVersion
+  val kafkaStreams = "org.apache.kafka"            % "kafka-streams"    % KafkaVersion
+  val scalaLogging = "com.typesafe.scala-logging" %% "scala-logging"    % ScalaLoggingVersion
+  val logback      = "ch.qos.logback"              % "logback-classic"  % LogbackVersion
+  val kafka        = "org.apache.kafka"           %% "kafka"            % KafkaVersion
+  val curator      = "org.apache.curator"          % "curator-test"     % CuratorVersion
+  val minitest     = "io.monix"                   %% "minitest"         % MinitestVersion
+  val minitestLaws = "io.monix"                   %% "minitest-laws"    % MinitestVersion
+  val algebird     = "com.twitter"                %% "algebird-core"    % AlgebirdVersion
+  val chill        = "com.twitter"                %% "chill"            % ChillVersion
+  val avro4s       = "com.sksamuel.avro4s"        %% "avro4s-core"      % Avro4sVersion
 }

project/Versions.scala

+11 -11
@@ -1,14 +1,14 @@
 object Versions {
-  val AlgebirdVersion = "0.13.0"
-  val ChillVersion = "0.9.2"
-  val LogbackVersion = "1.2.3"
-  val KafkaVersion = "1.0.0"
+  val AlgebirdVersion     = "0.13.0"
+  val ChillVersion        = "0.9.2"
+  val LogbackVersion      = "1.2.3"
+  val KafkaVersion        = "1.0.0"
   val ScalaLoggingVersion = "3.5.0"
-  val CuratorVersion = "4.0.0"
-  val MinitestVersion = "2.0.0"
-  val JDKVersion = "1.8"
-  val Scala_2_12_Version = "2.12.5"
-  val Scala_2_11_Version = "2.11.11"
-  val Avro4sVersion = "1.8.3"
-  val CrossScalaVersions = Seq(Scala_2_12_Version, Scala_2_11_Version )
+  val CuratorVersion      = "4.0.0"
+  val MinitestVersion     = "2.0.0"
+  val JDKVersion          = "1.8"
+  val Scala_2_12_Version  = "2.12.6"
+  val Scala_2_11_Version  = "2.11.12"
+  val Avro4sVersion       = "1.8.3"
+  val CrossScalaVersions  = Seq(Scala_2_12_Version, Scala_2_11_Version)
 }

project/build.properties

+1 -1
@@ -1 +1 @@
-sbt.version=1.1.0
+sbt.version=1.1.4

project/plugins.sbt

+2 -1
@@ -1 +1,2 @@
-addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "1.0.0")
+addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "1.0.0")
+addSbtPlugin("com.geirsson" % "sbt-scalafmt" % "1.5.0")
src/main/scala/com/lightbend/kafka/scala/streams/DefaultSerdes.scala

@@ -1,22 +1,20 @@
 /**
- * Copyright (C) 2018 Lightbend Inc. <https://www.lightbend.com>
- * Copyright 2017-2018 Alexis Seigneurin.
- */
-
+  * Copyright (C) 2018 Lightbend Inc. <https://www.lightbend.com>
+  * Copyright 2017-2018 Alexis Seigneurin.
+  */
 package com.lightbend.kafka.scala.streams

 import org.apache.kafka.common.serialization.{Serde, Serdes}

-
 /**
- * Implicit values for default serdes
- */
+  * Implicit values for default serdes
+  */
 object DefaultSerdes {
-  implicit val stringSerde: Serde[String] = Serdes.String()
-  implicit val longSerde: Serde[Long] = Serdes.Long().asInstanceOf[Serde[Long]]
-  implicit val byteArraySerde: Serde[Array[Byte]] = Serdes.ByteArray()
+  implicit val stringSerde: Serde[String]                              = Serdes.String()
+  implicit val longSerde: Serde[Long]                                  = Serdes.Long().asInstanceOf[Serde[Long]]
+  implicit val byteArraySerde: Serde[Array[Byte]]                      = Serdes.ByteArray()
   implicit val bytesSerde: Serde[org.apache.kafka.common.utils.Bytes] = Serdes.Bytes()
-  implicit val floatSerde: Serde[Float] = Serdes.Float().asInstanceOf[Serde[Float]]
-  implicit val doubleSerde: Serde[Double] = Serdes.Double().asInstanceOf[Serde[Double]]
-  implicit val integerSerde: Serde[Int] = Serdes.Integer().asInstanceOf[Serde[Int]]
+  implicit val floatSerde: Serde[Float]                                = Serdes.Float().asInstanceOf[Serde[Float]]
+  implicit val doubleSerde: Serde[Double]                              = Serdes.Double().asInstanceOf[Serde[Double]]
+  implicit val integerSerde: Serde[Int]                                = Serdes.Integer().asInstanceOf[Serde[Int]]
 }
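A minimal usage sketch, not from the repository: it assumes the published kafka-streams-scala 0.2.1 artifact is on the classpath, and the example object name is made up. Importing the object's members puts a Serde for each basic type into implicit scope, so any API that asks for an implicit Serde resolves without boilerplate.

import com.lightbend.kafka.scala.streams.DefaultSerdes._
import org.apache.kafka.common.serialization.Serde

object DefaultSerdesExample extends App {
  // Each lookup resolves to one of the implicit vals defined in DefaultSerdes above.
  val keySerde: Serde[String] = implicitly[Serde[String]] // stringSerde
  val valueSerde: Serde[Long] = implicitly[Serde[Long]]   // longSerde
  println(s"key serde: ${keySerde.getClass.getName}, value serde: ${valueSerde.getClass.getName}")
}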

src/main/scala/com/lightbend/kafka/scala/streams/FunctionConversions.scala

+13 -14
@@ -1,30 +1,29 @@
 /**
- * Copyright (C) 2018 Lightbend Inc. <https://www.lightbend.com>
- * Copyright 2017-2018 Alexis Seigneurin.
- */
-
+  * Copyright (C) 2018 Lightbend Inc. <https://www.lightbend.com>
+  * Copyright 2017-2018 Alexis Seigneurin.
+  */
 package com.lightbend.kafka.scala.streams

 import org.apache.kafka.streams.KeyValue
 import org.apache.kafka.streams.kstream._

 /**
- * Implicit classes that offer conversions of Scala function literals to
- * SAM (Single Abstract Method) objects in Java. These make the Scala APIs much
- * more expressive, with less boilerplate and more succinct.
- */
+  * Implicit classes that offer conversions of Scala function literals to
+  * SAM (Single Abstract Method) objects in Java. These make the Scala APIs much
+  * more expressive, with less boilerplate and more succinct.
+  */
 object FunctionConversions {

   implicit class PredicateFromFunction[K, V](val test: (K, V) => Boolean) extends AnyVal {
-    def asPredicate: Predicate[K,V] = test(_,_)
+    def asPredicate: Predicate[K, V] = test(_, _)
   }

-  implicit class MapperFromFunction[T, U, V](val f:(T,U) => V) extends AnyVal {
+  implicit class MapperFromFunction[T, U, V](val f: (T, U) => V) extends AnyVal {
     def asKeyValueMapper: KeyValueMapper[T, U, V] = (k: T, v: U) => f(k, v)
-    def asValueJoiner: ValueJoiner[T,U,V] = (v1, v2) => f(v1, v2)
+    def asValueJoiner: ValueJoiner[T, U, V] = (v1, v2) => f(v1, v2)
   }

-  implicit class KeyValueMapperFromFunction[K, V, KR, VR](val f:(K,V) => (KR, VR)) extends AnyVal {
+  implicit class KeyValueMapperFromFunction[K, V, KR, VR](val f: (K, V) => (KR, VR)) extends AnyVal {
     def asKeyValueMapper: KeyValueMapper[K, V, KeyValue[KR, VR]] = (k, v) => {
       val (kr, vr) = f(k, v)
       KeyValue.pair(kr, vr)
@@ -36,10 +35,10 @@ object FunctionConversions {
   }

   implicit class AggregatorFromFunction[K, V, VR](val f: (K, V, VR) => VR) extends AnyVal {
-    def asAggregator: Aggregator[K, V, VR] = (k,v,r) => f(k,v,r)
+    def asAggregator: Aggregator[K, V, VR] = (k, v, r) => f(k, v, r)
   }

-  implicit class MergerFromFunction[K,VR](val f: (K, VR, VR) => VR) extends AnyVal {
+  implicit class MergerFromFunction[K, VR](val f: (K, VR, VR) => VR) extends AnyVal {
     def asMerger: Merger[K, VR] = (k, v1, v2) => f(k, v1, v2)
   }
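A small usage sketch, hypothetical and assuming kafka-streams-scala 0.2.1 on the classpath (the object and value names are made up): with the implicit classes above imported, a plain Scala function literal converts to the matching Kafka Streams SAM type by calling the conversion method.

import com.lightbend.kafka.scala.streams.FunctionConversions._
import org.apache.kafka.streams.kstream.{KeyValueMapper, Predicate}

object FunctionConversionsExample {
  // A (K, V) => Boolean literal picks up PredicateFromFunction and becomes a Predicate.
  val longEnough: Predicate[String, String] =
    ((_: String, v: String) => v.length > 10).asPredicate

  // A (T, U) => V literal picks up MapperFromFunction and becomes a KeyValueMapper.
  val upperCased: KeyValueMapper[String, String, String] =
    ((_: String, v: String) => v.toUpperCase).asKeyValueMapper
}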

src/main/scala/com/lightbend/kafka/scala/streams/ImplicitConversions.scala

+9 -9
@@ -1,20 +1,19 @@
 /**
- * Copyright (C) 2018 Lightbend Inc. <https://www.lightbend.com>
- * Copyright 2017-2018 Alexis Seigneurin.
- */
-
+  * Copyright (C) 2018 Lightbend Inc. <https://www.lightbend.com>
+  * Copyright 2017-2018 Alexis Seigneurin.
+  */
 package com.lightbend.kafka.scala.streams

 import org.apache.kafka.streams.kstream._
-import org.apache.kafka.streams.{ KeyValue, Consumed }
+import org.apache.kafka.streams.{Consumed, KeyValue}
 import org.apache.kafka.common.serialization.Serde

 import scala.language.implicitConversions

 /**
- * Implicit conversions between the Scala wrapper objects and the underlying Java
- * objects.
- */
+  * Implicit conversions between the Scala wrapper objects and the underlying Java
+  * objects.
+  */
 object ImplicitConversions {

   implicit def wrapKStream[K, V](inner: KStream[K, V]): KStreamS[K, V] =
@@ -50,7 +49,8 @@ object ImplicitConversions {
   implicit def producedFromSerde[K, V](implicit keySerde: Serde[K], valueSerde: Serde[V]): Produced[K, V] =
     Produced.`with`(keySerde, valueSerde)

-  implicit def joinedFromKVOSerde[K, V, VO](implicit keySerde: Serde[K], valueSerde: Serde[V],
+  implicit def joinedFromKVOSerde[K, V, VO](implicit keySerde: Serde[K],
+                                            valueSerde: Serde[V],
                                             otherValueSerde: Serde[VO]): Joined[K, V, VO] =
     Joined.`with`(keySerde, valueSerde, otherValueSerde)
 }
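A sketch of the intended effect, hypothetical and assuming kafka-streams-scala 0.2.1 (the example object name is made up): once serdes are in implicit scope, the producedFromSerde and joinedFromKVOSerde derivations shown above materialize the Kafka Streams configuration objects on demand.

import com.lightbend.kafka.scala.streams.DefaultSerdes._
import com.lightbend.kafka.scala.streams.ImplicitConversions._
import org.apache.kafka.streams.kstream.{Joined, Produced}

object ImplicitConversionsExample {
  // Built by producedFromSerde from the implicit stringSerde and longSerde.
  val produced: Produced[String, Long] = implicitly[Produced[String, Long]]

  // Built by joinedFromKVOSerde, supplying key, value and other-value serdes in one step.
  val joined: Joined[String, Long, String] = implicitly[Joined[String, Long, String]]
}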

src/main/scala/com/lightbend/kafka/scala/streams/KGroupedStreamS.scala

+19 -27
@@ -1,8 +1,7 @@
 /**
- * Copyright (C) 2018 Lightbend Inc. <https://www.lightbend.com>
- * Copyright 2017-2018 Alexis Seigneurin.
- */
-
+  * Copyright (C) 2018 Lightbend Inc. <https://www.lightbend.com>
+  * Copyright 2017-2018 Alexis Seigneurin.
+  */
 package com.lightbend.kafka.scala.streams

 import org.apache.kafka.streams.kstream._
@@ -12,10 +11,9 @@ import org.apache.kafka.common.serialization.Serde
 import ImplicitConversions._
 import FunctionConversions._

-
 /**
- * Wraps the Java class KGroupedStream and delegates method calls to the underlying Java object.
- */
+  * Wraps the Java class KGroupedStream and delegates method calls to the underlying Java object.
+  */
 class KGroupedStreamS[K, V](inner: KGroupedStream[K, V]) {

   def count(): KTableS[K, Long] = {
@@ -24,47 +22,41 @@ class KGroupedStreamS[K, V](inner: KGroupedStream[K, V]) {
   }

   def count(store: String, keySerde: Option[Serde[K]] = None): KTableS[K, Long] = {
-    val materialized = keySerde.foldLeft(Materialized.as[K, java.lang.Long, KeyValueStore[Bytes, Array[Byte]]](store))((m,serde)=> m.withKeySerde(serde))
+    val materialized = keySerde.foldLeft(Materialized.as[K, java.lang.Long, KeyValueStore[Bytes, Array[Byte]]](store))(
+      (m, serde) => m.withKeySerde(serde)
+    )

     val c: KTableS[K, java.lang.Long] = inner.count(materialized)
     c.mapValues[Long](Long2long _)
   }

-  def reduce(reducer: (V, V) => V): KTableS[K, V] = {
+  def reduce(reducer: (V, V) => V): KTableS[K, V] =
     inner.reduce((v1, v2) => reducer(v1, v2))
-  }
-
-  def reduce(reducer: (V, V) => V,
-             materialized: Materialized[K, V, KeyValueStore[Bytes, Array[Byte]]]): KTableS[K, V] = {

+  def reduce(reducer: (V, V) => V, materialized: Materialized[K, V, KeyValueStore[Bytes, Array[Byte]]]): KTableS[K, V] =
     // need this explicit asReducer for Scala 2.11 or else the SAM conversion doesn't take place
     // works perfectly with Scala 2.12 though
     inner.reduce(((v1: V, v2: V) => reducer(v1, v2)).asReducer, materialized)
-  }
-
-  def reduce(reducer: (V, V) => V,
-             storeName: String)(implicit keySerde: Serde[K], valueSerde: Serde[V]): KTableS[K, V] = {

+  def reduce(reducer: (V, V) => V, storeName: String)(implicit keySerde: Serde[K],
+                                                      valueSerde: Serde[V]): KTableS[K, V] =
     // need this explicit asReducer for Scala 2.11 or else the SAM conversion doesn't take place
     // works perfectly with Scala 2.12 though
-    inner.reduce(((v1: V, v2: V) =>
-      reducer(v1, v2)).asReducer,
-      Materialized.as[K, V, KeyValueStore[Bytes, Array[Byte]]](storeName)
+    inner.reduce(
+      ((v1: V, v2: V) => reducer(v1, v2)).asReducer,
+      Materialized
+        .as[K, V, KeyValueStore[Bytes, Array[Byte]]](storeName)
        .withKeySerde(keySerde)
        .withValueSerde(valueSerde)
     )
-  }

-  def aggregate[VR](initializer: () => VR,
-                    aggregator: (K, V, VR) => VR): KTableS[K, VR] = {
+  def aggregate[VR](initializer: () => VR, aggregator: (K, V, VR) => VR): KTableS[K, VR] =
     inner.aggregate(initializer.asInitializer, aggregator.asAggregator)
-  }

   def aggregate[VR](initializer: () => VR,
-                    aggregator: (K, V, VR) => VR,
-                    materialized: Materialized[K, VR, KeyValueStore[Bytes, Array[Byte]]]): KTableS[K, VR] = {
+                    aggregator: (K, V, VR) => VR,
+                    materialized: Materialized[K, VR, KeyValueStore[Bytes, Array[Byte]]]): KTableS[K, VR] =
     inner.aggregate(initializer.asInitializer, aggregator.asAggregator, materialized)
-  }

   def windowedBy(windows: SessionWindows): SessionWindowedKStreamS[K, V] =
     inner.windowedBy(windows)
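A usage sketch, hypothetical and assuming kafka-streams-scala 0.2.1 on the classpath (the object and method names below are made up): thanks to this wrapper plus FunctionConversions, grouped-stream aggregations accept plain Scala function literals.

import com.lightbend.kafka.scala.streams.{KGroupedStreamS, KTableS}

object KGroupedStreamExample {
  // Sum the values per key with the single-argument reduce overload shown above.
  def totals(grouped: KGroupedStreamS[String, Long]): KTableS[String, Long] =
    grouped.reduce(_ + _)

  // Accumulate the character count per key with the two-argument aggregate overload.
  def totalLength(grouped: KGroupedStreamS[String, String]): KTableS[String, Int] =
    grouped.aggregate(() => 0, (_: String, v: String, acc: Int) => acc + v.length)
}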
