From f1f25290b1a59b88c54745d51c719b5448cf0848 Mon Sep 17 00:00:00 2001 From: "P. Oscar Boykin" Date: Tue, 19 Nov 2019 09:53:44 -1000 Subject: [PATCH 001/306] Setting version to 0.13.7-SNAPSHOT --- version.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.sbt b/version.sbt index a88f7953d..ee61ea4b2 100644 --- a/version.sbt +++ b/version.sbt @@ -1 +1 @@ -version in ThisBuild := "0.13.6" +version in ThisBuild := "0.13.7-SNAPSHOT" From 62bebc9a6e68c8b13788104123b5fc657e2bf251 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 20 Nov 2019 16:24:41 +0100 Subject: [PATCH 002/306] Update sbt-scoverage to 1.6.1 (#744) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 0e85809d0..690294b72 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,7 +12,7 @@ addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.2.1") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.0") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.8") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.8") From e4289694ed1069e0aa182391124ff84f12c73256 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 21 Nov 2019 15:15:14 +0100 Subject: [PATCH 003/306] Update sbt-sonatype to 3.8.1 (#745) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 690294b72..272e589ad 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -13,6 +13,6 @@ addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.2.1") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.8") +addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.8.1") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.8") From 3fd6e0ba3afa141173a8a3cacf8cf9a3d109a1d5 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 24 Nov 2019 16:48:03 +0100 Subject: [PATCH 004/306] Update sbt to 1.3.4 (#746) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 6adcdc753..5a9ed9251 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.3 +sbt.version=1.3.4 From 3b7074fa618fdbeb65454192cb0b68a99215ceae Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 1 Dec 2019 05:16:42 +0100 Subject: [PATCH 005/306] Update sbt-scalafix to 0.9.11 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 272e589ad..d5bd1a978 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -15,4 +15,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.8.1") addSbtPlugin("pl.project13.scala" % "sbt-jmh" 
% "0.3.7") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.8") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.11") From 200efc62490cf972ec885c571c9ef7e9816a5ee8 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Mon, 2 Dec 2019 16:50:55 -0500 Subject: [PATCH 006/306] (Fix) Update scalatest to 3.1.0 (#750) * Update scalatest to 3.1.0 * Add scalatestplus-scalacheck * fixup! Add scalatestplus-scalacheck --- .../algebird/AlgebraResolutionTest.scala | 4 ++-- .../algebird/spark/AlgebirdRDDTests.scala | 3 ++- .../twitter/algebird/AbstractAlgebraTest.scala | 2 +- .../algebird/AppendAggregatorTest.scala | 4 +++- .../com/twitter/algebird/ApproximateTest.scala | 5 +++-- .../com/twitter/algebird/BatchedTest.scala | 4 +++- .../com/twitter/algebird/BloomFilterTest.scala | 5 +++-- .../scala/com/twitter/algebird/BytesSpec.scala | 5 +++-- .../com/twitter/algebird/CheckProperties.scala | 4 ++-- .../twitter/algebird/CountMinSketchTest.scala | 18 ++++++++++-------- .../com/twitter/algebird/EventuallyTest.scala | 7 +++++-- .../com/twitter/algebird/ExpHistLaws.scala | 6 +++--- .../scala/com/twitter/algebird/FoldTest.scala | 4 ++-- .../com/twitter/algebird/HyperLogLogTest.scala | 7 +++++-- .../com/twitter/algebird/MinHasherTest.scala | 6 ++++-- .../algebird/MinMaxAggregatorSpec.scala | 4 +++- .../com/twitter/algebird/MomentsLaws.scala | 4 +++- .../com/twitter/algebird/MonadFoldMTest.scala | 4 +++- .../algebird/NumericSpecification.scala | 5 +++-- .../com/twitter/algebird/OperatorTest.scala | 4 +++- .../scala/com/twitter/algebird/QTreeTest.scala | 5 +++-- .../scala/com/twitter/algebird/ScanTest.scala | 5 +++-- .../com/twitter/algebird/SemigroupTest.scala | 4 ++-- .../com/twitter/algebird/SetDiffTest.scala | 5 +++-- .../com/twitter/algebird/SketchMapTest.scala | 4 +++- .../com/twitter/algebird/SpaceSaverTest.scala | 4 +++- .../twitter/algebird/SummingIteratorTest.scala | 5 +++-- .../algebird/TupleAggregatorsTest.scala | 4 +++- .../algebird/statistics/StatisticsTests.scala | 6 ++++-- .../summer/SyncSummingQueueProperties.scala | 5 +++-- build.sbt | 6 ++++-- 31 files changed, 100 insertions(+), 58 deletions(-) diff --git a/algebird-core/src/test/scala/com/twitter/algebird/AlgebraResolutionTest.scala b/algebird-core/src/test/scala/com/twitter/algebird/AlgebraResolutionTest.scala index 6466c85a1..44541ec33 100644 --- a/algebird-core/src/test/scala/com/twitter/algebird/AlgebraResolutionTest.scala +++ b/algebird-core/src/test/scala/com/twitter/algebird/AlgebraResolutionTest.scala @@ -1,12 +1,12 @@ package com.twitter.algebird -import org.scalatest.FunSuite +import org.scalatest.funsuite.AnyFunSuite /** * This is just a compilation test that we can resolve * algebird types from implicit algebra instances. 
*/ -class AlgebraResolutionTest extends FunSuite { +class AlgebraResolutionTest extends AnyFunSuite { // A type with no built in algebird algebras trait Empty {} test("algebra.Semigroup") { diff --git a/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala b/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala index 833b0bc19..16e409489 100644 --- a/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala +++ b/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala @@ -5,6 +5,7 @@ import org.apache.spark._ import org.apache.spark.rdd._ import org.scalatest._ import scala.reflect.ClassTag +import org.scalatest.funsuite.AnyFunSuite package test { // not needed in the algebird package, just testing the API @@ -18,7 +19,7 @@ package test { * This test almost always times out on travis. * Leaving at least a compilation test of using with spark */ -class AlgebirdRDDTest extends FunSuite with BeforeAndAfter { +class AlgebirdRDDTest extends AnyFunSuite with BeforeAndAfter { private var sc: SparkContext = _ diff --git a/algebird-test/src/test/scala/com/twitter/algebird/AbstractAlgebraTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/AbstractAlgebraTest.scala index afb4434d5..800111b97 100755 --- a/algebird-test/src/test/scala/com/twitter/algebird/AbstractAlgebraTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AbstractAlgebraTest.scala @@ -2,8 +2,8 @@ package com.twitter.algebird import com.twitter.algebird.BaseProperties._ import org.scalacheck.{Arbitrary, Gen} -import org.scalatest.Matchers import org.scalacheck.Prop._ +import org.scalatest.matchers.should.Matchers class AbstractAlgebraTest extends CheckProperties with Matchers { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala index f2e3d7175..45b73c7fa 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala @@ -1,8 +1,10 @@ package com.twitter.algebird import org.scalatest._ +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec -class AppendAggregatorTest extends WordSpec with Matchers { +class AppendAggregatorTest extends AnyWordSpec with Matchers { val data = Vector.fill(100) { scala.util.Random.nextInt(100) } val mpty = Vector.empty[Int] diff --git a/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala index dde6fb2b9..5d32bb0ac 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala @@ -1,8 +1,9 @@ package com.twitter.algebird import org.scalacheck.{Arbitrary, Gen} -import org.scalatest.{Matchers, WordSpec} import org.scalacheck.Prop._ +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec // TODO add tests for scala check that uses a statistical test to check // that an ApproximateBoolean agrees with the correct Boolean at least as often @@ -93,7 +94,7 @@ class ApproximateLaws extends CheckProperties { } } -class ApproximateTest extends WordSpec with Matchers { +class ApproximateTest extends AnyWordSpec with Matchers { "Approximate" should { "Correctly identify exact" in { diff --git 
a/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala index 46e4fba92..f2e867337 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala @@ -5,6 +5,8 @@ import org.scalatest._ import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks import org.scalacheck.{Arbitrary, Gen} import Arbitrary.arbitrary +import org.scalatest.matchers.should.Matchers +import org.scalatest.propspec.AnyPropSpec object Helpers { implicit def arbitraryBatched[A: Arbitrary]: Arbitrary[Batched[A]] = { @@ -50,7 +52,7 @@ class BatchedLaws extends CheckProperties { testBatchedMonoid[String]("String", 1000000) } -class BatchedTests extends PropSpec with Matchers with ScalaCheckPropertyChecks { +class BatchedTests extends AnyPropSpec with Matchers with ScalaCheckPropertyChecks { property(".iterator works") { forAll { (x: Int, xs: List[Int]) => Batched(x).append(xs).iterator.toList shouldBe (x :: xs) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala index 2c05de29d..b732d3795 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala @@ -3,8 +3,9 @@ package com.twitter.algebird import java.io.{ByteArrayOutputStream, ObjectOutputStream} import org.scalacheck.{Arbitrary, Gen} -import org.scalatest.{Matchers, WordSpec} import org.scalacheck.Prop._ +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec object BloomFilterTestUtils { def toSparse[A](bf: BF[A]): BFSparse[A] = bf match { @@ -285,7 +286,7 @@ class BloomFilterProperties extends ApproximateProperties("BloomFilter") { } } -class BloomFilterTest extends WordSpec with Matchers { +class BloomFilterTest extends AnyWordSpec with Matchers { val RAND = new scala.util.Random diff --git a/algebird-test/src/test/scala/com/twitter/algebird/BytesSpec.scala b/algebird-test/src/test/scala/com/twitter/algebird/BytesSpec.scala index 263107d62..989811262 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/BytesSpec.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/BytesSpec.scala @@ -2,9 +2,10 @@ package com.twitter.algebird import org.scalacheck.Gen import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks -import org.scalatest.{Matchers, WordSpec} +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec -class BytesSpec extends WordSpec with Matchers with ScalaCheckDrivenPropertyChecks { +class BytesSpec extends AnyWordSpec with Matchers with ScalaCheckDrivenPropertyChecks { "requires non-null arrays" in { an[IllegalArgumentException] should be thrownBy Bytes(null) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CheckProperties.scala b/algebird-test/src/test/scala/com/twitter/algebird/CheckProperties.scala index 090915226..e6e44467d 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CheckProperties.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CheckProperties.scala @@ -1,12 +1,12 @@ package com.twitter.algebird -import org.scalatest.PropSpec import org.scalatestplus.scalacheck.Checkers +import org.scalatest.propspec.AnyPropSpec /** * @author Mansur Ashraf. 
*/ -trait CheckProperties extends PropSpec with Checkers { +trait CheckProperties extends AnyPropSpec with Checkers { def property(testName: String, testTags: org.scalatest.Tag*)(testFun: org.scalacheck.Prop): Unit = super.property(testName, testTags: _*) { check { testFun } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala index affb3f76e..47b3d1b71 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala @@ -1,11 +1,13 @@ package com.twitter.algebird -import org.scalatest.{Matchers, PropSpec, WordSpec} import org.scalatestplus.scalacheck.{ScalaCheckDrivenPropertyChecks, ScalaCheckPropertyChecks} import org.scalacheck.{Arbitrary, Gen} import scala.util.Random import CMSHasherImplicits.CMSHasherBigInt +import org.scalatest.matchers.should.Matchers +import org.scalatest.propspec.AnyPropSpec +import org.scalatest.wordspec.AnyWordSpec object CmsLaws { def int2Bytes(i: Int): Bytes = Bytes(BigInt(i).toByteArray) @@ -144,7 +146,7 @@ class TopPctCmsLaws extends CheckProperties { } } -class SparseCMSTest extends WordSpec with Matchers with ScalaCheckDrivenPropertyChecks { +class SparseCMSTest extends AnyWordSpec with Matchers with ScalaCheckDrivenPropertyChecks { val DELTA = 1e-8 val EPS = 0.005 @@ -161,7 +163,7 @@ class SparseCMSTest extends WordSpec with Matchers with ScalaCheckDrivenProperty } } -class CMSInstanceTest extends WordSpec with Matchers with ScalaCheckDrivenPropertyChecks { +class CMSInstanceTest extends AnyWordSpec with Matchers with ScalaCheckDrivenPropertyChecks { val DELTA = 1e-8 val EPS = 0.005 @@ -179,7 +181,7 @@ class CMSInstanceTest extends WordSpec with Matchers with ScalaCheckDrivenProper /** * Verifies contramap functionality, which allows us to translate `CMSHasher[K]` into `CMSHasher[L]`, given `f: L => K`. 
*/ -class CMSContraMapSpec extends WordSpec with Matchers with ScalaCheckDrivenPropertyChecks { +class CMSContraMapSpec extends AnyWordSpec with Matchers with ScalaCheckDrivenPropertyChecks { "translates CMSHasher[K] into CMSHasher[L], given a function f: L => K" in { // Given a "source" CMSHasher[K] @@ -412,7 +414,7 @@ class CmsProperties extends ApproximateProperties("CountMinSketch") { abstract class CMSRingTest[K: CMSHasher: Ring] extends CMSTest[K]((x: Int) => implicitly[Ring[K]].fromInt(x)) abstract class CMSTest[K: CMSHasher](toK: Int => K) - extends WordSpec + extends AnyWordSpec with Matchers with ScalaCheckDrivenPropertyChecks { @@ -944,7 +946,7 @@ abstract class CMSTest[K: CMSHasher](toK: Int => K) } } -class CMSFunctionsSpec extends PropSpec with ScalaCheckPropertyChecks with Matchers { +class CMSFunctionsSpec extends AnyPropSpec with ScalaCheckPropertyChecks with Matchers { property("roundtrips width->eps->width") { forAll { (i: Int) => whenever(i > 0) { @@ -993,7 +995,7 @@ class CMSFunctionsSpec extends PropSpec with ScalaCheckPropertyChecks with Match } -class CMSParamsSpec extends PropSpec with ScalaCheckPropertyChecks with Matchers { +class CMSParamsSpec extends AnyPropSpec with ScalaCheckPropertyChecks with Matchers { val AnyEps = 0.001 val AnyDelta = 1e-5 @@ -1044,7 +1046,7 @@ class CMSHasherBytesSpec extends CMSHasherSpec[Bytes](CmsLaws.int2Bytes(_)) abstract class CMSHasherRingSpec[K: CMSHasher: Ring] extends CMSHasherSpec[K](implicitly[Ring[K]].fromInt(_)) abstract class CMSHasherSpec[K: CMSHasher](toK: Int => K) - extends PropSpec + extends AnyPropSpec with ScalaCheckPropertyChecks with Matchers { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala index b72d470d6..7bd13259e 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala @@ -3,6 +3,9 @@ package com.twitter.algebird import org.scalatest._ import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks import org.scalacheck.{Arbitrary, Gen, Prop} +import org.scalatest.matchers.should.Matchers +import org.scalatest.propspec.AnyPropSpec +import org.scalatest.wordspec.AnyWordSpec class EventuallyRingLaws extends CheckProperties { import BaseProperties._ @@ -55,7 +58,7 @@ class EventuallyMonoidLaws extends CheckProperties { } -class EventuallyTest extends WordSpec with Matchers { +class EventuallyTest extends AnyWordSpec with Matchers { val eventuallyMonoid = new EventuallyMonoid[Int, String](_.length)(_.length > 100) @@ -152,7 +155,7 @@ class EventuallyTest extends WordSpec with Matchers { } -class EventuallyAggregatorLaws extends PropSpec with ScalaCheckPropertyChecks with Matchers { +class EventuallyAggregatorLaws extends AnyPropSpec with ScalaCheckPropertyChecks with Matchers { implicit def aggregator[A, B, C]( implicit prepare: Arbitrary[A => B], diff --git a/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala index 1e4e49663..d0ac10995 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala @@ -2,10 +2,10 @@ package com.twitter.algebird import com.twitter.algebird.scalacheck.{NonEmptyVector, PosNum} import com.twitter.algebird.scalacheck.arbitrary._ -import org.scalatest.PropSpec import 
org.scalatestplus.scalacheck.ScalaCheckPropertyChecks +import org.scalatest.propspec.AnyPropSpec -class ExpHistLaws extends PropSpec with ScalaCheckPropertyChecks { +class ExpHistLaws extends AnyPropSpec with ScalaCheckPropertyChecks { import ExpHist.{Bucket, Canonical, Config, Timestamp} property("Increment example from DGIM paper") { @@ -252,7 +252,7 @@ class ExpHistLaws extends PropSpec with ScalaCheckPropertyChecks { } } -class CanonicalLaws extends PropSpec with ScalaCheckPropertyChecks { +class CanonicalLaws extends AnyPropSpec with ScalaCheckPropertyChecks { import ExpHist.Canonical._ property("l-canonical representation is all l or l+1s except for last") { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala index aa0e1122e..303806393 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala @@ -1,8 +1,8 @@ package com.twitter.algebird -import org.scalatest._ +import org.scalatest.wordspec.AnyWordSpec -class FoldTest extends WordSpec { +class FoldTest extends AnyWordSpec { sealed trait Case[I, O] { def expected: O diff --git a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala index 94ebc8790..6c1c3f326 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala @@ -8,6 +8,9 @@ import org.scalacheck.{Arbitrary, Gen, Prop} import scala.collection.BitSet import java.lang.AssertionError +import org.scalatest.matchers.should.Matchers +import org.scalatest.propspec.AnyPropSpec +import org.scalatest.wordspec.AnyWordSpec object ReferenceHyperLogLog { @@ -90,7 +93,7 @@ class HyperLogLogLaws extends CheckProperties { } /* Ensure jRhoW matches referenceJRhoW */ -class jRhoWMatchTest extends PropSpec with ScalaCheckPropertyChecks with Matchers { +class jRhoWMatchTest extends AnyPropSpec with ScalaCheckPropertyChecks with Matchers { import HyperLogLog._ implicit val hashGen = Arbitrary { @@ -286,7 +289,7 @@ class SetSizeAggregatorProperties extends ApproximateProperties("SetSizeAggregat } } -class HyperLogLogTest extends WordSpec with Matchers { +class HyperLogLogTest extends AnyWordSpec with Matchers { import HyperLogLog._ //Get the implicit int2bytes, long2Bytes diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala index 9e8ac9047..ec4abfe63 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala @@ -2,8 +2,10 @@ package com.twitter.algebird import com.twitter.algebird.BaseProperties._ import org.scalacheck.{Arbitrary, Gen} -import org.scalatest.{Matchers, _} +import org.scalatest._ import scala.math.Equiv +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec class MinHasherTest extends CheckProperties { implicit val mhMonoid = new MinHasher32(0.5, 512) @@ -17,7 +19,7 @@ class MinHasherTest extends CheckProperties { } } -class MinHasherSpec extends WordSpec with Matchers { +class MinHasherSpec extends AnyWordSpec with Matchers { val r = new java.util.Random def test[H](mh: MinHasher[H], similarity: Double, epsilon: Double) = { diff --git 
a/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala b/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala index 46d054511..e5fd755ba 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala @@ -1,8 +1,10 @@ package com.twitter.algebird import org.scalatest._ +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec -class MinMaxAggregatorSpec extends WordSpec with Matchers { +class MinMaxAggregatorSpec extends AnyWordSpec with Matchers { sealed trait TestElementParent case object TestElementA extends TestElementParent case object TestElementB extends TestElementParent diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala index 819f088e1..2ea91ac31 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala @@ -3,6 +3,8 @@ package com.twitter.algebird import org.scalatest._ import com.twitter.algebird.BaseProperties._ import com.twitter.algebird.scalacheck.arbitrary._ +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec class MomentsLaws extends CheckProperties { val EPS = 1e-10 @@ -20,7 +22,7 @@ class MomentsLaws extends CheckProperties { } } -class MomentsTest extends WordSpec with Matchers { +class MomentsTest extends AnyWordSpec with Matchers { def testApproxEq(f1: Double, f2: Double): Unit = assert(approxEq(1e-10)(f1, f2)) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MonadFoldMTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/MonadFoldMTest.scala index 8205cbd39..169c5b140 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MonadFoldMTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MonadFoldMTest.scala @@ -1,8 +1,10 @@ package com.twitter.algebird import org.scalatest._ +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec -class MonadFoldMTest extends WordSpec with Matchers { +class MonadFoldMTest extends AnyWordSpec with Matchers { def binSmalls(x: Int, y: Int): Option[Int] = if (y > 9) None else Some(x + y) "A monad foldM" should { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala b/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala index 988e2c3fd..cf9057da4 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala @@ -1,15 +1,16 @@ package com.twitter.algebird -import org.scalatest.{Matchers, PropSpec} import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks import org.scalacheck.Arbitrary +import org.scalatest.matchers.should.Matchers +import org.scalatest.propspec.AnyPropSpec /** * Tests abstract algebra against scala's Numeric trait * Numeric is basically the ring trait with ordering, so we can use it * below to test all the numeric traits. 
*/ -class NumericSpecification extends PropSpec with ScalaCheckPropertyChecks with Matchers { +class NumericSpecification extends AnyPropSpec with ScalaCheckPropertyChecks with Matchers { def plusNumericProp[T: Monoid: Numeric: Arbitrary] = forAll { (a: T, b: T) => val mon = implicitly[Monoid[T]] val num = implicitly[Numeric[T]] diff --git a/algebird-test/src/test/scala/com/twitter/algebird/OperatorTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/OperatorTest.scala index 8e8eca46c..050d957ae 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/OperatorTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/OperatorTest.scala @@ -3,8 +3,10 @@ package com.twitter.algebird import org.scalatest._ import Operators._ +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec -class OperatorTest extends WordSpec with Matchers { +class OperatorTest extends AnyWordSpec with Matchers { "Operators" should { "allow plus" in { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala index 5469ae3b3..cb1accf5b 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala @@ -20,7 +20,8 @@ import org.scalatest._ import org.scalacheck.Arbitrary import org.scalacheck.Gen.choose -import org.scalatest.Matchers +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec class QTreeLaws extends CheckProperties { import BaseProperties._ @@ -35,7 +36,7 @@ class QTreeLaws extends CheckProperties { } } -class QTreeTest extends WordSpec with Matchers { +class QTreeTest extends AnyWordSpec with Matchers { def randomList(n: Long) = (1L to n).map(_ => math.random) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/ScanTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/ScanTest.scala index 4538f4f8f..37732df67 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/ScanTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/ScanTest.scala @@ -2,9 +2,10 @@ package com.twitter.algebird import org.scalacheck.Gen import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks -import org.scalatest.{Matchers, WordSpec} import scala.collection.mutable.Queue +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec object ScanTest { // normal people will use Scan not Scan.Aux, so it's good for most of the tests to be using the more common interface. 
@@ -56,7 +57,7 @@ object ScanTest { } -class ScanTest extends WordSpec with Matchers with ScalaCheckDrivenPropertyChecks { +class ScanTest extends AnyWordSpec with Matchers with ScalaCheckDrivenPropertyChecks { import ScanTest._ def freeScanLaws(freeScan: StringScan): Unit = diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SemigroupTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SemigroupTest.scala index b49749545..c49776e87 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SemigroupTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SemigroupTest.scala @@ -1,9 +1,9 @@ package com.twitter.algebird -import org.scalatest.FunSuite import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks +import org.scalatest.funsuite.AnyFunSuite -class SemigroupTest extends FunSuite with ScalaCheckPropertyChecks { +class SemigroupTest extends AnyFunSuite with ScalaCheckPropertyChecks { test("Semigroup.maybePlus works") { forAll { s: String => assert(Semigroup.maybePlus(None, s) == s) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala index 00e330635..1ebb5729d 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala @@ -2,10 +2,11 @@ package com.twitter.algebird import org.scalacheck._ import org.scalacheck.Prop._ -import org.scalatest.{Matchers, WordSpec} import org.scalatestplus.scalacheck.Checkers import Arbitrary.arbitrary +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec object SetDiffTest { implicit def arbSetDiff[T: Arbitrary]: Arbitrary[SetDiff[T]] = @@ -14,7 +15,7 @@ object SetDiffTest { }) } -class SetDiffTest extends WordSpec with Matchers with Checkers { +class SetDiffTest extends AnyWordSpec with Matchers with Checkers { import SetDiffTest._ "SetDiff" should { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala index 5dd63e2c4..e72b485c0 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala @@ -2,6 +2,8 @@ package com.twitter.algebird import org.scalatest._ import org.scalacheck.{Arbitrary, Gen} +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec object SketchMapTestImplicits { val DELTA = 1e-6 @@ -34,7 +36,7 @@ class SketchMapLaws extends CheckProperties { } } -class SketchMapTest extends WordSpec with Matchers { +class SketchMapTest extends AnyWordSpec with Matchers { import SketchMapTestImplicits._ import HyperLogLog.int2Bytes diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala index 4506dc95e..86885a17b 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala @@ -5,6 +5,8 @@ import org.scalacheck.{Arbitrary, Gen} import org.scalatest._ import scala.util.Try +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec class SpaceSaverLaws extends CheckProperties { @@ -65,7 +67,7 @@ class SpaceSaverLaws extends CheckProperties { } } -class SpaceSaverTest extends WordSpec with Matchers { +class SpaceSaverTest extends 
AnyWordSpec with Matchers { "SpaceSaver" should { "produce a top 20 with exact bounds" in { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala index b14af56e8..8180c8d7c 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala @@ -16,9 +16,10 @@ limitations under the License. package com.twitter.algebird -import org.scalatest.{Matchers, PropSpec} import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks import org.scalacheck.{Arbitrary, Gen} +import org.scalatest.matchers.should.Matchers +import org.scalatest.propspec.AnyPropSpec object SummingIteratorTest { implicit def mapEquiv[K, V: Monoid: Equiv]: Equiv[Map[K, V]] = @@ -38,7 +39,7 @@ object SummingIteratorTest { } } -class SummingIteratorTest extends PropSpec with ScalaCheckPropertyChecks with Matchers { +class SummingIteratorTest extends AnyPropSpec with ScalaCheckPropertyChecks with Matchers { import SummingIteratorTest._ def sumEquiv[V: Semigroup: Equiv](it0: Iterator[V], it1: Iterator[V]): Boolean = StatefulSummerLaws.zeroEquiv(Semigroup.sumOption(it0), Semigroup.sumOption(it1)) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala index 24c527e3e..cc8b8098d 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala @@ -1,8 +1,10 @@ package com.twitter.algebird import org.scalatest._ +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec -class TupleAggregatorsTest extends WordSpec with Matchers { +class TupleAggregatorsTest extends AnyWordSpec with Matchers { // This gives you an implicit conversion from tuples of aggregators // to aggregator of tuples diff --git a/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala b/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala index a5dab73e7..428c92be9 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala @@ -4,7 +4,9 @@ import com.twitter.algebird.BaseProperties._ import com.twitter.algebird.CheckProperties import org.scalacheck.Arbitrary import org.scalacheck.Gen._ -import org.scalatest.{Matchers, _} +import org.scalatest._ +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec class StatisticsRingLaws extends CheckProperties with Matchers { implicit val statsRing = new StatisticsRing[Int] @@ -20,7 +22,7 @@ class StatisticsMonoidLaws extends CheckProperties with Matchers { property("StatisticsMonoid is a Monoid") { monoidLaws[Int] } } -class StatisticsTest extends WordSpec with Matchers { +class StatisticsTest extends AnyWordSpec with Matchers { // the test framework garbles the exceptions :/ lazy val statsMonoid = new StatisticsMonoid[Int] diff --git a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/SyncSummingQueueProperties.scala b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/SyncSummingQueueProperties.scala index bd9f16829..3e060ce77 100644 --- a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/SyncSummingQueueProperties.scala 
+++ b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/SyncSummingQueueProperties.scala @@ -16,10 +16,11 @@ package com.twitter.algebird.util.summer -import org.scalatest.{Matchers, PropSpec} import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks +import org.scalatest.matchers.should.Matchers +import org.scalatest.propspec.AnyPropSpec -class SyncSummingQueueProperties extends PropSpec with ScalaCheckPropertyChecks with Matchers { +class SyncSummingQueueProperties extends AnyPropSpec with ScalaCheckPropertyChecks with Matchers { import AsyncSummerLaws._ property("Summing with and without the summer should match") { forAll { diff --git a/build.sbt b/build.sbt index 79b49b6be..820d9cf82 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,8 @@ val bijectionVersion = "0.9.6" val javaEwahVersion = "1.1.7" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.0.8" +val scalaTestVersion = "3.1.0" +val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.2" val scalaCollectionCompat = "2.1.2" val utilVersion = "19.11.0" @@ -272,7 +273,8 @@ lazy val algebirdTest = module("test") libraryDependencies ++= Seq( "org.scalacheck" %% "scalacheck" % scalacheckVersion, - "org.scalatest" %% "scalatest" % scalaTestVersion + "org.scalatest" %% "scalatest" % scalaTestVersion, + "org.scalatestplus" %% "scalatestplus-scalacheck" % scalaTestPlusVersion % "test" ) ++ { if (isScala213x(scalaVersion.value)) { Seq() From edfa0719651d13a7928a9e2ae9341ff961ff7ba2 Mon Sep 17 00:00:00 2001 From: Brian Wignall Date: Fri, 6 Dec 2019 07:34:46 -0500 Subject: [PATCH 007/306] Fix typo (#754) --- docs/src/main/tut/datatypes/approx/countminsketch.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/main/tut/datatypes/approx/countminsketch.md b/docs/src/main/tut/datatypes/approx/countminsketch.md index 96d123f3f..c9b862db6 100644 --- a/docs/src/main/tut/datatypes/approx/countminsketch.md +++ b/docs/src/main/tut/datatypes/approx/countminsketch.md @@ -8,7 +8,7 @@ scaladoc: "#com.twitter.algebird.CountMinSketch" # Count Min Sketch -Count-min sketch is a probablistic data structure that estimates the frequencies of elements in a data stream. Count-min sketches are somewhat similar to Bloom filters; the main distinction is that Bloom filters represent sets, while count-min sketches represent multisets. For more info, see [Wikipedia](https://en.wikipedia.org/wiki/Count%E2%80%93min_sketch). +Count-min sketch is a probabilistic data structure that estimates the frequencies of elements in a data stream. Count-min sketches are somewhat similar to Bloom filters; the main distinction is that Bloom filters represent sets, while count-min sketches represent multisets. For more info, see [Wikipedia](https://en.wikipedia.org/wiki/Count%E2%80%93min_sketch). In Algebird, count-min sketches are represented as the abstract class `CMS`, along with the `CMSMonoid` class. 
Here's an example usage: From 2274c02775d65d22468624f39e50a6b9aa7ca81c Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 9 Dec 2019 19:46:53 +0100 Subject: [PATCH 008/306] Update sbt-scalafmt to 2.3.0 (#755) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index d5bd1a978..83bab4892 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -9,7 +9,7 @@ addSbtPlugin("com.47deg" % "sbt-microsites" % "0.9.7") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.2") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.12") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.0") -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.2.1") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") From 9f566418fef488d4cc8f260f9e3f4ccb7adda813 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Thu, 12 Dec 2019 07:53:31 -0800 Subject: [PATCH 009/306] (Fix) Update sbt-microsites to 1.0.2 (#756) * Update sbt-microsites to 1.0.2 * Add missing setting --- build.sbt | 1 + project/plugins.sbt | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 820d9cf82..313e74d85 100644 --- a/build.sbt +++ b/build.sbt @@ -357,6 +357,7 @@ lazy val docSettings = Seq( "gray-lighter" -> "#F4F3F4", "white-color" -> "#FFFFFF" ), + micrositeCompilingDocsTool := WithTut, autoAPIMappings := true, docsMappingsAPIDir := "api", addMappingsToSiteDir(mappings in (ScalaUnidoc, packageDoc), docsMappingsAPIDir), diff --git a/project/plugins.sbt b/project/plugins.sbt index 83bab4892..fbf8b2ca9 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,7 +5,7 @@ resolvers ++= Seq( ) ) -addSbtPlugin("com.47deg" % "sbt-microsites" % "0.9.7") +addSbtPlugin("com.47deg" % "sbt-microsites" % "1.0.2") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.2") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.12") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.0") From 4cbb3b766e51906e24e64dc45f7c34d20068476e Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 13 Dec 2019 09:58:08 +0100 Subject: [PATCH 010/306] Update scala-collection-compat to 2.1.3 (#758) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 313e74d85..4d95ce999 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.1.0" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.2" -val scalaCollectionCompat = "2.1.2" +val scalaCollectionCompat = "2.1.3" val utilVersion = "19.11.0" val sparkVersion = "2.4.4" From 4d5228b51483e4260b4406ceb486b4119c34e126 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 15 Dec 2019 18:39:38 +0100 Subject: [PATCH 011/306] Update util-core to 19.12.0 (#759) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 4d95ce999..88cf2a90c 100644 --- a/build.sbt +++ b/build.sbt @@ -12,7 +12,7 @@ val scalaTestVersion = "3.1.0" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.2" val scalaCollectionCompat = "2.1.3" -val utilVersion = "19.11.0" +val utilVersion = "19.12.0" val sparkVersion = 
"2.4.4" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = From cf4f94b1f1883db4c6f251604180cc9f6186fb62 Mon Sep 17 00:00:00 2001 From: Neville Li Date: Mon, 16 Dec 2019 14:29:16 -0500 Subject: [PATCH 012/306] Update sbt to 1.3.5 (#763) * Update sbt to 1.3.5 * move ruby steps to 2.12+JDK8 build only and remove broken `gem update --system` --- .travis.yml | 15 ++++++--------- project/build.properties | 2 +- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/.travis.yml b/.travis.yml index 7cf95e41b..a8e77be39 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,6 +28,12 @@ matrix: - scala: 2.12.9 jdk: openjdk8 + before_install: + - export PATH=${PATH}:./vendor/bundle + install: + - rvm use 2.6.1 --install --fuzzy + - gem install sass + - gem install jekyll -v 3.2.1 script: sbt \ "++$TRAVIS_SCALA_VERSION clean" \ "++$TRAVIS_SCALA_VERSION test" \ @@ -52,15 +58,6 @@ matrix: "++$TRAVIS_SCALA_VERSION clean" \ "++$TRAVIS_SCALA_VERSION test" -before_install: - - export PATH=${PATH}:./vendor/bundle - -install: - - rvm use 2.6.1 --install --fuzzy - - gem update --system - - gem install sass - - gem install jekyll -v 3.2.1 - cache: directories: - $HOME/.cache diff --git a/project/build.properties b/project/build.properties index 5a9ed9251..6624da70b 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.4 +sbt.version=1.3.5 From 2421a6bff278f98db1339cff173a7c3c804ea2de Mon Sep 17 00:00:00 2001 From: Neville Li Date: Tue, 17 Dec 2019 11:50:00 -0500 Subject: [PATCH 013/306] Update scalacheck to 1.14.3 (#764) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 88cf2a90c..04469e8f2 100644 --- a/build.sbt +++ b/build.sbt @@ -10,7 +10,7 @@ val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.1.0" val scalaTestPlusVersion = "3.1.0.0-RC2" -val scalacheckVersion = "1.14.2" +val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.1.3" val utilVersion = "19.12.0" val sparkVersion = "2.4.4" From 5295e58f16c7174604cc5e7ab5fc28a43611b3bc Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 18 Dec 2019 15:22:15 +0100 Subject: [PATCH 014/306] Update sbt-pgp to 2.0.1 (#765) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index fbf8b2ca9..03c0acdbf 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -8,7 +8,7 @@ resolvers ++= Seq( addSbtPlugin("com.47deg" % "sbt-microsites" % "1.0.2") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.2") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.12") -addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.0") +addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") From 01aed476fa11b583dd1bb18cc83d8a60d98f7a2c Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 26 Dec 2019 15:28:09 +0100 Subject: [PATCH 015/306] Update sbt to 1.3.6 (#766) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 6624da70b..00b48d978 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.5 +sbt.version=1.3.6 From 
3810829754f2ec32d9f6caeb1efbd287da979dfb Mon Sep 17 00:00:00 2001 From: Neville Li Date: Fri, 27 Dec 2019 15:44:29 -0500 Subject: [PATCH 016/306] Update scalafmt-core to 2.3.2 (#768) --- .scalafmt.conf | 4 +- .../algebird/benchmark/CMSBenchmark.scala | 4 +- .../algebird/benchmark/TopCMSBenchmark.scala | 4 +- .../twitter/algebird/BloomFilterTest.scala | 98 +++++++++---------- .../twitter/algebird/HyperLogLogTest.scala | 8 +- .../algebird/MinMaxAggregatorSpec.scala | 19 ++-- project/GenTupleAggregators.scala | 8 +- 7 files changed, 67 insertions(+), 78 deletions(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index cce54d550..9bfac3dc3 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,6 +1,6 @@ -version=2.1.0 +version=2.3.2 maxColumn = 110 docstrings = JavaDoc newlines.penalizeSingleSelectMultiArgList = false align.openParenCallSite = false -rewrite.rules = [AvoidInfix, SortImports, RedundantBraces, RedundantParens, PreferCurlyFors] \ No newline at end of file +rewrite.rules = [AvoidInfix, SortImports, RedundantBraces, RedundantParens, PreferCurlyFors] diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSBenchmark.scala index 5439c7db2..5f2ca7d9f 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSBenchmark.scala @@ -52,10 +52,10 @@ object CMSBenchmark { val bitsPerChar = 16 largeStrings = (1 to size).map(_ => nextString(MaxBits / bitsPerChar)).toVector largeBigInts = largeStrings.map(s => BigInt(s.getBytes)) - largeBigDecimals = largeStrings.map(s => { + largeBigDecimals = largeStrings.map { s => val md = (s.head % 256) - 128 BigDecimal(BigInt(s.tail.getBytes)) * BigDecimal(1).pow(md) - }) + } smallLongs = (1 to size).map(_.toLong).toVector smallBigInts = (1 to size).map(BigInt(_)).toVector diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/TopCMSBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/TopCMSBenchmark.scala index 7be3b117e..2ed20543d 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/TopCMSBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/TopCMSBenchmark.scala @@ -53,10 +53,10 @@ object TopCMSBenchmark { val bitsPerChar = 16 largeStrings = (1 to size).map(_ => nextString(MaxBits / bitsPerChar)).toVector largeBigInts = largeStrings.map(s => BigInt(s.getBytes)) - largeBigDecimals = largeStrings.map(s => { + largeBigDecimals = largeStrings.map { s => val md = (s.head % 256) - 128 BigDecimal(BigInt(s.tail.getBytes)) * BigDecimal(1).pow(md) - }) + } smallLongs = (1 to size).map(_.toLong).toVector smallBigInts = (1 to size).map(BigInt(_)).toVector diff --git a/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala index b732d3795..234405b2b 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala @@ -308,15 +308,13 @@ class BloomFilterTest extends AnyWordSpec with Matchers { "identify all true positives" in { (0 to 100).foreach { _ => - { - val bfMonoid = new BloomFilterMonoid[String](RAND.nextInt(5) + 1, RAND.nextInt(64) + 32) - val numEntries = 5 - val entries = (0 until numEntries).map(_ => RAND.nextInt.toString) - val bf = 
bfMonoid.create(entries: _*) - - entries.foreach { i => - assert(bf.contains(i.toString).isTrue) - } + val bfMonoid = new BloomFilterMonoid[String](RAND.nextInt(5) + 1, RAND.nextInt(64) + 32) + val numEntries = 5 + val entries = (0 until numEntries).map(_ => RAND.nextInt.toString) + val bf = bfMonoid.create(entries: _*) + + entries.foreach { i => + assert(bf.contains(i.toString).isTrue) } } } @@ -325,29 +323,25 @@ class BloomFilterTest extends AnyWordSpec with Matchers { val iter = 10000 Seq(0.1, 0.01, 0.001).foreach { fpProb => - { - val fps = (0 until iter).map { _ => - { - val numEntries = RAND.nextInt(10) + 1 + val fps = (0 until iter).map { _ => + val numEntries = RAND.nextInt(10) + 1 - val bfMonoid = BloomFilter[String](numEntries, fpProb) + val bfMonoid = BloomFilter[String](numEntries, fpProb) - val entries = RAND - .shuffle((0 until 1000).toList) - .take(numEntries + 1) - .map(_.toString) - val bf = bfMonoid.create(entries.drop(1): _*) + val entries = RAND + .shuffle((0 until 1000).toList) + .take(numEntries + 1) + .map(_.toString) + val bf = bfMonoid.create(entries.drop(1): _*) - if (bf.contains(entries(0)).isTrue) 1.0 else 0.0 - } - } + if (bf.contains(entries(0)).isTrue) 1.0 else 0.0 + } - val observedFpProb = fps.sum / fps.size + val observedFpProb = fps.sum / fps.size - // the 2.5 is a fudge factor to make the probability of it low - // in tests - assert(observedFpProb <= 2.5 * fpProb) - } + // the 2.5 is a fudge factor to make the probability of it low + // in tests + assert(observedFpProb <= 2.5 * fpProb) } } @@ -366,15 +360,13 @@ class BloomFilterTest extends AnyWordSpec with Matchers { "work as an Aggregator" in { (0 to 10).foreach { _ => - { - val aggregator = BloomFilterAggregator[String](RAND.nextInt(5) + 1, RAND.nextInt(64) + 32) - val numEntries = 5 - val entries = (0 until numEntries).map(_ => RAND.nextInt.toString) - val bf = aggregator(entries) - - entries.foreach { i => - assert(bf.contains(i.toString).isTrue) - } + val aggregator = BloomFilterAggregator[String](RAND.nextInt(5) + 1, RAND.nextInt(64) + 32) + val numEntries = 5 + val entries = (0 until numEntries).map(_ => RAND.nextInt.toString) + val bf = aggregator(entries) + + entries.foreach { i => + assert(bf.contains(i.toString).isTrue) } } } @@ -417,25 +409,23 @@ class BloomFilterTest extends AnyWordSpec with Matchers { "be identical to method `+`" in { (0 to 100).foreach { _ => - { - val bfMonoid = new BloomFilterMonoid[String](RAND.nextInt(5) + 1, RAND.nextInt(64) + 32) - val numEntries = 5 - val entries = (0 until numEntries).map(_ => RAND.nextInt.toString) - val bf = bfMonoid.create(entries: _*) - entries - .map { entry => - (entry, bfMonoid.create(entry)) - } - .foldLeft((bfMonoid.zero, bfMonoid.zero)) { - case ((left, leftAlt), (entry, _)) => - val (newLeftAlt, contained) = leftAlt.checkAndAdd(entry) - left.contains(entry) shouldBe contained - (left + entry, newLeftAlt) - } - - entries.foreach { i => - assert(bf.contains(i.toString).isTrue) + val bfMonoid = new BloomFilterMonoid[String](RAND.nextInt(5) + 1, RAND.nextInt(64) + 32) + val numEntries = 5 + val entries = (0 until numEntries).map(_ => RAND.nextInt.toString) + val bf = bfMonoid.create(entries: _*) + entries + .map { entry => + (entry, bfMonoid.create(entry)) } + .foldLeft((bfMonoid.zero, bfMonoid.zero)) { + case ((left, leftAlt), (entry, _)) => + val (newLeftAlt, contained) = leftAlt.checkAndAdd(entry) + left.contains(entry) shouldBe contained + (left + entry, newLeftAlt) + } + + entries.foreach { i => + assert(bf.contains(i.toString).isTrue) 
} } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala index 6c1c3f326..3a719c11a 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala @@ -388,7 +388,7 @@ class HyperLogLogTest extends AnyWordSpec with Matchers { } "work as an Aggregator and return a HLL" in { - List(5, 7, 8, 10).foreach(bits => { + List(5, 7, 8, 10).foreach { bits => val aggregator = HyperLogLogAggregator(bits) val data = (0 to 10000).map { _ => r.nextInt(1000) @@ -398,11 +398,11 @@ class HyperLogLogTest extends AnyWordSpec with Matchers { val approxCount = aggregator(data.map(int2Bytes(_))).approximateSize.estimate.toDouble assert(scala.math.abs(exact - approxCount) / exact < 3.5 * aveErrorOf(bits)) - }) + } } "work as an Aggregator and return size" in { - List(5, 7, 8, 10).foreach(bits => { + List(5, 7, 8, 10).foreach { bits => val aggregator = HyperLogLogAggregator.sizeAggregator(bits) val data = (0 to 10000).map { _ => r.nextInt(1000) @@ -411,7 +411,7 @@ class HyperLogLogTest extends AnyWordSpec with Matchers { val estimate = aggregator(data.map(int2Bytes(_))) assert(scala.math.abs(exact - estimate) / exact < 3.5 * aveErrorOf(bits)) - }) + } } "correctly downsize sparse HLL" in { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala b/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala index e5fd755ba..e4d99ef03 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala @@ -10,16 +10,15 @@ class MinMaxAggregatorSpec extends AnyWordSpec with Matchers { case object TestElementB extends TestElementParent case object TestElementC extends TestElementParent - implicit val testOrdering = Ordering.fromLessThan[TestElementParent]( - (x, y) => - (x, y) match { - case (TestElementA, TestElementA) => false - case (TestElementA, _) => true - case (TestElementB, TestElementB) => false - case (TestElementB, TestElementA) => false - case (TestElementB, TestElementC) => true - case (TestElementC, _) => false - } + implicit val testOrdering = Ordering.fromLessThan[TestElementParent]((x, y) => + (x, y) match { + case (TestElementA, TestElementA) => false + case (TestElementA, _) => true + case (TestElementB, TestElementB) => false + case (TestElementB, TestElementA) => false + case (TestElementB, TestElementC) => true + case (TestElementC, _) => false + } ) val data = List(TestElementC, TestElementA, TestElementB) diff --git a/project/GenTupleAggregators.scala b/project/GenTupleAggregators.scala index b4fe230fc..3c995a085 100644 --- a/project/GenTupleAggregators.scala +++ b/project/GenTupleAggregators.scala @@ -52,7 +52,7 @@ object MultiAggregator { def genMethods(max: Int, defStr: String, name: Option[String], isMonoid: Boolean = false): String = (2 to max) - .map(i => { + .map { i => val methodName = name.getOrElse("from%d".format(i)) val aggType = if (isMonoid) "Monoid" else "" val nums = (1 to i) @@ -93,7 +93,7 @@ object MultiAggregator { tupleBs, present ) - }) + } .mkString("\n") def genMapMethods(max: Int, isMonoid: Boolean = false): String = { @@ -116,7 +116,7 @@ object MultiAggregator { """.stripMargin (2 to max) - .map(aggrCount => { + .map { aggrCount => val aggrNums = 1 to aggrCount val inputAggs = aggrNums.map(i => s"agg$i: (K, 
${inputAggregatorType}[A, B$i, C])").mkString(", ") @@ -142,7 +142,7 @@ object MultiAggregator { | ) | } |}""".stripMargin - }) + } .mkString("\n") + aggregatorForOneItem } } From eee6f0c2d4ca4500756f89ad5f5cffbbfa805962 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 3 Jan 2020 16:30:46 +0100 Subject: [PATCH 017/306] Update junit to 4.13 (#769) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 04469e8f2..d92b0f9ec 100644 --- a/build.sbt +++ b/build.sbt @@ -72,7 +72,7 @@ val sharedSettings = Seq( }, javacOptions ++= Seq("-target", "1.6", "-source", "1.6"), libraryDependencies ++= Seq( - "junit" % "junit" % "4.12" % Test, + "junit" % "junit" % "4.13" % Test, "com.novocode" % "junit-interface" % "0.11" % Test ), // Publishing options: From 49a100eba3d6f671b4c7c160f8ca7bce28af93d2 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 13 Jan 2020 16:18:19 +0100 Subject: [PATCH 018/306] Update sbt-microsites to 1.1.0 (#770) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 03c0acdbf..2ad98bdce 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,7 +5,7 @@ resolvers ++= Seq( ) ) -addSbtPlugin("com.47deg" % "sbt-microsites" % "1.0.2") +addSbtPlugin("com.47deg" % "sbt-microsites" % "1.1.0") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.2") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.12") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") From ed815a47eeadbfc0fe3bec81f18c84eba53d22e3 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Mon, 13 Jan 2020 13:20:00 -0500 Subject: [PATCH 019/306] Update scala to 2.12.10 (#757) --- .travis.yml | 14 +++++++------- README.md | 2 +- build.sbt | 2 +- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.travis.yml b/.travis.yml index a8e77be39..eaafb5d6c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,7 +26,7 @@ matrix: "++$TRAVIS_SCALA_VERSION clean" \ "++$TRAVIS_SCALA_VERSION test" - - scala: 2.12.9 + - scala: 2.12.10 jdk: openjdk8 before_install: - export PATH=${PATH}:./vendor/bundle @@ -40,7 +40,7 @@ matrix: "++$TRAVIS_SCALA_VERSION docs/makeMicrosite" \ "++$TRAVIS_SCALA_VERSION mimaReportBinaryIssues" - - scala: 2.12.9 + - scala: 2.12.10 jdk: openjdk11 script: sbt \ "++$TRAVIS_SCALA_VERSION clean" \ @@ -60,11 +60,11 @@ matrix: cache: directories: - - $HOME/.cache - - $HOME/.sbt/boot/scala* - - $HOME/.sbt/launchers - - $HOME/.ivy2/cache - - $HOME/.nvm + - $HOME/.cache + - $HOME/.sbt/boot/scala* + - $HOME/.sbt/launchers + - $HOME/.ivy2/cache + - $HOME/.nvm before_cache: - du -h -d 1 $HOME/.ivy2/cache diff --git a/README.md b/README.md index f85ee7128..9be015c96 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ See the [Algebird website](https://twitter.github.io/algebird) for more informat ```scala > sbt algebird-core/console -Welcome to Scala 2.12.9 (OpenJDK 64-Bit Server VM, Java 11.0.1). +Welcome to Scala 2.12.10 (OpenJDK 64-Bit Server VM, Java 11.0.1). Type in expressions for evaluation. Or try :help. 
scala> import com.twitter.algebird._ diff --git a/build.sbt b/build.sbt index d92b0f9ec..a4b79cf1c 100644 --- a/build.sbt +++ b/build.sbt @@ -36,7 +36,7 @@ def isScala213x(scalaVersion: String) = scalaBinaryVersion(scalaVersion) == "2.1 val sharedSettings = Seq( organization := "com.twitter", - scalaVersion := "2.12.9", + scalaVersion := "2.12.10", crossScalaVersions := Seq("2.11.12", scalaVersion.value), resolvers ++= Seq( Opts.resolver.sonatypeSnapshots, From e62b99d6729b60aef040261d867aa54e012ade6e Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 16 Jan 2020 21:08:07 +0100 Subject: [PATCH 020/306] Update sbt to 1.3.7 --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 00b48d978..a82bb05e1 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.6 +sbt.version=1.3.7 From 5a859cf50d0bed598cfe3dc8403a5cd59d8cbbc2 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 16 Jan 2020 21:44:26 +0100 Subject: [PATCH 021/306] Update bijection-core to 0.9.7 (#771) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index a4b79cf1c..829c99550 100644 --- a/build.sbt +++ b/build.sbt @@ -4,7 +4,7 @@ import com.typesafe.tools.mima.core._ import pl.project13.scala.sbt.JmhPlugin val algebraVersion = "2.0.0" -val bijectionVersion = "0.9.6" +val bijectionVersion = "0.9.7" val javaEwahVersion = "1.1.7" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" From f5ae8f0186f4f8a2d691a1d5a044e1fa2455f572 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 17 Jan 2020 17:23:55 +0100 Subject: [PATCH 022/306] Update sbt-release to 1.0.13 (#773) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 2ad98bdce..8557f2c46 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -7,7 +7,7 @@ resolvers ++= Seq( addSbtPlugin("com.47deg" % "sbt-microsites" % "1.1.0") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.2") -addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.12") +addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.1") From 2317fdf821720012a629a29ddfa33b85b2b43995 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Tue, 28 Jan 2020 18:20:32 -0500 Subject: [PATCH 023/306] Add algebird-bijection 2.13 support (#776) --- build.sbt | 1 + 1 file changed, 1 insertion(+) diff --git a/build.sbt b/build.sbt index 829c99550..98accf54f 100644 --- a/build.sbt +++ b/build.sbt @@ -304,6 +304,7 @@ lazy val algebirdUtil = module("util") lazy val algebirdBijection = module("bijection") .settings( + crossScalaVersions += "2.13.1", libraryDependencies += "com.twitter" %% "bijection-core" % bijectionVersion ) .dependsOn(algebirdCore, algebirdTest % "test->test") From 7f7172573a879aa56f3d215cd83a15e0508a8d00 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 29 Jan 2020 21:44:05 +0100 Subject: [PATCH 024/306] Update sbt-unidoc to 0.4.3 (#777) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 8557f2c46..29e0362bc 100644 --- 
a/project/plugins.sbt +++ b/project/plugins.sbt @@ -6,7 +6,7 @@ resolvers ++= Seq( ) addSbtPlugin("com.47deg" % "sbt-microsites" % "1.1.0") -addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.2") +addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.0") From 667f9cf87b97d7f295db587cf94b58e47f855a9e Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 30 Jan 2020 21:35:51 +0100 Subject: [PATCH 025/306] Update util-core to 20.1.0 (#779) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 98accf54f..577d257f3 100644 --- a/build.sbt +++ b/build.sbt @@ -12,7 +12,7 @@ val scalaTestVersion = "3.1.0" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.1.3" -val utilVersion = "19.12.0" +val utilVersion = "20.1.0" val sparkVersion = "2.4.4" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = From 29602e902c25fef3b2f5d1f7f0b1d528f530ac74 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 30 Jan 2020 21:49:42 +0100 Subject: [PATCH 026/306] Update sbt-scalafmt to 2.3.1 (#778) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 29e0362bc..8d5e86a8c 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -9,7 +9,7 @@ addSbtPlugin("com.47deg" % "sbt-microsites" % "1.1.0") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.0") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.1") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") From 0e3fe9d17c856a8ec708fc97038cf1b6a5bf4d86 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sat, 1 Feb 2020 14:26:50 -0500 Subject: [PATCH 027/306] Cleanup around Monoid and Algebra test (#775) * Cleanup Monoid * Cleanup Algebra test * fixup! 
Cleanup Monoid --- .../scala/com/twitter/algebird/Monoid.scala | 2 +- .../algebird/AbstractAlgebraTest.scala | 22 ++++++++----------- 2 files changed, 10 insertions(+), 14 deletions(-) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala index c0599a650..0255a1e4e 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala @@ -148,7 +148,7 @@ class ArrayMonoid[T: ClassTag](implicit semi: Semigroup[T]) extends Monoid[Array val (longer, shorter) = if (left.length > right.length) (left, right) else (right, left) val sum = longer.clone - for (i <- 0 until shorter.length) + for (i <- shorter.indices) sum.update(i, semi.plus(sum(i), shorter(i))) sum diff --git a/algebird-test/src/test/scala/com/twitter/algebird/AbstractAlgebraTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/AbstractAlgebraTest.scala index 800111b97..daa5329c7 100755 --- a/algebird-test/src/test/scala/com/twitter/algebird/AbstractAlgebraTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AbstractAlgebraTest.scala @@ -25,9 +25,9 @@ class AbstractAlgebraTest extends CheckProperties with Matchers { val monoid = implicitly[Monoid[Option[Int]]] forAll { intList: List[Option[Int]] => - val flattenedList = intList.flatMap(x => x) + val flattenedList = intList.flatten val expectedResult = - if (!flattenedList.isEmpty) Some(flattenedList.sum) else None + if (flattenedList.nonEmpty) Some(flattenedList.sum) else None expectedResult == monoid.sum(intList) } } @@ -37,23 +37,19 @@ class AbstractAlgebraTest extends CheckProperties with Matchers { val minMonoid = implicitly[Monoid[Option[Min[Int]]]] forAll { intList: List[Option[Int]] => val minList = intList.map { - _ match { - case Some(x) => Some(Min(x)) - case None => None - } + case Some(x) => Some(Min(x)) + case None => None } val maxList = intList.map { - _ match { - case Some(x) => Some(Max(x)) - case None => None - } + case Some(x) => Some(Max(x)) + case None => None } - val flattenedList = intList.flatMap(x => x) + val flattenedList = intList.flatten val expectedMax = - if (!flattenedList.isEmpty) Some(Max(flattenedList.max)) else None + if (flattenedList.nonEmpty) Some(Max(flattenedList.max)) else None val expectedMin = - if (!flattenedList.isEmpty) Some(Min(flattenedList.min)) else None + if (flattenedList.nonEmpty) Some(Min(flattenedList.min)) else None expectedMax == maxMonoid.sum(maxList) && expectedMin == minMonoid.sum(minList) From 9bd7e448eb2f846bc7f32c5e9296a5ff402d06b9 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 4 Feb 2020 18:04:20 +0100 Subject: [PATCH 028/306] Update sbt to 1.3.8 (#783) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index a82bb05e1..a919a9b5f 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.7 +sbt.version=1.3.8 From fceec10f9261f060a6fc9626c58ed692ab83e492 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 6 Feb 2020 00:23:01 +0100 Subject: [PATCH 029/306] Update scalacheck-shapeless_1.14 to 1.2.4 (#782) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 577d257f3..f33f4709b 100644 --- a/build.sbt +++ b/build.sbt @@ -322,7 +322,7 @@ lazy val 
algebirdGeneric = module("generic") crossScalaVersions += "2.13.1", libraryDependencies ++= Seq( "com.chuusai" %% "shapeless" % "2.3.3", - "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.3" + "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.4" ) ++ { if (isScala213x(scalaVersion.value)) { Seq() From a91105a411561e7f6cacf7f0ad217ba421778f99 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 7 Feb 2020 16:13:05 +0100 Subject: [PATCH 030/306] Update sbt-mima-plugin to 0.6.3 (#781) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 8d5e86a8c..70153174a 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.1") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.1") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.3") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.8.1") From 4eea915eac758c7b84e7efd8ba2648ce0e23ee50 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 7 Feb 2020 16:13:24 +0100 Subject: [PATCH 031/306] Update spark-core to 2.4.5 (#784) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index f33f4709b..a2858f3d3 100644 --- a/build.sbt +++ b/build.sbt @@ -13,7 +13,7 @@ val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.1.3" val utilVersion = "20.1.0" -val sparkVersion = "2.4.4" +val sparkVersion = "2.4.5" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = CrossVersion.partialVersion(scalaVersion) match { From 5ede9c9c0511a3edc7a505039bbab95220648a43 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 13 Feb 2020 20:15:46 +0100 Subject: [PATCH 032/306] Update sbt-mima-plugin to 0.6.4 (#785) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 70153174a..98c513ea1 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.1") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.3") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.4") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.8.1") From 91b3f452d4fc2a9a68a8705a9fb4dab295dde33a Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 13 Feb 2020 20:16:14 +0100 Subject: [PATCH 033/306] Update scala-collection-compat to 2.1.4 (#786) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index a2858f3d3..ffcb76498 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val 
scalaTestVersion = "3.1.0" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" -val scalaCollectionCompat = "2.1.3" +val scalaCollectionCompat = "2.1.4" val utilVersion = "20.1.0" val sparkVersion = "2.4.5" From 62eb3b3a24be4dda360c87c71bd1d37d548b224d Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 21 Feb 2020 17:10:41 +0100 Subject: [PATCH 034/306] Update scalatest to 3.1.1 (#789) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index ffcb76498..d70143fd1 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val bijectionVersion = "0.9.7" val javaEwahVersion = "1.1.7" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.1.0" +val scalaTestVersion = "3.1.1" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.1.4" From bdda739c547af53d34f079bad279e1da60f1c620 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 4 Mar 2020 15:58:34 +0100 Subject: [PATCH 035/306] Update sbt-mima-plugin to 0.7.0 (#792) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 98c513ea1..c5f2e8349 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.1") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.6.4") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.7.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.8.1") From 4e7a0645e22b6560b1d374a2eb2d19cbe9ec73e3 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 4 Mar 2020 15:58:52 +0100 Subject: [PATCH 036/306] Update scalacheck-shapeless_1.14 to 1.2.5 (#793) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index d70143fd1..86f71e747 100644 --- a/build.sbt +++ b/build.sbt @@ -322,7 +322,7 @@ lazy val algebirdGeneric = module("generic") crossScalaVersions += "2.13.1", libraryDependencies ++= Seq( "com.chuusai" %% "shapeless" % "2.3.3", - "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.4" + "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" ) ++ { if (isScala213x(scalaVersion.value)) { Seq() From ee2abdc25547fde26f8d42a9a67dafb6fa77349c Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 8 Mar 2020 14:23:09 +0100 Subject: [PATCH 037/306] Update sbt-microsites to 1.1.2 (#790) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index c5f2e8349..e26a85a6f 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,7 +5,7 @@ resolvers ++= Seq( ) ) -addSbtPlugin("com.47deg" % "sbt-microsites" % "1.1.0") +addSbtPlugin("com.47deg" % "sbt-microsites" % "1.1.2") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") From 4f3a0f87b592141e9f3232a94d2260af67e74923 Mon 
Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 9 Mar 2020 14:13:05 +0100 Subject: [PATCH 038/306] Update sbt-scalafmt to 2.3.2 (#794) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index e26a85a6f..dc0680efc 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -9,7 +9,7 @@ addSbtPlugin("com.47deg" % "sbt-microsites" % "1.1.2") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.1") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.7.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") From e209246af9e3b595517c79813734d4ecf23497b0 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 9 Mar 2020 14:13:36 +0100 Subject: [PATCH 039/306] Update scalafmt-core to 2.4.2 (#791) --- .scalafmt.conf | 2 +- .../benchmark/CMSHashingBenchmark.scala | 12 +--- .../algebird/benchmark/HLLBenchmark.scala | 18 ++--- .../benchmark/HLLPresentBenchmark.scala | 4 +- .../algebird/benchmark/QTreeBenchmark.scala | 12 +--- .../benchmark/QTreeMicroBenchmark.scala | 4 +- .../com/twitter/algebird/compat.scala | 4 +- .../com/twitter/algebird/AdaptiveCache.scala | 16 ++--- .../com/twitter/algebird/AdaptiveVector.scala | 16 ++--- .../com/twitter/algebird/AffineFunction.scala | 4 +- .../com/twitter/algebird/Aggregator.scala | 26 ++----- .../com/twitter/algebird/Applicative.scala | 16 ++--- .../com/twitter/algebird/Approximate.scala | 8 +-- .../com/twitter/algebird/AveragedValue.scala | 4 +- .../scala/com/twitter/algebird/Batched.scala | 4 +- .../com/twitter/algebird/CountMinSketch.scala | 24 +++---- .../com/twitter/algebird/Eventually.scala | 4 +- .../scala/com/twitter/algebird/ExpHist.scala | 14 +--- .../scala/com/twitter/algebird/Fold.scala | 32 +++------ .../algebird/GeneratedAbstractAlgebra.scala | 8 +-- .../scala/com/twitter/algebird/Group.scala | 6 +- .../com/twitter/algebird/HyperLogLog.scala | 27 +++----- .../twitter/algebird/HyperLogLogSeries.scala | 2 +- .../com/twitter/algebird/IndexedSeq.scala | 14 ++-- .../scala/com/twitter/algebird/Interval.scala | 2 +- .../com/twitter/algebird/JavaMonoids.scala | 4 +- .../scala/com/twitter/algebird/Last.scala | 4 +- .../com/twitter/algebird/MapAlgebra.scala | 20 ++---- .../com/twitter/algebird/MinHasher.scala | 28 ++++---- .../com/twitter/algebird/MomentsGroup.scala | 4 +- .../scala/com/twitter/algebird/Monad.scala | 10 +-- .../scala/com/twitter/algebird/Monoid.scala | 14 ++-- .../com/twitter/algebird/MurmurHash.scala | 28 ++++---- .../com/twitter/algebird/Predecessible.scala | 2 +- .../scala/com/twitter/algebird/Preparer.scala | 24 ++----- .../scala/com/twitter/algebird/QTree.scala | 16 ++--- .../com/twitter/algebird/SGDMonoid.scala | 13 ++-- .../com/twitter/algebird/Semigroup.scala | 2 +- .../com/twitter/algebird/SketchMap.scala | 10 ++- .../com/twitter/algebird/SpaceSaver.scala | 10 +-- .../com/twitter/algebird/Successible.scala | 2 +- .../com/twitter/algebird/SummingCache.scala | 6 +- .../com/twitter/algebird/SummingQueue.scala | 2 +- .../com/twitter/algebird/TopKMonoid.scala | 4 +- .../com/twitter/algebird/VectorSpace.scala | 4 +- .../scala/com/twitter/algebird/field.scala | 8 +-- 
.../com/twitter/algebird/macros/Cuber.scala | 4 +- .../com/twitter/algebird/macros/Roller.scala | 4 +- .../algebird/matrix/AdaptiveMatrix.scala | 4 +- .../algebird/matrix/SparseColumnMatrix.scala | 4 +- .../com/twitter/algebird/monad/Reader.scala | 4 +- .../algebird/monad/StateWithError.scala | 40 +++-------- .../twitter/algebird/monad/Trampoline.scala | 4 +- .../statistics/IterCallStatistics.scala | 4 +- .../algebird/statistics/Statistics.scala | 6 +- .../algebird/AlgebraResolutionTest.scala | 8 +-- .../twitter/algebird/spark/AlgebirdRDD.scala | 30 ++++---- .../algebird/spark/AlgebirdRDDTests.scala | 8 +-- .../twitter/algebird/ApplicativeLaws.scala | 12 +--- .../algebird/BaseVectorSpaceProperties.scala | 8 +-- .../com/twitter/algebird/FunctorLaws.scala | 6 +- .../twitter/algebird/MetricProperties.scala | 4 +- .../com/twitter/algebird/MonadLaws.scala | 38 +++-------- .../twitter/algebird/PredecessibleLaws.scala | 4 +- .../twitter/algebird/StatefulSummerLaws.scala | 2 +- .../twitter/algebird/SuccessibleLaws.scala | 4 +- .../algebird/AbstractAlgebraTest.scala | 12 +--- .../algebird/AdJoinedUnitRingLaws.scala | 8 +-- .../com/twitter/algebird/AggregatorLaws.scala | 26 ++----- .../algebird/AppendAggregatorTest.scala | 10 +-- .../twitter/algebird/ApproximateTest.scala | 4 +- .../twitter/algebird/AveragedValueLaws.scala | 8 +-- .../com/twitter/algebird/BatchedTest.scala | 8 +-- .../twitter/algebird/BloomFilterTest.scala | 68 +++++-------------- .../twitter/algebird/CheckProperties.scala | 2 +- .../algebird/CollectionSpecification.scala | 60 ++++++---------- .../com/twitter/algebird/CombinatorTest.scala | 16 ++--- .../twitter/algebird/CountMinSketchTest.scala | 38 ++++------- .../twitter/algebird/DecayedValueLaws.scala | 12 +--- .../com/twitter/algebird/EventuallyTest.scala | 8 +-- .../com/twitter/algebird/ExpHistLaws.scala | 28 ++------ .../com/twitter/algebird/FirstLaws.scala | 10 +-- .../scala/com/twitter/algebird/FoldTest.scala | 24 ++++--- .../algebird/FunctionMonoidTests.scala | 4 +- .../algebird/HyperLogLogSeriesTest.scala | 12 +--- .../twitter/algebird/HyperLogLogTest.scala | 60 +++++----------- .../com/twitter/algebird/IntervalLaws.scala | 60 +++++----------- .../com/twitter/algebird/JavaBoxedTests.scala | 6 +- .../scala/com/twitter/algebird/LastLaws.scala | 10 +-- .../scala/com/twitter/algebird/MaxLaws.scala | 8 +-- .../com/twitter/algebird/MinHasherTest.scala | 16 ++--- .../scala/com/twitter/algebird/MinLaws.scala | 8 +-- .../com/twitter/algebird/MomentsLaws.scala | 4 +- .../twitter/algebird/MonadInstanceLaws.scala | 4 +- .../algebird/NumericSpecification.scala | 2 +- .../algebird/PredecessibleProperties.scala | 6 +- .../com/twitter/algebird/PreparerLaws.scala | 29 ++++---- .../com/twitter/algebird/QTreeTest.scala | 4 +- .../twitter/algebird/RightFolded2Test.scala | 4 +- .../twitter/algebird/RightFoldedTest.scala | 4 +- .../scala/com/twitter/algebird/SGDTest.scala | 4 +- .../com/twitter/algebird/SetDiffTest.scala | 36 +++------- .../com/twitter/algebird/SketchMapTest.scala | 6 +- .../com/twitter/algebird/SpaceSaverTest.scala | 13 ++-- .../algebird/SuccessibleProperties.scala | 6 +- .../algebird/SummingIteratorTest.scala | 8 +-- .../twitter/algebird/SummingQueueTest.scala | 34 +++------- .../com/twitter/algebird/TopKTests.scala | 16 ++--- .../algebird/TupleAggregatorsTest.scala | 4 +- .../algebird/VectorSpaceProperties.scala | 2 +- .../com/twitter/algebird/WindowLawsTest.scala | 4 +- .../algebird/statistics/StatisticsTests.scala | 4 +- .../algebird/util/PromiseLinkMonoid.scala | 4 +- 
.../twitter/algebird/util/TunnelMonoid.scala | 4 +- .../util/summer/AsyncListMMapSum.scala | 2 +- .../algebird/util/summer/AsyncListSum.scala | 4 +- .../algebird/util/summer/AsyncSummer.scala | 4 +- .../summer/HeavyHittersCachingSummer.scala | 8 +-- .../util/summer/SyncSummingQueue.scala | 2 +- .../util/PromiseLinkMonoidProperties.scala | 2 +- .../util/TunnelMonoidProperties.scala | 8 +-- .../util/summer/AsyncSummerLaws.scala | 16 ++--- build.sbt | 16 ++--- 123 files changed, 486 insertions(+), 1000 deletions(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 9bfac3dc3..b7520854a 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=2.3.2 +version=2.4.2 maxColumn = 110 docstrings = JavaDoc newlines.penalizeSingleSelectMultiArgList = false diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSHashingBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSHashingBenchmark.scala index c499f52cc..a4e7cca8e 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSHashingBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSHashingBenchmark.scala @@ -65,9 +65,7 @@ object CMSHashingBenchmark { def setup(): Unit = { random = new scala.util.Random // We draw numbers randomly from a 2^maxBits address space. - inputs = (1 to operations).view.map { _ => - scala.math.BigInt(maxBits, random) - } + inputs = (1 to operations).view.map(_ => scala.math.BigInt(maxBits, random)) } } @@ -92,13 +90,9 @@ class CMSHashingBenchmark { } def timeBrokenCurrentHashWithRandomMaxBitsNumbers(state: CMSState) = - state.inputs.foreach { input => - brokenCurrentHash(state.a, state.b, state.width)(input) - } + state.inputs.foreach(input => brokenCurrentHash(state.a, state.b, state.width)(input)) def timeMurmurHashScalaWithRandomMaxBitsNumbers(state: CMSState) = - state.inputs.foreach { input => - murmurHashScala(state.a, state.b, state.width)(input) - } + state.inputs.foreach(input => murmurHashScala(state.a, state.b, state.width)(input)) } diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLBenchmark.scala index 588cd226f..3e76e2eaf 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLBenchmark.scala @@ -15,7 +15,7 @@ class OldMonoid(bits: Int) extends HyperLogLogMonoid(bits) { if (items.isEmpty) None else { val buffer = new Array[Byte](size) - items.foreach { _.updateInto(buffer) } + items.foreach(_.updateInto(buffer)) Some(DenseHLL(bits, Bytes(buffer))) } } @@ -53,9 +53,7 @@ object HllBenchmark { hllMonoid.batchCreate(elements)(byteEncoder) val inputIntermediate = (0L until numElements).map { _ => - val setElements = (0 until setSize).map { _ => - rng.nextInt(1000).toLong - }.toSet + val setElements = (0 until setSize).map(_ => rng.nextInt(1000).toLong).toSet (pow(numInputKeys, rng.nextFloat).toLong, List(hll(setElements))) } inputData = MapAlgebra.sumByKey(inputIntermediate).map(_._2).toSeq @@ -67,18 +65,12 @@ class HllBenchmark { import HllBenchmark._ @Benchmark def timeSumOption(state: HLLState, bh: Blackhole) = - state.inputData.foreach { vals => - bh.consume(state.hllMonoid.sumOption(vals)) - } + state.inputData.foreach(vals => bh.consume(state.hllMonoid.sumOption(vals))) @Benchmark def timeOldSumOption(state: HLLState, bh: Blackhole) = - 
state.inputData.foreach { d => - bh.consume(state.oldHllMonoid.sumOption(d)) - } + state.inputData.foreach(d => bh.consume(state.oldHllMonoid.sumOption(d))) @Benchmark def timePlus(state: HLLState, bh: Blackhole) = - state.inputData.foreach { vals => - bh.consume(vals.reduce(state.hllMonoid.plus(_, _))) - } + state.inputData.foreach(vals => bh.consume(vals.reduce(state.hllMonoid.plus(_, _)))) } diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala index 8d0754342..98898e76e 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala @@ -45,7 +45,5 @@ class HLLPresentBenchmark { @Benchmark def timeBatchCreate(state: HLLPresentState, bh: Blackhole) = - state.data.foreach { hll => - bh.consume(clone(hll).approximateSize) - } + state.data.foreach(hll => bh.consume(clone(hll).approximateSize)) } diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeBenchmark.scala index 9bd141ce6..0a7bfce37 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeBenchmark.scala @@ -34,24 +34,18 @@ object QTreeBenchmark { inputDataUnit = { val rng = new Random("qtree".hashCode) - (0L until numElements).map { _ => - QTree((rng.nextInt(1000).toLong, ())) - } + (0L until numElements).map(_ => QTree((rng.nextInt(1000).toLong, ()))) } inputDataLong = { val rng = new Random("qtree".hashCode) - (0L until numElements).map { _ => - QTree(rng.nextInt(1000).toLong) - } + (0L until numElements).map(_ => QTree(rng.nextInt(1000).toLong)) } inputDataDouble = { val rng = new Random("qtree".hashCode) - (0L until numElements).map { _ => - QTree(rng.nextInt(1000).toDouble) - } + (0L until numElements).map(_ => QTree(rng.nextInt(1000).toDouble)) } } diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeMicroBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeMicroBenchmark.scala index 094339c14..ba2edd112 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeMicroBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeMicroBenchmark.scala @@ -21,9 +21,7 @@ object QTreeMicroBenchmark { def setup(): Unit = { val rng = new Random("qtree".hashCode) - inputDataLong = (0L until numElements).map { _ => - QTree(rng.nextInt(1000).toLong) - } + inputDataLong = (0L until numElements).map(_ => QTree(rng.nextInt(1000).toLong)) } } diff --git a/algebird-core/src/main/scala-2.12-/com/twitter/algebird/compat.scala b/algebird-core/src/main/scala-2.12-/com/twitter/algebird/compat.scala index dd1167470..0d6844d83 100644 --- a/algebird-core/src/main/scala-2.12-/com/twitter/algebird/compat.scala +++ b/algebird-core/src/main/scala-2.12-/com/twitter/algebird/compat.scala @@ -22,9 +22,7 @@ private[algebird] trait CompatFold { * Simple Fold that collects elements into a container. 
*/ def container[I, C[_]](implicit cbf: CanBuildFrom[C[I], I, C[I]]): Fold[I, C[I]] = - Fold.foldMutable[Builder[I, C[I]], I, C[I]]({ case (b, i) => b += i }, { _ => - cbf.apply() - }, { _.result }) + Fold.foldMutable[Builder[I, C[I]], I, C[I]]({ case (b, i) => b += i }, _ => cbf.apply(), _.result) } private[algebird] trait CompatDecayedVector { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala index 87dabee44..f481e95ea 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala @@ -31,9 +31,9 @@ import ref.SoftReference class SentinelCache[K, V](implicit sgv: Semigroup[V]) { private val map = new SoftReference(new HashMap[K, V]()) - def size: Int = map.get.map { _.size }.getOrElse(0) + def size: Int = map.get.map(_.size).getOrElse(0) - def clear(): Unit = map.get.foreach { _.clear } + def clear(): Unit = map.get.foreach(_.clear) def stopGrowing(): Unit = map.clear @@ -43,13 +43,11 @@ class SentinelCache[K, V](implicit sgv: Semigroup[V]) { case (k, v) => val newValue = map.get - .flatMap { _.get(k) } - .map { oldV => - sgv.plus(oldV, v) - } + .flatMap(_.get(k)) + .map(oldV => sgv.plus(oldV, v)) .getOrElse(v) - map.get.foreach { _.put(k, newValue) } + map.get.foreach(_.put(k, newValue)) } } } @@ -73,9 +71,7 @@ class AdaptiveCache[K, V: Semigroup](maxCapacity: Int, growthMargin: Double = 3. private val sentinelCache = new SentinelCache[K, V] private def update(evicted: Option[Map[K, V]]) = { - evicted.foreach { e => - sentinelCache.put(e) - } + evicted.foreach(e => sentinelCache.put(e)) var ret = evicted diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala index bfbdeb295..c7d3c74d5 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala @@ -35,7 +35,7 @@ object AdaptiveVector { if (v.size == 0) { fill[V](0)(sparseVal) } else { - val denseCount = v.count { _ != sparseVal } + val denseCount = v.count(_ != sparseVal) val sz = v.size if (denseCount < sz * THRESHOLD) SparseVector(toMap(v, sparseVal), sparseVal, sz) @@ -48,7 +48,7 @@ object AdaptiveVector { } else { val maxIdx = m.keys.max require(maxIdx < sizeOfDense, "Max key (" + maxIdx + ") exceeds valid for size (" + sizeOfDense + ")") - val denseCount = m.count { _._2 != sparseVal } + val denseCount = m.count(_._2 != sparseVal) if (denseCount < sizeOfDense * THRESHOLD) SparseVector(m, sparseVal, sizeOfDense) else @@ -62,7 +62,7 @@ object AdaptiveVector { } def toMap[V](iseq: IndexedSeq[V], sparse: V): Map[Int, V] = - iseq.view.zipWithIndex.filter { _._1 != sparse }.map { _.swap }.toMap + iseq.view.zipWithIndex.filter(_._1 != sparse).map(_.swap).toMap def toVector[V](m: Map[Int, V], sparse: V, size: Int): Vector[V] = { // Mutable local variable to optimize performance @@ -122,9 +122,7 @@ object AdaptiveVector { val sparseAreZero = if (Monoid.isNonZero(v.sparseValue)) (v.denseCount == v.size) else true sparseAreZero && - v.denseIterator.forall { idxv => - !Monoid.isNonZero(idxv._2) - } + v.denseIterator.forall(idxv => !Monoid.isNonZero(idxv._2)) } } private class AVGroup[V: Group] extends AVMonoid[V] with Group[AdaptiveVector[V]] { @@ -222,8 +220,8 @@ case class DenseVector[V](iseq: Vector[V], override val sparseValue: V, override 
override def denseIterator: Iterator[(Int, V)] = iseq.view.zipWithIndex - .filter { _._1 != sparseValue } - .map { _.swap } + .filter(_._1 != sparseValue) + .map(_.swap) .iterator } @@ -249,6 +247,6 @@ case class SparseVector[V](map: Map[Int, V], override val sparseValue: V, overri } override def extend(cnt: Int): SparseVector[V] = SparseVector(map, sparseValue, size + cnt) - private lazy val sortedList = map.toList.sortBy { _._1 } + private lazy val sortedList = map.toList.sortBy(_._1) override def denseIterator: Iterator[(Int, V)] = sortedList.iterator } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AffineFunction.scala b/algebird-core/src/main/scala/com/twitter/algebird/AffineFunction.scala index 7c3bfd8b5..1375c5800 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AffineFunction.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AffineFunction.scala @@ -21,9 +21,7 @@ package com.twitter.algebird * f(x) = slope * x + intercept */ case class AffineFunction[R](slope: R, intercept: R) extends java.io.Serializable { - def toFn(implicit ring: Ring[R]): Function1[R, R] = { x => - this.apply(x)(ring) - } + def toFn(implicit ring: Ring[R]): Function1[R, R] = { x => this.apply(x)(ring) } def apply(x: R)(implicit ring: Ring[R]): R = ring.plus(ring.times(slope, x), intercept) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala b/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala index c8fc6e987..7efe5da31 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala @@ -21,9 +21,7 @@ object Aggregator extends java.io.Serializable { * This is a trivial aggregator that always returns a single value */ def const[T](t: T): MonoidAggregator[Any, Unit, T] = - prepareMonoid { _: Any => - () - }.andThenPresent(_ => t) + prepareMonoid { _: Any => () }.andThenPresent(_ => t) /** * Using Aggregator.prepare,present you can add to this aggregator @@ -160,9 +158,7 @@ object Aggregator extends java.io.Serializable { * How many items satisfy a predicate */ def count[T](pred: T => Boolean): MonoidAggregator[T, Long, Long] = - prepareMonoid { t: T => - if (pred(t)) 1L else 0L - } + prepareMonoid { t: T => if (pred(t)) 1L else 0L } /** * Do any items satisfy some predicate @@ -179,16 +175,12 @@ object Aggregator extends java.io.Serializable { /** * Take the first (left most in reduce order) item found */ - def head[T]: Aggregator[T, T, T] = fromReduce[T] { (l, _) => - l - } + def head[T]: Aggregator[T, T, T] = fromReduce[T]((l, _) => l) /** * Take the last (right most in reduce order) item found */ - def last[T]: Aggregator[T, T, T] = fromReduce[T] { (_, r) => - r - } + def last[T]: Aggregator[T, T, T] = fromReduce[T]((_, r) => r) /** * Get the maximum item @@ -212,9 +204,7 @@ object Aggregator extends java.io.Serializable { * This returns the number of items we find */ def size: MonoidAggregator[Any, Long, Long] = - prepareMonoid { (_: Any) => - 1L - } + prepareMonoid((_: Any) => 1L) /** * Take the smallest `count` items using a heap @@ -306,9 +296,7 @@ object Aggregator extends java.io.Serializable { * Put everything in a Set. Note, this could fill the memory if the Set is very large. */ def toSet[T]: MonoidAggregator[T, Set[T], Set[T]] = - prepareMonoid { t: T => - Set(t) - } + prepareMonoid { t: T => Set(t) } /** * This builds an in-memory Set, and then finally gets the size of that set. 
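The Aggregator.scala hunks in this scalafmt 2.4.2 patch are formatting only: single-expression function literals move between brace and parenthesis syntax, with no change to what the aggregators compute. A minimal REPL-style sketch of that equivalence, using an illustrative list that is not part of the diff:

// Both forms denote the same function value; scalafmt 2.4.2 prefers the
// parenthesised lambda when the body is a single expression.
val xs = List("a", "bb", "ccc")
val withBraces = xs.map { s => s.length }
val withParens = xs.map(s => s.length)
assert(withBraces == withParens) // both are List(1, 2, 3)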
@@ -497,7 +485,7 @@ trait Aggregator[-A, B, +C] extends java.io.Serializable { self => Fold.fold[Option[B], A, Option[C]]({ case (None, a) => Some(self.prepare(a)) case (Some(b), a) => Some(self.append(b, a)) - }, None, { _.map(self.present) }) + }, None, _.map(self.present)) def lift: MonoidAggregator[A, Option[B], Option[C]] = new MonoidAggregator[A, Option[B], Option[C]] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala b/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala index 79a2fff0b..f705c1431 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala @@ -41,16 +41,12 @@ trait Applicative[M[_]] extends Functor[M] { def sequence[T](ms: Seq[M[T]]): M[Seq[T]] = ms match { case Seq() => apply(Seq.empty) - case Seq(m) => map(m) { Seq(_) } - case Seq(m, n) => joinWith(m, n) { Seq(_, _) } + case Seq(m) => map(m)(Seq(_)) + case Seq(m, n) => joinWith(m, n)(Seq(_, _)) case _ => val mb = - ms.foldLeft(apply(Seq.newBuilder[T])) { (mb, mt) => - joinWith(mb, mt) { (b, t) => - b += t - } - } - map(mb) { _.result } + ms.foldLeft(apply(Seq.newBuilder[T]))((mb, mt) => joinWith(mb, mt)((b, t) => b += t)) + map(mb)(_.result) } def joinWith[T, U, V](mt: M[T], mu: M[U])(fn: (T, U) => V): M[V] = map(join(mt, mu)) { case (t, u) => fn(t, u) } @@ -109,9 +105,7 @@ object Applicative { ms: S[M[T]] )(implicit app: Applicative[M], cbf: Factory[T, R[T]]): M[R[T]] = { val bldr = cbf.newBuilder - val mbldr = ms.iterator.foldLeft(app.apply(bldr)) { (mb, mt) => - app.joinWith(mb, mt)(_ += _) - } + val mbldr = ms.iterator.foldLeft(app.apply(bldr))((mb, mt) => app.joinWith(mb, mt)(_ += _)) app.map(mbldr)(_.result) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Approximate.scala b/algebird-core/src/main/scala/com/twitter/algebird/Approximate.scala index 8c5d23cb8..80d8a7f53 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Approximate.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Approximate.scala @@ -39,8 +39,8 @@ case class ApproximateBoolean(isTrue: Boolean, withProb: Double) extends Approxi if (isTrue || that.isTrue) { // We need at least one of them to be true: val newP = List(this, that) - .filter { _.isTrue } - .map { _.withProb } + .filter(_.isTrue) + .map(_.withProb) .max ApproximateBoolean(true, newP) } else { @@ -55,8 +55,8 @@ case class ApproximateBoolean(isTrue: Boolean, withProb: Double) extends Approxi } else { // Our confidence is the maximum confidence of the false cases: val newP = List(this, that) - .filterNot { _.isTrue } - .map { _.withProb } + .filterNot(_.isTrue) + .map(_.withProb) .max ApproximateBoolean(false, newP) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala b/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala index b8f26bd23..c7018e8b7 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala @@ -106,9 +106,7 @@ object AveragedValue { */ def numericAggregator[N](implicit num: Numeric[N]): MonoidAggregator[N, AveragedValue, Double] = Aggregator - .prepareMonoid { n: N => - AveragedValue(num.toDouble(n)) - } + .prepareMonoid { n: N => AveragedValue(num.toDouble(n)) } .andThenPresent(_.value) /** diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Batched.scala b/algebird-core/src/main/scala/com/twitter/algebird/Batched.scala index 
050c11687..14997976d 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Batched.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Batched.scala @@ -213,9 +213,7 @@ object Batched { def fold[T](batchSize: Int)(implicit m: Monoid[T]): Fold[T, T] = Fold - .foldLeft[T, Batched[T]](Batched(m.zero)) { (b, t) => - b.combine(Item(t)).compact(batchSize) - } + .foldLeft[T, Batched[T]](Batched(m.zero))((b, t) => b.combine(Item(t)).compact(batchSize)) .map(_.sum) /** diff --git a/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala b/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala index 630b2bf12..294b9e392 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala @@ -131,9 +131,7 @@ class CMSMonoid[K: CMSHasher](eps: Double, delta: Double, seed: Int, maxExactCou */ def create(data: Seq[K]): CMS[K] = { val summation = new CMSSummation(params) - data.foreach { k => - summation.insert(k, 1L) - } + data.foreach(k => summation.insert(k, 1L)) summation.result } @@ -345,9 +343,7 @@ object CMSFunctions { val r = new scala.util.Random(seed) val numHashes = depth(delta) val numCounters = width(eps) - (0 to (numHashes - 1)).map { _ => - CMSHash[K](r.nextInt(), 0, numCounters) - } + (0 to (numHashes - 1)).map(_ => CMSHash[K](r.nextInt(), 0, numCounters)) } } @@ -665,7 +661,7 @@ case class SparseCMS[K]( override def innerProduct(other: CMS[K]): Approximate[Long] = exactCountTable.iterator .map { case (x, count) => Approximate.exact(count) * other.frequency(x) } - .reduceOption { _ + _ } + .reduceOption(_ + _) .getOrElse(Approximate.exact(0L)) } @@ -751,7 +747,7 @@ case class CMSInstance[K]( countsTable.getCount((d, w)) * other.countsTable.getCount((d, w)) }.sum - val est = (0 to (depth - 1)).iterator.map { innerProductAtDepth }.min + val est = (0 to (depth - 1)).iterator.map(innerProductAtDepth).min val minimum = math.max(est - (eps * totalCount * other.totalCount).toLong, 0) Approximate(minimum, est, est, 1 - delta) @@ -1085,7 +1081,7 @@ case class TopPctLogic[K](heavyHittersPct: Double) extends HeavyHittersLogic[K] override def purgeHeavyHitters(cms: CMS[K])(hitters: HeavyHitters[K]): HeavyHitters[K] = { val minCount = heavyHittersPct * cms.totalCount - HeavyHitters[K](hitters.hhs.filter { _.count >= minCount }) + HeavyHitters[K](hitters.hhs.filter(_.count >= minCount)) } } @@ -1125,7 +1121,7 @@ case class HeavyHitters[K](hhs: Set[HeavyHitter[K]]) extends java.io.Serializabl def ++(other: HeavyHitters[K]): HeavyHitters[K] = HeavyHitters[K](hhs ++ other.hhs) - def items: Set[K] = hhs.map { _.item } + def items: Set[K] = hhs.map(_.item) } @@ -1307,17 +1303,13 @@ case class ScopedTopNLogic[K1, K2](heavyHittersN: Int) extends HeavyHittersLogic override def purgeHeavyHitters( cms: CMS[(K1, K2)] )(hitters: HeavyHitters[(K1, K2)]): HeavyHitters[(K1, K2)] = { - val grouped = hitters.hhs.groupBy { hh => - hh.item._1 - } + val grouped = hitters.hhs.groupBy(hh => hh.item._1) val (underLimit, overLimit) = grouped.partition { _._2.size <= heavyHittersN } val sorted = overLimit.transform { case (_, hhs) => - hhs.toSeq.sortBy { hh => - hh.count - } + hhs.toSeq.sortBy(hh => hh.count) } val purged = sorted.transform { case (_, hhs) => diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala b/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala index da607321c..927b7e6ff 100644 --- 
a/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala @@ -74,9 +74,7 @@ class EventuallySemigroup[E, O](convert: O => E)(mustConvert: O => Boolean)( def toEventualBuffer[R](buffer: Buffer[O], e: E): Left[Buffer[E], R] = { val newBuffer = Buffer[E]() - Semigroup.sumOption(buffer).foreach { sum => - newBuffer += convert(sum) - } + Semigroup.sumOption(buffer).foreach(sum => newBuffer += convert(sum)) newBuffer += e Left(newBuffer) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala b/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala index 2a25abbbe..3237f7923 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala @@ -103,11 +103,7 @@ case class ExpHist( def fold: Fold[Bucket, ExpHist] = Fold.foldMutable[Builder[Bucket, Vector[Bucket]], Bucket, ExpHist]({ case (b, bucket) => b += bucket - }, { _ => - Vector.newBuilder[Bucket] - }, { x => - addAll(x.result) - }) + }, _ => Vector.newBuilder[Bucket], x => addAll(x.result)) // This internal method assumes that the instance is stepped forward // already, and does NOT try to step internally. It also assumes @@ -184,9 +180,7 @@ object ExpHist { case class Bucket(size: Long, timestamp: Timestamp) object Bucket { - implicit val ord: Ordering[Bucket] = Ordering.by { b: Bucket => - (b.timestamp, b.size) - } + implicit val ord: Ordering[Bucket] = Ordering.by { b: Bucket => (b.timestamp, b.size) } } /** @@ -326,9 +320,7 @@ object ExpHist { @inline private[this] def bit(i: Int, idx: Int): Int = (i >>> idx) & 1 private[this] def binarize(i: Int, bits: Int, offset: Int): Vector[Int] = - (0 until bits).map { idx => - offset + bit(i, idx) - }.toVector + (0 until bits).map(idx => offset + bit(i, idx)).toVector /** * @param s the number to convert to l-canonical form diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala b/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala index a757a03a2..0ef7a5e62 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala @@ -167,9 +167,7 @@ final class FoldState[X, -I, +O] private[algebird] (val add: (X, I) => X, val st * Transforms the input type of the FoldState (see Fold.contramap). */ def contramap[H](f: H => I): FoldState[X, H, O] = - new FoldState({ (x, h) => - add(x, f(h)) - }, start, end) + new FoldState((x, h) => add(x, f(h)), start, end) } /** @@ -193,9 +191,7 @@ object Fold extends CompatFold { * The accumulator MUST be immutable and serializable. */ def foldLeft[I, O](o: O)(add: (O, I) => O): Fold[I, O] = - fold[O, I, O](add, o, { o => - o - }) + fold[O, I, O](add, o, o => o) /** * A general way of defining Folds that supports a separate accumulator type. 
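The Fold.scala hunks around this point likewise only reflow lambdas in foldLeft and in the FoldState plumbing that follows; the result of running a Fold is unchanged. A small REPL-style sketch of driving a Fold built with foldLeft through its FoldState by hand, assuming com.twitter.algebird.Fold is on the classpath and that build(), add, start and end are accessible as shown in these hunks (the summing fold itself is illustrative, not taken from the diff):

import com.twitter.algebird.Fold

// A Fold that sums its inputs, run manually via the FoldState it builds.
val sumFold = Fold.foldLeft[Int, Int](0)((acc, i) => acc + i)
val state = sumFold.build()
val total = state.end(List(1, 2, 3, 4).foldLeft(state.start)(state.add)) // 10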
@@ -227,19 +223,15 @@ object Fold extends CompatFold { type X = Seq[Any] override def build(): FoldState[Seq[Any], I, Seq[O]] = { val bs: Seq[FoldState[Any, I, O]] = - ms.map { _.build().asInstanceOf[FoldState[Any, I, O]] } + ms.map(_.build().asInstanceOf[FoldState[Any, I, O]]) val adds = - bs.map { _.add } + bs.map(_.add) val ends = - bs.map { _.end } + bs.map(_.end) val starts: Seq[Any] = - bs.map { _.start } - val add: (Seq[Any], I) => Seq[Any] = { (xs, i) => - adds.zip(xs).map { case (f, x) => f(x, i) } - } - val end: (Seq[Any] => Seq[O]) = { xs => - ends.zip(xs).map { case (f, x) => f(x) } - } + bs.map(_.start) + val add: (Seq[Any], I) => Seq[Any] = { (xs, i) => adds.zip(xs).map { case (f, x) => f(x, i) } } + val end: (Seq[Any] => Seq[O]) = { xs => ends.zip(xs).map { case (f, x) => f(x) } } new FoldState(add, starts, end) } } @@ -331,18 +323,14 @@ object Fold extends CompatFold { * Note this does not short-circuit enumeration of the sequence. */ def forall[I](pred: I => Boolean): Fold[I, Boolean] = - foldLeft(true) { (b, i) => - b && pred(i) - } + foldLeft(true)((b, i) => b && pred(i)) /** * A Fold that returns "true" if any element of the sequence statisfies the predicate. * Note this does not short-circuit enumeration of the sequence. */ def exists[I](pred: I => Boolean): Fold[I, Boolean] = - foldLeft(false) { (b, i) => - b || pred(i) - } + foldLeft(false)((b, i) => b || pred(i)) /** * A Fold that counts the number of elements satisfying the predicate. diff --git a/algebird-core/src/main/scala/com/twitter/algebird/GeneratedAbstractAlgebra.scala b/algebird-core/src/main/scala/com/twitter/algebird/GeneratedAbstractAlgebra.scala index 006ec79bc..c9713e928 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/GeneratedAbstractAlgebra.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/GeneratedAbstractAlgebra.scala @@ -17,9 +17,7 @@ class Tuple2Semigroup[A, B](implicit asemigroup: Semigroup[A], bsemigroup: Semig else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2) - } + to.foreach { tuple => bufA.put(tuple._1); bufB.put(tuple._2) } Some((bufA.flush.get, bufB.flush.get)) } } @@ -69,9 +67,7 @@ class Tuple3Semigroup[A, B, C]( val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) val bufC = fromSumOption[C](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3) - } + to.foreach { tuple => bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3) } Some((bufA.flush.get, bufB.flush.get, bufC.flush.get)) } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Group.scala b/algebird-core/src/main/scala/com/twitter/algebird/Group.scala index 233234021..eb74c2755 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Group.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Group.scala @@ -76,12 +76,10 @@ object NullGroup extends ConstantGroup[Null](null) class OptionGroup[T](implicit group: Group[T]) extends OptionMonoid[T] with Group[Option[T]] { override def isNonZero(opt: Option[T]): Boolean = - opt.exists { group.isNonZero(_) } + opt.exists(group.isNonZero(_)) override def negate(opt: Option[T]): Option[T] = - opt.map { v => - group.negate(v) - } + opt.map(v => group.negate(v)) } /** diff --git a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala index 1cf85e72b..d9c574946 100644 --- 
a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala @@ -49,11 +49,7 @@ object HyperLogLog { val hashSize = 128 private[algebird] val negativePowersOfTwo: Array[Double] = - 0.to(hashSize) - .map { i => - math.pow(2.0, -i) - } - .toArray + 0.to(hashSize).map(i => math.pow(2.0, -i)).toArray def hash(input: Array[Byte]): Array[Byte] = { val (l0, l1) = Hash128.arrayByteHash.hash(input) @@ -405,9 +401,8 @@ case class SparseHLL(override val bits: Int, maxRhow: Map[Int, Max[Byte]]) exten override lazy val zeroCnt: Int = size - maxRhow.size - override lazy val z: Double = 1.0 / (zeroCnt.toDouble + maxRhow.values.map { mj => - HyperLogLog.negativePowersOfTwo(mj.get) - }.sum) + override lazy val z: Double = + 1.0 / (zeroCnt.toDouble + maxRhow.values.map(mj => HyperLogLog.negativePowersOfTwo(mj.get)).sum) override def +(other: HLL): HLL = other match { @@ -575,7 +570,7 @@ class HyperLogLogMonoid(val bits: Int) extends Monoid[HLL] with BoundedSemilatti private[this] final def denseUpdate(existing: HLL, iter: Iterator[HLL]): HLL = { val buffer = new Array[Byte](size) existing.updateInto(buffer) - iter.foreach { _.updateInto(buffer) } + iter.foreach(_.updateInto(buffer)) DenseHLL(bits, Bytes(buffer)) } @@ -612,9 +607,7 @@ class HyperLogLogMonoid(val bits: Int) extends Monoid[HLL] with BoundedSemilatti @deprecated("Use toHLL", since = "0.10.0 / 2015-05") def batchCreate[T](instances: Iterable[T])(implicit ev: T => Array[Byte]): HLL = { val allMaxRhow = instances - .map { x => - jRhoW(hash(x), bits) - } + .map(x => jRhoW(hash(x), bits)) .groupBy { case (j, _) => j } .map { case (j, iter) => (j, Max(iter.maxBy(_._2)._2)) } if (allMaxRhow.size * 16 <= size) { @@ -650,9 +643,9 @@ class HyperLogLogMonoid(val bits: Int) extends Monoid[HLL] with BoundedSemilatti * since + on HLLInstance creates the instance for the union. 
*/ sizeOf(head) + intersectionSize(tail) - - intersectionSize(tail.map { _ + head }) + intersectionSize(tail.map(_ + head)) } - .map { _.withMin(0L) } // We always know the intersection is >= 0 + .map(_.withMin(0L)) // We always know the intersection is >= 0 .getOrElse(Approximate.exact(0L)) // Empty lists have no intersection } @@ -740,7 +733,7 @@ abstract class SetSizeAggregatorBase[A](hllBits: Int, maxSetSize: Int) override val leftSemigroup = new HyperLogLogMonoid(hllBits) override val rightAggregator: MonoidAggregator[A, Set[A], Long] = - Aggregator.uniqueCount[A].andThenPresent { _.toLong } + Aggregator.uniqueCount[A].andThenPresent(_.toLong) } case class SetSizeAggregator[A](hllBits: Int, maxSetSize: Int = 10)(implicit toBytes: A => Array[Byte]) @@ -756,7 +749,5 @@ case class SetSizeAggregator[A](hllBits: Int, maxSetSize: Int = 10)(implicit toB case class SetSizeHashAggregator[A](hllBits: Int, maxSetSize: Int = 10)(implicit hash: Hash128[A]) extends SetSizeAggregatorBase[A](hllBits, maxSetSize) { override def convert(set: Set[A]): HLL = - leftSemigroup.sum(set.iterator.map { a => - leftSemigroup.toHLL(a)(hash) - }) + leftSemigroup.sum(set.iterator.map(a => leftSemigroup.toHLL(a)(hash))) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala index b1a9b02ec..328e00c33 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala @@ -84,7 +84,7 @@ case class HLLSeries(bits: Int, rows: Vector[Map[Int, Long]]) { * @return New HLLSeries only including RhoWs for values seen at or after the given timestamp */ def since(threshold: Long): HLLSeries = - HLLSeries(bits, rows.map { _.filter { case (_, ts) => ts >= threshold } }) + HLLSeries(bits, rows.map(_.filter { case (_, ts) => ts >= threshold })) def toHLL: HLL = { val monoid = new HyperLogLogMonoid(bits) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/IndexedSeq.scala b/algebird-core/src/main/scala/com/twitter/algebird/IndexedSeq.scala index c126f5745..cbbafdb78 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/IndexedSeq.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/IndexedSeq.scala @@ -36,9 +36,7 @@ class IndexedSeqSemigroup[T](implicit semi: Semigroup[T]) extends Semigroup[Inde val sum = leftSummand .zip(rightSummand) - .map { tup => - semi.plus(tup._1, tup._2) - } + .map(tup => semi.plus(tup._1, tup._2)) (sum ++ remainder).toIndexedSeq } @@ -49,15 +47,13 @@ class IndexedSeqMonoid[T](implicit mont: Monoid[T]) with Monoid[IndexedSeq[T]] { override def zero: IndexedSeq[T] = IndexedSeq.empty[T] override def isNonZero(v: IndexedSeq[T]): Boolean = - v.exists { t => - mont.isNonZero(t) - } + v.exists(t => mont.isNonZero(t)) } class IndexedSeqGroup[T](implicit grp: Group[T]) extends IndexedSeqMonoid[T]()(grp) with Group[IndexedSeq[T]] { - override def negate(g: IndexedSeq[T]): IndexedSeq[T] = g.map { grp.negate(_) } + override def negate(g: IndexedSeq[T]): IndexedSeq[T] = g.map(grp.negate(_)) } class IndexedSeqRing[T](implicit rng: Ring[T]) extends IndexedSeqGroup[T]()(rng) with Ring[IndexedSeq[T]] { @@ -70,8 +66,6 @@ class IndexedSeqRing[T](implicit rng: Ring[T]) extends IndexedSeqGroup[T]()(rng) // We don't need to pad, because 0 * x = 0 left.view .zip(right) - .map { tup => - rng.times(tup._1, tup._2) - } + .map(tup => rng.times(tup._1, tup._2)) .toIndexedSeq } diff --git 
a/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala b/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala index 9e3e9b67e..33094f881 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala @@ -87,7 +87,7 @@ object Interval extends java.io.Serializable { type ExLowInUp[T] = Intersection[ExclusiveLower, InclusiveUpper, T] implicit def monoid[T: Ordering]: Monoid[Interval[T]] = - Monoid.from[Interval[T]](Universe[T]()) { _ && _ } + Monoid.from[Interval[T]](Universe[T]())(_ && _) // Automatically convert from a MaybeEmpty instance implicit def fromMaybeEmpty[T, NonEmpty[t] <: Interval[t]](me: MaybeEmpty[T, NonEmpty]): Interval[T] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala b/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala index a675856ec..e26e1f236 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala @@ -114,9 +114,7 @@ class JMapMonoid[K, V: Semigroup] extends Monoid[JMap[K, V]] { override def isNonZero(x: JMap[K, V]): Boolean = !x.isEmpty && (implicitly[Semigroup[V]] match { case mon: Monoid[_] => - x.values.asScala.exists { v => - mon.isNonZero(v) - } + x.values.asScala.exists(v => mon.isNonZero(v)) case _ => true }) override def plus(x: JMap[K, V], y: JMap[K, V]): JHashMap[K, V] = { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Last.scala b/algebird-core/src/main/scala/com/twitter/algebird/Last.scala index 1d0379180..edfc4786a 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Last.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Last.scala @@ -57,9 +57,7 @@ private[algebird] sealed abstract class LastInstances { override def plus(l: T, r: T): T = r override def sumOption(ts: TraversableOnce[T]): Option[T] = { var res: Option[T] = None - ts.foreach { t => - res = Some(t) - } + ts.foreach(t => res = Some(t)) res } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala b/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala index 688d69ee9..ed2616264 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala @@ -39,9 +39,7 @@ abstract class GenericMapMonoid[K, V, M <: ScMap[K, V]](implicit val semigroup: override def isNonZero(x: M): Boolean = !x.isEmpty && (semigroup match { case mon: Monoid[_] => - x.valuesIterator.exists { v => - mon.isNonZero(v) - } + x.valuesIterator.exists(v => mon.isNonZero(v)) case _ => true }) @@ -190,9 +188,7 @@ object MapAlgebra { keys: TraversableOnce[T] )(lookup: T => Option[V])(present: T => U): Map[U, V] = sumByKey { - keys.iterator.map { k => - present(k) -> lookup(k).getOrElse(Monoid.zero[V]) - } + keys.iterator.map(k => present(k) -> lookup(k).getOrElse(Monoid.zero[V])) } // Returns a new map with zero-value entries removed @@ -211,7 +207,7 @@ object MapAlgebra { * pairs.groupBy(_._1).mapValues(_.map(_._2).sum) */ def sumByKey[K, V: Semigroup](pairs: TraversableOnce[(K, V)]): Map[K, V] = - Monoid.sum(pairs.iterator.map { Map(_) }) + Monoid.sum(pairs.iterator.map(Map(_))) /** * For each key, creates a list of all values. 
This function is equivalent to: @@ -260,7 +256,7 @@ object MapAlgebra { def nonEmptyIter[T](i: Iterable[T]): Iterable[Option[T]] = if (i.isEmpty) Iterable(None) else { - i.map { Some(_) } + i.map(Some(_)) } Monoid.sum { @@ -301,9 +297,7 @@ object MapAlgebra { def cubeAggregate[T, K, U, V](it: TraversableOnce[T], agg: Aggregator[T, U, V])( fn: T => K )(implicit c: Cuber[K]): Map[c.K, V] = - sumByKey(it.iterator.flatMap { t => - c(fn(t)).iterator.map((_, agg.prepare(t))) - })(agg.semigroup) + sumByKey(it.iterator.flatMap(t => c(fn(t)).iterator.map((_, agg.prepare(t)))))(agg.semigroup) .map { case (k, v) => (k, agg.present(v)) } def rollup[K, V](it: TraversableOnce[(K, V)])(implicit r: Roller[K]): Map[r.K, List[V]] = { @@ -327,9 +321,7 @@ object MapAlgebra { def rollupAggregate[T, K, U, V](it: TraversableOnce[T], agg: Aggregator[T, U, V])( fn: T => K )(implicit r: Roller[K]): Map[r.K, V] = - sumByKey(it.iterator.flatMap { t => - r(fn(t)).iterator.map((_, agg.prepare(t))) - })(agg.semigroup) + sumByKey(it.iterator.flatMap(t => r(fn(t)).iterator.map((_, agg.prepare(t)))))(agg.semigroup) .map { case (k, v) => (k, agg.present(v)) } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala b/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala index 621f787e1..5efa54a94 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala @@ -73,39 +73,35 @@ abstract class MinHasher[H](val numHashes: Int, val numBands: Int)(implicit n: N private val hashFunctions = { val r = new scala.util.Random(seed) val numHashFunctions = math.ceil(numBytes / 16.0).toInt - (1 to numHashFunctions).map { _ => - MurmurHash128(r.nextLong) - } + (1 to numHashFunctions).map(_ => MurmurHash128(r.nextLong)) } /** Signature for empty set, needed to be a proper Monoid */ - override val zero: MinHashSignature = MinHashSignature(buildArray { maxHash }) + override val zero: MinHashSignature = MinHashSignature(buildArray(maxHash)) /** Set union */ override def plus(left: MinHashSignature, right: MinHashSignature): MinHashSignature = - MinHashSignature(buildArray(left.bytes, right.bytes) { (l, r) => - n.min(l, r) - }) + MinHashSignature(buildArray(left.bytes, right.bytes)((l, r) => n.min(l, r))) /** Esimate Jaccard similarity (size of union / size of intersection) */ def similarity(left: MinHashSignature, right: MinHashSignature): Double = - buildArray(left.bytes, right.bytes) { (l, r) => - if (l == r) n.one else n.zero - }.map { _.toDouble }.sum / numHashes + buildArray(left.bytes, right.bytes)((l, r) => if (l == r) n.one else n.zero) + .map(_.toDouble) + .sum / numHashes /** Bucket keys to use for quickly finding other similar items via locality sensitive hashing */ def buckets(sig: MinHashSignature): List[Long] = sig.bytes .grouped(numRows * hashSize) - .filter { _.size == numRows * hashSize } - .map { hashFunctions.head(_)._1 } + .filter(_.size == numRows * hashSize) + .map(hashFunctions.head(_)._1) .toList /** Create a signature for a single Long value */ - def init(value: Long): MinHashSignature = init { _(value) } + def init(value: Long): MinHashSignature = init(_(value)) /** Create a signature for a single String value */ - def init(value: String): MinHashSignature = init { _(value) } + def init(value: String): MinHashSignature = init(_(value)) /** Create a signature for an arbitrary value */ def init(fn: MurmurHash128 => (Long, Long)): MinHashSignature = { @@ -162,7 +158,7 @@ class 
MinHasher32(numHashes: Int, numBands: Int) extends MinHasher[Int](numHashe ): Array[Byte] = { val leftBuffer = ByteBuffer.wrap(left).asIntBuffer val rightBuffer = ByteBuffer.wrap(right).asIntBuffer - buildArray { fn(leftBuffer.get, rightBuffer.get) } + buildArray(fn(leftBuffer.get, rightBuffer.get)) } /** Seems to work, but experimental and not generic yet */ @@ -199,6 +195,6 @@ class MinHasher16(numHashes: Int, numBands: Int) extends MinHasher[Char](numHash ): Array[Byte] = { val leftBuffer = ByteBuffer.wrap(left).asCharBuffer val rightBuffer = ByteBuffer.wrap(right).asCharBuffer - buildArray { fn(leftBuffer.get, rightBuffer.get) } + buildArray(fn(leftBuffer.get, rightBuffer.get)) } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala index f27cd71a7..2c96aeda3 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala @@ -62,9 +62,7 @@ object Moments { val aggregator: MomentsAggregator.type = MomentsAggregator def numericAggregator[N](implicit num: Numeric[N]): MonoidAggregator[N, Moments, Moments] = - Aggregator.prepareMonoid { n: N => - Moments(num.toDouble(n)) - } + Aggregator.prepareMonoid { n: N => Moments(num.toDouble(n)) } // Create a Moments object given a single value. This is useful for // initializing moment calculations at the start of a stream. diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Monad.scala b/algebird-core/src/main/scala/com/twitter/algebird/Monad.scala index cf4dc6937..0a74804aa 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Monad.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Monad.scala @@ -38,11 +38,7 @@ trait Monad[M[_]] extends Applicative[M] { override def map[T, U](m: M[T])(fn: (T) => U): M[U] = flatMap(m)((t: T) => apply(fn(t))) override def join[T, U](mt: M[T], mu: M[U]): M[(T, U)] = - flatMap(mt) { (t: T) => - map(mu) { (u: U) => - (t, u) - } - } + flatMap(mt)((t: T) => map(mu)((u: U) => (t, u))) } /** @@ -65,9 +61,7 @@ object Monad { if (xs.isEmpty) monad.apply(acc) else - monad.flatMap(fn(acc, xs.head)) { t: T => - foldM(t, xs.tail)(fn) - } + monad.flatMap(fn(acc, xs.head)) { t: T => foldM(t, xs.tail)(fn) } // Some instances of the Monad typeclass (case for a macro): implicit val list: Monad[List] = new Monad[List] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala index 0255a1e4e..3962a0321 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala @@ -83,7 +83,7 @@ class OptionMonoid[T](implicit semi: Semigroup[T]) extends Monoid[Option[T]] { } override def sumOption(items: TraversableOnce[Option[T]]): Option[Option[T]] = if (items.isEmpty) None - else Some(semi.sumOption(items.filter(_.isDefined).map { _.get })) + else Some(semi.sumOption(items.filter(_.isDefined).map(_.get))) } class EitherMonoid[L, R](implicit semigroupl: Semigroup[L], monoidr: Monoid[R]) @@ -113,7 +113,7 @@ class ListMonoid[T] extends Monoid[List[T]] { // ListBuilder mutates the tail of the list until // result is called so that it is O(N) to push N things on, not N^2 val builder = List.newBuilder[T] - items.foreach { builder ++= _ } + items.foreach(builder ++= _) Some(builder.result()) } } @@ -126,7 +126,7 @@ class SeqMonoid[T] extends Monoid[Seq[T]] { if 
(items.isEmpty) None else { val builder = Seq.newBuilder[T] - items.foreach { builder ++= _ } + items.foreach(builder ++= _) Some(builder.result()) } } @@ -171,9 +171,7 @@ class SetMonoid[T] extends Monoid[Set[T]] { if (items.isEmpty) None else { val builder = Set.newBuilder[T] - items.foreach { s => - builder ++= s - } + items.foreach(s => builder ++= s) Some(builder.result()) } } @@ -187,9 +185,7 @@ class Function1Monoid[T] extends Monoid[Function1[T, T]] { // (f1 + f2)(x) = f2(f1(x)) so that: // listOfFn.foldLeft(x) { (v, fn) => fn(v) } = (Monoid.sum(listOfFn))(x) - override def plus(f1: Function1[T, T], f2: Function1[T, T]): T => T = { (t: T) => - f2(f1(t)) - } + override def plus(f1: Function1[T, T], f2: Function1[T, T]): T => T = { (t: T) => f2(f1(t)) } } // To use the OrValMonoid wrap your item in a OrVal object diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MurmurHash.scala b/algebird-core/src/main/scala/com/twitter/algebird/MurmurHash.scala index 01729b754..6a5c4653e 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MurmurHash.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MurmurHash.scala @@ -16,33 +16,31 @@ case class MurmurHash128(seed: Long) extends AnyVal { apply(buffer, 0, maxBytes) } def apply(array: Array[Char]): (Long, Long) = - apply(array.size * 2, { _.asCharBuffer.put(array) }) + apply(array.size * 2, _.asCharBuffer.put(array)) def apply(array: Array[Short]): (Long, Long) = - apply(array.size * 2, { _.asShortBuffer.put(array) }) + apply(array.size * 2, _.asShortBuffer.put(array)) def apply(array: Array[Int]): (Long, Long) = - apply(array.size * 4, { _.asIntBuffer.put(array) }) + apply(array.size * 4, _.asIntBuffer.put(array)) def apply(array: Array[Float]): (Long, Long) = - apply(array.size * 4, { _.asFloatBuffer.put(array) }) + apply(array.size * 4, _.asFloatBuffer.put(array)) def apply(array: Array[Long]): (Long, Long) = - apply(array.size * 8, { _.asLongBuffer.put(array) }) + apply(array.size * 8, _.asLongBuffer.put(array)) def apply(array: Array[Double]): (Long, Long) = - apply(array.size * 8, { _.asDoubleBuffer.put(array) }) + apply(array.size * 8, _.asDoubleBuffer.put(array)) - def apply(value: Char): (Long, Long) = apply(2, { _.asCharBuffer.put(value) }) + def apply(value: Char): (Long, Long) = apply(2, _.asCharBuffer.put(value)) def apply(value: Short): (Long, Long) = - apply(2, { _.asShortBuffer.put(value) }) - def apply(value: Int): (Long, Long) = apply(4, { _.asIntBuffer.put(value) }) + apply(2, _.asShortBuffer.put(value)) + def apply(value: Int): (Long, Long) = apply(4, _.asIntBuffer.put(value)) def apply(value: Float): (Long, Long) = - apply(4, { _.asFloatBuffer.put(value) }) - def apply(value: Long): (Long, Long) = apply(8, { _.asLongBuffer.put(value) }) + apply(4, _.asFloatBuffer.put(value)) + def apply(value: Long): (Long, Long) = apply(8, _.asLongBuffer.put(value)) def apply(value: Double): (Long, Long) = - apply(8, { _.asDoubleBuffer.put(value) }) + apply(8, _.asDoubleBuffer.put(value)) def apply(string: CharSequence): (Long, Long) = apply(string.length * 2, { buffer => val charBuffer = buffer.asCharBuffer - 0.to(string.length - 1).foreach { i => - charBuffer.put(string.charAt(i)) - } + 0.to(string.length - 1).foreach(i => charBuffer.put(string.charAt(i))) }) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Predecessible.scala b/algebird-core/src/main/scala/com/twitter/algebird/Predecessible.scala index 0d89d7d26..880627177 100644 --- 
a/algebird-core/src/main/scala/com/twitter/algebird/Predecessible.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Predecessible.scala @@ -32,7 +32,7 @@ trait Predecessible[T] extends java.io.Serializable { new AbstractIterable[T] { override def iterator: Iterator[T] = Iterator - .iterate[Option[T]](Some(old)) { self.prev(_) } + .iterate[Option[T]](Some(old))(self.prev(_)) .takeWhile(_.isDefined) .collect { case Some(t) => t } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Preparer.scala b/algebird-core/src/main/scala/com/twitter/algebird/Preparer.scala index 7e421d9ad..f576498d3 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Preparer.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Preparer.scala @@ -34,14 +34,10 @@ sealed trait Preparer[A, T] extends java.io.Serializable { * Filter out values that do not meet the predicate. * Like flatMap, this limits future aggregations to MonoidAggregator. */ - def filter(fn: T => Boolean): FlatMapPreparer[A, T] = flatMap { t => - if (fn(t)) Some(t) else None - } + def filter(fn: T => Boolean): FlatMapPreparer[A, T] = flatMap(t => if (fn(t)) Some(t) else None) def collect[U](p: PartialFunction[T, U]): FlatMapPreparer[A, U] = - flatMap { t => - if (p.isDefinedAt(t)) Some(p(t)) else None - } + flatMap(t => if (p.isDefinedAt(t)) Some(p(t)) else None) /** * count and following methods all just call monoidAggregate with one of the standard Aggregators. @@ -195,14 +191,10 @@ trait FlatMapPreparer[A, T] extends Preparer[A, T] { def prepareFn: A => TraversableOnce[T] def map[U](fn: T => U): FlatMapPreparer[A, U] = - FlatMapPreparer { a: A => - prepareFn(a).map(fn) - } + FlatMapPreparer { a: A => prepareFn(a).map(fn) } override def flatMap[U](fn: T => TraversableOnce[U]): FlatMapPreparer[A, U] = - FlatMapPreparer { a: A => - prepareFn(a).flatMap(fn) - } + FlatMapPreparer { a: A => prepareFn(a).flatMap(fn) } override def monoidAggregate[B, C](aggregator: MonoidAggregator[T, B, C]): MonoidAggregator[A, B, C] = aggregator.sumBefore.composePrepare(prepareFn) @@ -256,14 +248,10 @@ object FlatMapPreparer { override val prepareFn: TraversableOnce[A] => TraversableOnce[A] = (a: TraversableOnce[A]) => a override def map[U](fn: A => U): FlatMapPreparer[TraversableOnce[A], U] = - FlatMapPreparer { a: TraversableOnce[A] => - a.map(fn) - } + FlatMapPreparer { a: TraversableOnce[A] => a.map(fn) } override def flatMap[U](fn: A => TraversableOnce[U]): FlatMapPreparer[TraversableOnce[A], U] = - FlatMapPreparer { a: TraversableOnce[A] => - a.flatMap(fn) - } + FlatMapPreparer { a: TraversableOnce[A] => a.flatMap(fn) } override def monoidAggregate[B, C]( aggregator: MonoidAggregator[A, B, C] diff --git a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala index baad513ee..3db3a4c0d 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala @@ -135,9 +135,7 @@ object QTree { private[algebird] val cachedRangeLowerBound: Int = cachedRangeCacheSize * -1 private[algebird] val rangeLut: Array[Double] = (cachedRangeLowerBound until cachedRangeCacheSize) - .map { level => - math.pow(2.0, level) - } + .map(level => math.pow(2.0, level)) .toArray[Double] } @@ -348,7 +346,7 @@ class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( (s, s) } else if (from < upperBound && to >= lowerBound) { val ((lower1, upper1), (lower2, upper2)) = - 
mapChildrenWithDefault((0L, 0L)) { _.rangeCountBounds(from, to) } + mapChildrenWithDefault((0L, 0L))(_.rangeCountBounds(from, to)) (lower1 + lower2, parentCount + upper1 + upper2) } else { (0L, 0L) @@ -406,7 +404,7 @@ class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( * to estimate serialization size. */ def size: Int = { - val childSizes = mapChildrenWithDefault(0) { _.size } + val childSizes = mapChildrenWithDefault(0)(_.size) 1 + childSizes._1 + childSizes._2 } @@ -414,7 +412,7 @@ class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( * Total sum over the entire tree. */ def totalSum(implicit monoid: Monoid[A]): A = { - val childSums = mapChildrenWithDefault(monoid.zero) { _.totalSum } + val childSums = mapChildrenWithDefault(monoid.zero)(_.totalSum) monoid.plus(sum, monoid.plus(childSums._1, childSums._2)) } @@ -422,7 +420,7 @@ class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( (lowerChild.map(fn).getOrElse(default), upperChild.map(fn).getOrElse(default)) private def parentCount = { - val childCounts = mapChildrenWithDefault(0L) { _.count } + val childCounts = mapChildrenWithDefault(0L)(_.count) _count - childCounts._1 - childCounts._2 } @@ -437,8 +435,8 @@ class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( print(" (" + parentCount + ")") } println(" {" + _sum + "}") - lowerChild.foreach { _.dump } - upperChild.foreach { _.dump } + lowerChild.foreach(_.dump) + upperChild.foreach(_.dump) } /** diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala index b97e3d2b8..ec94c9cfa 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala @@ -29,20 +29,17 @@ object SGD { val xsPlusConst = xs :+ 1.0 val err = dot(w, xsPlusConst) - y // Here is the gradient - xsPlusConst.map { _ * err } + xsPlusConst.map(_ * err) } def dot(x: IndexedSeq[Double], y: IndexedSeq[Double]): Double = x.view.zip(y).map { case (a: Double, b: Double) => a * b }.sum // Here are some step algorithms: - def constantStep(s: Double): (Long, IndexedSeq[Double]) => Double = { (_, _) => - s - } + def constantStep(s: Double): (Long, IndexedSeq[Double]) => Double = { (_, _) => s } // A standard: a/(steps + b)^c def countAdaptiveStep(a: Double, b: Double, c: Double = 1.0): (Long, IndexedSeq[Double]) => Double = { - (cnt, _) => - a / scala.math.pow((cnt + b), c) + (cnt, _) => a / scala.math.pow((cnt + b), c) } def weightsOf[T](s: SGD[T]): Option[IndexedSeq[Double]] = @@ -99,9 +96,7 @@ class SGDMonoid[Pos]( case (_, SGDZero) => left case (SGDPos(llps), SGDPos(rlps)) => SGDPos(llps ::: rlps) case (rsw @ SGDWeights(_, _), SGDPos(p)) => - p.foldLeft(rsw) { (cntWeight, pos) => - newWeights(cntWeight, pos) - } + p.foldLeft(rsw)((cntWeight, pos) => newWeights(cntWeight, pos)) // TODO make a RightFolded2 which folds A,B => (B,C), and a group on C. case _ => right } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala b/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala index f7a5204d4..562442703 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala @@ -57,7 +57,7 @@ trait Semigroup[@specialized(Int, Long, Float, Double) T] extends ASemigroup[T] * `iter.reduceLeftOption` using [[plus]]. 
*/ def sumOption(iter: TraversableOnce[T]): Option[T] = - iter.reduceLeftOption { plus(_, _) } + iter.reduceLeftOption(plus(_, _)) /* * These are methods from algebra diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala b/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala index f3e6953d4..738567a5a 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala @@ -95,8 +95,8 @@ class SketchMapMonoid[K, V](val params: SketchMapParams[K])( * Create a Sketch Map sketch from a sequence of pairs. */ def create(data: Seq[(K, V)]): SketchMap[K, V] = { - val heavyHitters = data.map { _._1 } - val totalValue = Monoid.sum(data.map { _._2 }) + val heavyHitters = data.map(_._1) + val totalValue = Monoid.sum(data.map(_._2)) val initTable = AdaptiveMatrix.fill[V](params.depth, params.width)(monoid.zero) /* For each row, update the table for each K,V pair */ @@ -128,9 +128,7 @@ class SketchMapMonoid[K, V](val params: SketchMapParams[K])( * Returns a sorted list of heavy hitter key/value tuples. */ def heavyHitters(sm: SketchMap[K, V]): List[(K, V)] = - sm.heavyHitterKeys.map { item => - (item, frequency(sm, item)) - } + sm.heavyHitterKeys.map(item => (item, frequency(sm, item))) } /** @@ -169,7 +167,7 @@ case class SketchMapParams[K](seed: Int, width: Int, depth: Int, heavyHittersCou def updatedHeavyHitters[V: Ordering](hitters: Seq[K], table: AdaptiveMatrix[V]): List[K] = { val mapping: Map[K, V] = hitters.iterator.map(item => (item, frequency(item, table))).toMap - val specificOrdering = Ordering.by[K, V] { mapping(_) }.reverse + val specificOrdering = Ordering.by[K, V](mapping(_)).reverse hitters.sorted(specificOrdering).take(heavyHittersCount).toList } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala index 4e65fc944..18b104f2a 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala @@ -49,7 +49,11 @@ object SpaceSaver { .put(itemAsBytes) buffer - case SSMany(capacity, counters, _) => //We do not care about the buckets are thery are created by SSMany.apply + case SSMany( + capacity, + counters, + _ + ) => //We do not care about the buckets are thery are created by SSMany.apply val buffer = scala.collection.mutable.ArrayBuffer.newBuilder[Byte] buffer += (2: Byte) @@ -190,9 +194,7 @@ sealed abstract class SpaceSaver[T] { * Returns boolean indicating if they are consistent */ def consistentWith(that: SpaceSaver[T]): Boolean = - (counters.keys ++ that.counters.keys).forall { item => - (frequency(item) - that.frequency(item)) ~ 0 - } + (counters.keys ++ that.counters.keys).forall(item => (frequency(item) - that.frequency(item)) ~ 0) } case class SSOne[T] private[algebird] (override val capacity: Int, item: T) extends SpaceSaver[T] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Successible.scala b/algebird-core/src/main/scala/com/twitter/algebird/Successible.scala index 8828a40db..0e38b51dd 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Successible.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Successible.scala @@ -35,7 +35,7 @@ trait Successible[T] extends Serializable { new AbstractIterable[T] { override def iterator: Iterator[T] = Iterator - .iterate[Option[T]](Some(old)) { self.next(_) } + 
.iterate[Option[T]](Some(old))(self.next(_)) .takeWhile(_.isDefined) .collect { case Some(t) => t } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala b/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala index 3e5a8dd7a..dbb27c0e9 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala @@ -43,10 +43,8 @@ class SummingCache[K, V](capacity: Int)(implicit sgv: Semigroup[V]) extends Stat case (k, v) => val newV = cache .get(k) - .map { oldV => - sgv.plus(oldV, v) - } - .getOrElse { v } + .map(oldV => sgv.plus(oldV, v)) + .getOrElse(v) (k, newV) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SummingQueue.scala b/algebird-core/src/main/scala/com/twitter/algebird/SummingQueue.scala index 3114dd1af..ab1fbdacb 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SummingQueue.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SummingQueue.scala @@ -76,5 +76,5 @@ class SummingQueue[V] private (capacity: Int)(override implicit val semigroup: S queue.drainTo(toSum.asJava) Semigroup.sumOption(toSum) } - override def isFlushed: Boolean = queueOption.map { _.size == 0 }.getOrElse(true) + override def isFlushed: Boolean = queueOption.map(_.size == 0).getOrElse(true) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala index 9b3940c72..2c8d540de 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala @@ -55,9 +55,7 @@ class TopKMonoid[T](k: Int)(implicit ord: Ordering[T]) extends Monoid[TopK[T]] { override lazy val zero: TopK[T] = TopK[T](0, List[T](), None) def build(t: T): TopK[T] = TopK(1, List(t), Some(t)) - def build(ts: Iterable[T]): TopK[T] = ts.foldLeft(zero) { (acc, t) => - plus(acc, build(t)) - } + def build(ts: Iterable[T]): TopK[T] = ts.foldLeft(zero)((acc, t) => plus(acc, build(t))) override def plus(left: TopK[T], right: TopK[T]): TopK[T] = { val (bigger, smaller) = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala b/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala index 892a41fa9..d429aa283 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala @@ -41,9 +41,7 @@ private object VectorSpaceOps extends VectorSpaceOps sealed trait Implicits extends LowPrioImpicits { implicit def indexedSeqSpace[T: Ring] = - VectorSpaceOps.from[T, IndexedSeq] { (s, seq) => - seq.map(Ring.times(s, _)) - } + VectorSpaceOps.from[T, IndexedSeq]((s, seq) => seq.map(Ring.times(s, _))) } sealed trait LowPrioImpicits { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/field.scala b/algebird-core/src/main/scala/com/twitter/algebird/field.scala index deaa285dd..a34644fa3 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/field.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/field.scala @@ -20,7 +20,7 @@ object field { override def plus(l: Float, r: Float): Float = l + r override def sum(v: TraversableOnce[Float]): Float = { var sum = 0.0f - v.foreach { sum += _ } + v.foreach(sum += _) return sum } override def trySum(v: TraversableOnce[Float]): Option[Float] = @@ -36,7 +36,7 @@ object field { override def plus(l: JFloat, r: JFloat): JFloat = l + r override def 
sum(v: TraversableOnce[JFloat]): JFloat = { var sum = 0.0f - v.foreach { sum += _ } + v.foreach(sum += _) return sum } override def trySum(v: TraversableOnce[JFloat]): Option[JFloat] = @@ -52,7 +52,7 @@ object field { override def plus(l: Double, r: Double): Double = l + r override def sum(v: TraversableOnce[Double]): Double = { var sum = 0.0 - v.foreach { sum += _.floatValue } + v.foreach(sum += _.floatValue) return sum } override def trySum(v: TraversableOnce[Double]): Option[Double] = @@ -68,7 +68,7 @@ object field { override def plus(l: JDouble, r: JDouble): JDouble = l + r override def sum(v: TraversableOnce[JDouble]): JDouble = { var sum = 0.0 - v.foreach { sum += _.doubleValue } + v.foreach(sum += _.doubleValue) return sum } override def trySum(v: TraversableOnce[JDouble]): Option[JDouble] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/macros/Cuber.scala b/algebird-core/src/main/scala/com/twitter/algebird/macros/Cuber.scala index 1138e5f1c..3bb1897bf 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/macros/Cuber.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/macros/Cuber.scala @@ -47,9 +47,7 @@ object Cuber { val tupleName = { val types = getParamTypes(c) - val optionTypes = types.map { t => - tq"_root_.scala.Option[$t]" - } + val optionTypes = types.map(t => tq"_root_.scala.Option[$t]") val tupleType = typeName(c)(s"Tuple${arity}") tq"_root_.scala.$tupleType[..$optionTypes]" } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/macros/Roller.scala b/algebird-core/src/main/scala/com/twitter/algebird/macros/Roller.scala index 078a6c406..919c746d0 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/macros/Roller.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/macros/Roller.scala @@ -53,9 +53,7 @@ object Roller { val tupleName = { val types = getParamTypes(c) - val optionTypes = types.map { t => - tq"_root_.scala.Option[$t]" - } + val optionTypes = types.map(t => tq"_root_.scala.Option[$t]") val tupleType = typeName(c)(s"Tuple${arity}") tq"_root_.scala.$tupleType[..$optionTypes]" } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala b/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala index 9031777e2..754a420c6 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala @@ -114,9 +114,7 @@ object AdaptiveMatrix { val iter = items.toIterator.buffered val rows = iter.head.rows val cols = iter.head.cols - val sparseStorage = (0 until rows).map { _ => - MMap[Int, V]() - }.toIndexedSeq + val sparseStorage = (0 until rows).map(_ => MMap[Int, V]()).toIndexedSeq while (iter.hasNext) { val current = iter.next diff --git a/algebird-core/src/main/scala/com/twitter/algebird/matrix/SparseColumnMatrix.scala b/algebird-core/src/main/scala/com/twitter/algebird/matrix/SparseColumnMatrix.scala index 3b0202aa5..69f553360 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/matrix/SparseColumnMatrix.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/matrix/SparseColumnMatrix.scala @@ -21,9 +21,7 @@ import com.twitter.algebird.{AdaptiveVector, Monoid} object SparseColumnMatrix { def fromSeqMap[V: Monoid](cols: Int, data: IndexedSeq[MMap[Int, V]]): SparseColumnMatrix[V] = { val monoidZero = implicitly[Monoid[V]].zero - SparseColumnMatrix(data.map { mm => - AdaptiveVector.fromMap(mm.toMap, monoidZero, cols) - }.toIndexedSeq) + 
SparseColumnMatrix(data.map(mm => AdaptiveVector.fromMap(mm.toMap, monoidZero, cols)).toIndexedSeq) } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/monad/Reader.scala b/algebird-core/src/main/scala/com/twitter/algebird/monad/Reader.scala index e06e02f45..f1e4c97dd 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/monad/Reader.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/monad/Reader.scala @@ -28,9 +28,7 @@ sealed trait Reader[-Env, +T] { def flatMap[E1 <: Env, U](next: T => Reader[E1, U]): Reader[E1, U] = FlatMappedReader[E1, T, U](this, next) def map[U](thatFn: T => U): Reader[Env, U] = - FlatMappedReader(this, { (t: T) => - ConstantReader(thatFn(t)) - }) + FlatMappedReader(this, (t: T) => ConstantReader(thatFn(t))) } final case class ConstantReader[+T](get: T) extends Reader[Any, T] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala b/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala index f2439b0ca..929694a66 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala @@ -37,7 +37,7 @@ sealed trait StateWithError[S, +F, +T] { sgs: Semigroup[S] ): // TODO: deep joins could blow the stack, not yet using trampoline here StateWithError[S, F1, (T, U)] = - StateFn({ (requested: S) => + StateFn { (requested: S) => (run(requested), that.run(requested)) match { case (Right((s1, r1)), Right((s2, r2))) => Right((sgs.plus(s1, s2), (r1, r2))) @@ -46,7 +46,7 @@ sealed trait StateWithError[S, +F, +T] { case (Left(err), _) => Left(err) case (_, Left(err)) => Left(err) } - }) + } def apply(state: S): Either[F, (S, T)] = run(state) @@ -56,9 +56,7 @@ sealed trait StateWithError[S, +F, +T] { FlatMappedState(this, next) def map[U](fn: (T) => U): StateWithError[S, F, U] = - FlatMappedState(this, { (t: T) => - StateWithError.const(fn(t)) - }) + FlatMappedState(this, (t: T) => StateWithError.const(fn(t))) } /** Simple wrapper of a function in the Monad */ @@ -92,26 +90,16 @@ final case class FlatMappedState[S, F, T, U](start: StateWithError[S, F, T], fn: object StateWithError { def getState[S]: StateWithError[S, Nothing, S] = - StateFn({ (state: S) => - Right((state, state)) - }) + StateFn((state: S) => Right((state, state))) def putState[S](newState: S): StateWithError[S, Nothing, Unit] = - StateFn({ (_: S) => - Right((newState, ())) - }) + StateFn((_: S) => Right((newState, ()))) def swapState[S](newState: S): StateWithError[S, Nothing, S] = - StateFn({ (old: S) => - Right((newState, old)) - }) + StateFn((old: S) => Right((newState, old))) def const[S, T](t: T): StateWithError[S, Nothing, T] = - StateFn({ (state: S) => - Right((state, t)) - }) + StateFn((state: S) => Right((state, t))) def lazyVal[S, T](t: => T): StateWithError[S, Nothing, T] = - StateFn({ (state: S) => - Right((state, t)) - }) + StateFn((state: S) => Right((state, t))) def failure[S, F](f: F): StateWithError[S, F, Nothing] = StateFn(_ => Left(f)) @@ -121,16 +109,12 @@ object StateWithError { */ def fromEither[S] = new ConstantStateMaker[S] class ConstantStateMaker[S] { - def apply[F, T](either: Either[F, T]): StateWithError[S, F, T] = { (s: S) => - either.right.map { (s, _) } - } + def apply[F, T](either: Either[F, T]): StateWithError[S, F, T] = { (s: S) => either.right.map((s, _)) } } class FunctionLifter[S] { def apply[I, F, T](fn: I => Either[F, T]): (I => StateWithError[S, F, T]) = { (i: I) => - StateFn({ (s: 
S) => - fn(i).right.map { (s, _) } - }) + StateFn((s: S) => fn(i).right.map((s, _))) } } // TODO this should move to Monad and work for any Monad @@ -141,9 +125,7 @@ object StateWithError { new StateFMonad[F, S] class StateFMonad[F, S] extends Monad[({ type Result[T] = StateWithError[S, F, T] })#Result] { - override def apply[T](const: T): StateWithError[S, Nothing, T] = { (s: S) => - Right((s, const)) - } + override def apply[T](const: T): StateWithError[S, Nothing, T] = { (s: S) => Right((s, const)) } override def flatMap[T, U]( earlier: StateWithError[S, F, T] )(next: T => StateWithError[S, F, U]): StateWithError[S, F, U] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/monad/Trampoline.scala b/algebird-core/src/main/scala/com/twitter/algebird/monad/Trampoline.scala index f63093979..6e82042df 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/monad/Trampoline.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/monad/Trampoline.scala @@ -35,9 +35,7 @@ final case class Done[A](override val get: A) extends Trampoline[A] { final case class FlatMapped[C, A](start: Trampoline[C], fn: C => Trampoline[A]) extends Trampoline[A] { override def map[B](fn: A => B): FlatMapped[A, B] = - FlatMapped(this, { (a: A) => - Done(fn(a)) - }) + FlatMapped(this, (a: A) => Done(fn(a))) override lazy val get: A = Trampoline.run(this) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala b/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala index 0df87254c..ebc34289a 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala @@ -28,7 +28,7 @@ private class IterCallStatistics(threadSafe: Boolean) { import scala.math.min import java.lang.Long.numberOfLeadingZeros val maxBucket = 10 - val distribution: IndexedSeq[Counter] = IndexedSeq.fill(maxBucket + 1) { Counter(threadSafe) } + val distribution: IndexedSeq[Counter] = IndexedSeq.fill(maxBucket + 1)(Counter(threadSafe)) val total: Counter = Counter(threadSafe) def put(v: Long): Unit = { @@ -38,7 +38,7 @@ private class IterCallStatistics(threadSafe: Boolean) { distribution(bucket).increment } - def count: Long = distribution.foldLeft(0L) { _ + _.get } // sum + def count: Long = distribution.foldLeft(0L)(_ + _.get) // sum def pow2(i: Int): Int = 1 << i diff --git a/algebird-core/src/main/scala/com/twitter/algebird/statistics/Statistics.scala b/algebird-core/src/main/scala/com/twitter/algebird/statistics/Statistics.scala index 93e7088ce..c0e20ab2f 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/statistics/Statistics.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/statistics/Statistics.scala @@ -41,7 +41,7 @@ class StatisticsSemigroup[T](threadSafe: Boolean = true)(implicit wrappedSemigro } override def sumOption(iter: TraversableOnce[T]): Option[T] = - sumOptionCallsStats.measure(iter) { Semigroup.sumOption(_) } + sumOptionCallsStats.measure(iter)(Semigroup.sumOption(_)) override def toString: String = "plus calls: " + plusCallsCount + "\n" + @@ -69,7 +69,7 @@ class StatisticsMonoid[T](threadSafe: Boolean = true)(implicit wrappedMonoid: Mo } override def sum(vs: TraversableOnce[T]): T = - sumCallsStats.measure(vs) { Monoid.sum(_) } + sumCallsStats.measure(vs)(Monoid.sum(_)) override def toString: String = super.toString + "\n" + @@ -135,7 +135,7 @@ class StatisticsRing[T](threadSafe: Boolean = 
true)(implicit ring: Ring[T]) } override def product(iter: TraversableOnce[T]): T = - productCallsStats.measure(iter) { Ring.product(_) } + productCallsStats.measure(iter)(Ring.product(_)) override def toString: String = super.toString + "\n" + diff --git a/algebird-core/src/test/scala/com/twitter/algebird/AlgebraResolutionTest.scala b/algebird-core/src/test/scala/com/twitter/algebird/AlgebraResolutionTest.scala index 44541ec33..c6c3e6d7e 100644 --- a/algebird-core/src/test/scala/com/twitter/algebird/AlgebraResolutionTest.scala +++ b/algebird-core/src/test/scala/com/twitter/algebird/AlgebraResolutionTest.scala @@ -15,9 +15,7 @@ class AlgebraResolutionTest extends AnyFunSuite { } test("algebra.ring.AdditiveSemigroup") { implicit def fakeAdditiveSemigroup[T]: algebra.ring.AdditiveSemigroup[T] = - Semigroup.from[T] { (a, _) => - a - } + Semigroup.from[T]((a, _) => a) implicitly[Semigroup[Empty]] } @@ -27,9 +25,7 @@ class AlgebraResolutionTest extends AnyFunSuite { } test("algebra.ring.AdditiveMonoid") { implicit def fakeAdditiveMonoid[T]: algebra.ring.AdditiveMonoid[T] = - Monoid.from[T](null.asInstanceOf[T]) { (a, _) => - a - } + Monoid.from[T](null.asInstanceOf[T])((a, _) => a) implicitly[Monoid[Empty]] } diff --git a/algebird-spark/src/main/scala/com/twitter/algebird/spark/AlgebirdRDD.scala b/algebird-spark/src/main/scala/com/twitter/algebird/spark/AlgebirdRDD.scala index fc56b5b20..65bb32b3f 100644 --- a/algebird-spark/src/main/scala/com/twitter/algebird/spark/AlgebirdRDD.scala +++ b/algebird-spark/src/main/scala/com/twitter/algebird/spark/AlgebirdRDD.scala @@ -20,13 +20,15 @@ class AlgebirdRDD[T](val rdd: RDD[T]) extends AnyVal { * T. */ def aggregateOption[B: ClassTag, C](agg: Aggregator[T, B, C]): Option[C] = { - val pr = rdd.mapPartitions({ data => - if (data.isEmpty) Iterator.empty - else { - val b = agg.prepare(data.next) - Iterator(agg.appendAll(b, data)) - } - }, preservesPartitioning = true) + val pr = rdd.mapPartitions( + data => + if (data.isEmpty) Iterator.empty + else { + val b = agg.prepare(data.next) + Iterator(agg.appendAll(b, data)) + }, + preservesPartitioning = true + ) pr.repartition(1) .mapPartitions(pr => Iterator(agg.semigroup.sumOption(pr))) .collect @@ -74,9 +76,8 @@ class AlgebirdRDD[T](val rdd: RDD[T]) extends AnyVal { * This mapValues implementation allows us to avoid needing the V1 ClassTag, which would * be required to use the implementation in PairRDDFunctions */ - val prepared = keyed.mapPartitions({ it => - it.map { case (k, v) => (k, agg.prepare(v)) } - }, preservesPartitioning = true) + val prepared = + keyed.mapPartitions(it => it.map { case (k, v) => (k, agg.prepare(v)) }, preservesPartitioning = true) toPair( toPair(prepared) @@ -122,17 +123,14 @@ class AlgebirdRDD[T](val rdd: RDD[T]) extends AnyVal { * T. */ def sumOption(implicit sg: Semigroup[T], ct: ClassTag[T]): Option[T] = { - val partialReduce: RDD[T] = rdd.mapPartitions({ itT => - sg.sumOption(itT).toIterator - }, preservesPartitioning = true) + val partialReduce: RDD[T] = + rdd.mapPartitions(itT => sg.sumOption(itT).toIterator, preservesPartitioning = true) // my reading of the docs is that we do want a shuffle at this stage to // to make sure the upstream work is done in parallel. 
val results = partialReduce .repartition(1) - .mapPartitions({ it => - Iterator(sg.sumOption(it)) - }, preservesPartitioning = true) + .mapPartitions(it => Iterator(sg.sumOption(it)), preservesPartitioning = true) .collect assert(results.size == 1, s"Should only be 1 item: ${results.toList}") diff --git a/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala b/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala index 16e409489..b6de4bdf6 100644 --- a/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala +++ b/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala @@ -60,9 +60,7 @@ class AlgebirdRDDTest extends AnyFunSuite with BeforeAndAfter { val resMap = sc.makeRDD(s).algebird.aggregateByKey[K, T, U, V](agg).collect.toMap implicit val sg = agg.semigroup val algMap = MapAlgebra.sumByKey(s.map { case (k, t) => k -> agg.prepare(t) }).mapValues(agg.present) - s.map(_._1).toSet.foreach { k: K => - assertEq(resMap.get(k), algMap.get(k)) - } + s.map(_._1).toSet.foreach { k: K => assertEq(resMap.get(k), algMap.get(k)) } } def sumOption[T: ClassTag: Equiv: Semigroup](s: Seq[T]): Unit = @@ -71,9 +69,7 @@ class AlgebirdRDDTest extends AnyFunSuite with BeforeAndAfter { def sumByKey[K: ClassTag, V: ClassTag: Semigroup: Equiv](s: Seq[(K, V)]): Unit = { val resMap = sc.makeRDD(s).algebird.sumByKey[K, V].collect.toMap val algMap = MapAlgebra.sumByKey(s) - s.map(_._1).toSet.foreach { k: K => - assertEq(resMap.get(k), algMap.get(k)) - } + s.map(_._1).toSet.foreach { k: K => assertEq(resMap.get(k), algMap.get(k)) } } /** diff --git a/algebird-test/src/main/scala/com/twitter/algebird/ApplicativeLaws.scala b/algebird-test/src/main/scala/com/twitter/algebird/ApplicativeLaws.scala index e37d2d7f4..0bf777615 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/ApplicativeLaws.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/ApplicativeLaws.scala @@ -31,9 +31,7 @@ object ApplicativeLaws { arb: Arbitrary[T], arbFn: Arbitrary[T => U] ): Prop = - forAll { (t: T, fn: T => U) => - eq(app.map(app.apply(t))(fn), app.apply(fn(t))) - } + forAll((t: T, fn: T => U) => eq(app.map(app.apply(t))(fn), app.apply(fn(t)))) def joinLaw[M[_], T, U](eq: HigherEq[M] = new DefaultHigherEq[M])( implicit @@ -41,9 +39,7 @@ object ApplicativeLaws { arb1: Arbitrary[T], arb2: Arbitrary[U] ): Prop = - forAll { (t: T, u: U) => - eq(app.join(app.apply(t), app.apply(u)), app.apply((t, u))) - } + forAll((t: T, u: U) => eq(app.join(app.apply(t), app.apply(u)), app.apply((t, u)))) // These follow from apply and join: @@ -52,9 +48,7 @@ object ApplicativeLaws { app: Applicative[M], arb: Arbitrary[Seq[T]] ): Prop = - forAll { (ts: Seq[T]) => - eq(app.sequence(ts.map { app.apply(_) }), app.apply(ts)) - } + forAll((ts: Seq[T]) => eq(app.sequence(ts.map(app.apply(_))), app.apply(ts))) def joinWithLaw[M[_], T, U, V](eq: HigherEq[M] = new DefaultHigherEq[M])( implicit diff --git a/algebird-test/src/main/scala/com/twitter/algebird/BaseVectorSpaceProperties.scala b/algebird-test/src/main/scala/com/twitter/algebird/BaseVectorSpaceProperties.scala index 905971b91..ed3edaa54 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/BaseVectorSpaceProperties.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/BaseVectorSpaceProperties.scala @@ -26,9 +26,7 @@ object BaseVectorSpaceProperties { def isEqualIfZero[F, C[_]]( eqfn: (C[F], C[F]) => Boolean )(implicit vs: VectorSpace[F, C], arb: Arbitrary[C[F]]) = - forAll { (a: 
C[F]) => - eqfn(VectorSpace.scale(vs.field.zero, a), vs.group.zero) - } + forAll((a: C[F]) => eqfn(VectorSpace.scale(vs.field.zero, a), vs.group.zero)) def distributesWithPlus[F, C[_]]( eqfn: (C[F], C[F]) => Boolean @@ -51,9 +49,7 @@ object BaseVectorSpaceProperties { def identityOne[F, C[_]]( eqfn: (C[F], C[F]) => Boolean )(implicit vs: VectorSpace[F, C], arb: Arbitrary[C[F]]) = - forAll { (a: C[F]) => - eqfn(VectorSpace.scale(vs.field.one, a), a) - } + forAll((a: C[F]) => eqfn(VectorSpace.scale(vs.field.one, a), a)) def distributesOverScalarPlus[F, C[_]]( eqfn: (C[F], C[F]) => Boolean diff --git a/algebird-test/src/main/scala/com/twitter/algebird/FunctorLaws.scala b/algebird-test/src/main/scala/com/twitter/algebird/FunctorLaws.scala index d07c967d2..aa1d8121d 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/FunctorLaws.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/FunctorLaws.scala @@ -30,11 +30,7 @@ object FunctorLaws { functor: Functor[M], arb: Arbitrary[M[V]] ): Prop = - forAll { (mv: M[V]) => - eq(functor.map(mv) { x => - x - }, mv) - } + forAll((mv: M[V]) => eq(functor.map(mv)(x => x), mv)) def composeLaw[M[_], T, U, V](eq: HigherEq[M] = new DefaultHigherEq[M])( implicit diff --git a/algebird-test/src/main/scala/com/twitter/algebird/MetricProperties.scala b/algebird-test/src/main/scala/com/twitter/algebird/MetricProperties.scala index 115b3704c..b0c038c0c 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/MetricProperties.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/MetricProperties.scala @@ -44,9 +44,7 @@ trait MetricProperties { } def isSymmetric[T: Metric: Arbitrary]: Prop = - forAll { (a: T, b: T) => - beCloseTo(Metric(a, b), Metric(b, a)) - } + forAll((a: T, b: T) => beCloseTo(Metric(a, b), Metric(b, a))) def satisfiesTriangleInequality[T: Metric: Arbitrary]: Prop = forAll { (a: T, b: T, c: T) => diff --git a/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala b/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala index 8ad55b5d5..43e0e6116 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala @@ -27,9 +27,7 @@ import Monad.{operators, pureOp} object MonadLaws { // $COVERAGE-OFF$Turn off coverage for deprecated laws. @deprecated("No longer used. 
Use Equiv[T] instance", since = "0.13.0") - def defaultEq[T] = { (t0: T, t1: T) => - (t0 == t1) - } + def defaultEq[T] = { (t0: T, t1: T) => (t0 == t1) } @deprecated("use leftIdentity[T]", since = "0.13.0") def leftIdentityEquiv[M[_], T, U]( @@ -65,9 +63,7 @@ object MonadLaws { for { m <- implicitly[Arbitrary[Map[T, M[U]]]].arbitrary defu <- implicitly[Arbitrary[M[U]]].arbitrary - } yield ({ (t: T) => - m.getOrElse(t, defu) - }) + } yield ({ (t: T) => m.getOrElse(t, defu) }) } @deprecated("use monadLaws[T]", since = "0.13.0") @@ -93,14 +89,10 @@ object MonadLaws { arbfn: Arbitrary[(T) => M[U]], equiv: Equiv[M[U]] ) = - forAll { (t: T, fn: T => M[U]) => - Equiv[M[U]].equiv(t.pure[M].flatMap(fn), fn(t)) - } + forAll((t: T, fn: T => M[U]) => Equiv[M[U]].equiv(t.pure[M].flatMap(fn), fn(t))) def rightIdentity[M[_], T](implicit monad: Monad[M], arb: Arbitrary[M[T]], equiv: Equiv[M[T]]) = - forAll { (mt: M[T]) => - Equiv[M[T]].equiv(mt.flatMap { _.pure[M] }, mt) - } + forAll((mt: M[T]) => Equiv[M[T]].equiv(mt.flatMap(_.pure[M]), mt)) def associative[M[_], T, U, V]( implicit monad: Monad[M], @@ -109,9 +101,7 @@ object MonadLaws { fn2: Arbitrary[U => M[V]], equiv: Equiv[M[V]] ) = forAll { (mt: M[T], f1: T => M[U], f2: U => M[V]) => - Equiv[M[V]].equiv(mt.flatMap(f1).flatMap(f2), mt.flatMap { t => - f1(t).flatMap(f2) - }) + Equiv[M[V]].equiv(mt.flatMap(f1).flatMap(f2), mt.flatMap(t => f1(t).flatMap(f2))) } def monadLaws[M[_], T, U, R]( @@ -129,33 +119,25 @@ object MonadLaws { associative[M, T, U, R] && rightIdentity[M, R] && leftIdentity[M, U, R] implicit def indexedSeqA[T](implicit arbl: Arbitrary[List[T]]): Arbitrary[IndexedSeq[T]] = - Arbitrary { arbl.arbitrary.map { _.toIndexedSeq } } + Arbitrary(arbl.arbitrary.map(_.toIndexedSeq)) implicit def vectorA[T](implicit arbl: Arbitrary[List[T]]): Arbitrary[Vector[T]] = Arbitrary { - arbl.arbitrary.map { l => - Vector(l: _*) - } + arbl.arbitrary.map(l => Vector(l: _*)) } implicit def seqA[T](implicit arbl: Arbitrary[List[T]]): Arbitrary[Seq[T]] = Arbitrary { - arbl.arbitrary.map { l => - Seq(l: _*) - } + arbl.arbitrary.map(l => Seq(l: _*)) } implicit def someA[T](implicit arbl: Arbitrary[T]): Arbitrary[Some[T]] = Arbitrary { - arbl.arbitrary.map { l => - Some(l) - } + arbl.arbitrary.map(l => Some(l)) } implicit def identityA[T](implicit arbl: Arbitrary[T]): Arbitrary[Identity[T]] = Arbitrary { - arbl.arbitrary.map { l => - Identity(l) - } + arbl.arbitrary.map(l => Identity(l)) } } diff --git a/algebird-test/src/main/scala/com/twitter/algebird/PredecessibleLaws.scala b/algebird-test/src/main/scala/com/twitter/algebird/PredecessibleLaws.scala index 9a685faff..d9025877a 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/PredecessibleLaws.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/PredecessibleLaws.scala @@ -53,9 +53,7 @@ object PredecessibleLaws { * }}} */ def predecessibleLaws[T: Predecessible: Arbitrary]: Prop = - forAll { (t: T, size: Short) => - law(t) && iteratePrevDecreases(t, size) - } + forAll((t: T, size: Short) => law(t) && iteratePrevDecreases(t, size)) @deprecated("Deprecated in favor of predecessibleLaws.", since = "0.12.3") def predessibleLaws[T: Predecessible: Arbitrary]: Prop = predecessibleLaws diff --git a/algebird-test/src/main/scala/com/twitter/algebird/StatefulSummerLaws.scala b/algebird-test/src/main/scala/com/twitter/algebird/StatefulSummerLaws.scala index b61d5dfd0..422e11ad5 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/StatefulSummerLaws.scala +++ 
b/algebird-test/src/main/scala/com/twitter/algebird/StatefulSummerLaws.scala @@ -37,7 +37,7 @@ object StatefulSummerLaws { def sumIsPreserved[V: Semigroup: Equiv](summer: StatefulSummer[V], items: Iterable[V]): Boolean = { summer.flush val sg = Semigroup.sumOption(items) - val wsummer = Monoid.plus(Monoid.sum(items.map { summer.put(_) }.filter { + val wsummer = Monoid.plus(Monoid.sum(items.map(summer.put(_)).filter { _.isDefined }), summer.flush) zeroEquiv(sg, wsummer) && summer.isFlushed diff --git a/algebird-test/src/main/scala/com/twitter/algebird/SuccessibleLaws.scala b/algebird-test/src/main/scala/com/twitter/algebird/SuccessibleLaws.scala index 70dcba1a9..1f4c37f8c 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/SuccessibleLaws.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/SuccessibleLaws.scala @@ -53,7 +53,5 @@ object SuccessibleLaws { * }}} */ def successibleLaws[T: Successible: Arbitrary]: Prop = - forAll { (t: T, size: Short) => - law(t) && iterateNextIncreases(t, size) - } + forAll((t: T, size: Short) => law(t) && iterateNextIncreases(t, size)) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/AbstractAlgebraTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/AbstractAlgebraTest.scala index daa5329c7..c26d2dcc5 100755 --- a/algebird-test/src/test/scala/com/twitter/algebird/AbstractAlgebraTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AbstractAlgebraTest.scala @@ -9,16 +9,12 @@ class AbstractAlgebraTest extends CheckProperties with Matchers { property("A Monoid should be able to sum") { val monoid = implicitly[Monoid[Int]] - forAll { intList: List[Int] => - intList.sum == monoid.sum(intList) - } + forAll { intList: List[Int] => intList.sum == monoid.sum(intList) } } property("A Ring should be able to product") { val ring = implicitly[Ring[Int]] - forAll { intList: List[Int] => - intList.product == ring.product(intList) - } + forAll { intList: List[Int] => intList.product == ring.product(intList) } } property("An OptionMonoid should be able to sum") { @@ -59,9 +55,7 @@ class AbstractAlgebraTest extends CheckProperties with Matchers { property("IndexedSeq should sum") { forAll { (lIndexedSeq: IndexedSeq[Int]) => - val rIndexedSeq = lIndexedSeq.map { _ => - scala.util.Random.nextInt - } + val rIndexedSeq = lIndexedSeq.map(_ => scala.util.Random.nextInt) (lIndexedSeq.size == rIndexedSeq.size) ==> { val leftBase = lIndexedSeq.map(Max(_)) val rightBase = rIndexedSeq.map(Max(_)) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/AdJoinedUnitRingLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/AdJoinedUnitRingLaws.scala index bbad87c9f..67a2dd272 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/AdJoinedUnitRingLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AdJoinedUnitRingLaws.scala @@ -24,13 +24,11 @@ import org.scalacheck.Prop.forAll class AdjoinedUnitRingLaws extends CheckProperties { // AdjoinedUnit requires this method to be correct, so it is tested here: property("intTimes works correctly") { - forAll { (bi0: BigInt, bi1: BigInt) => - Group.intTimes(bi0, bi1) == (bi0 * bi1) - } + forAll((bi0: BigInt, bi1: BigInt) => Group.intTimes(bi0, bi1) == (bi0 * bi1)) } implicit def rng[T: Ring]: Rng[T] = implicitly[Ring[T]] - property("AdjoinedUnit[Int] is a Ring") { ringLaws[AdjoinedUnit[Int]] } - property("AdjoinedUnit[Long] is a Ring") { ringLaws[AdjoinedUnit[Long]] } + property("AdjoinedUnit[Int] is a Ring")(ringLaws[AdjoinedUnit[Int]]) + 
property("AdjoinedUnit[Long] is a Ring")(ringLaws[AdjoinedUnit[Long]]) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala index f4c5b19d3..a638e3c0f 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala @@ -90,7 +90,7 @@ class AggregatorLaws extends CheckProperties { val aggregator = Aggregator.numericSum[T] aggregator(in) == in.map(num.toDouble).sum } - property("Aggregator.numericSum is correct for Ints") { checkNumericSum[Int] } + property("Aggregator.numericSum is correct for Ints")(checkNumericSum[Int]) property("Aggregator.numericSum is correct for Longs") { checkNumericSum[Long] } @@ -118,29 +118,19 @@ class AggregatorLaws extends CheckProperties { } property("Aggregator.count is like List.count") { - forAll { (in: List[Int], fn: Int => Boolean) => - in.count(fn) == (Aggregator.count(fn)(in)) - } + forAll((in: List[Int], fn: Int => Boolean) => in.count(fn) == (Aggregator.count(fn)(in))) } property("Aggregator.exists is like List.exists") { - forAll { (in: List[Int], fn: Int => Boolean) => - in.exists(fn) == (Aggregator.exists(fn)(in)) - } + forAll((in: List[Int], fn: Int => Boolean) => in.exists(fn) == (Aggregator.exists(fn)(in))) } property("Aggregator.forall is like List.forall") { - forAll { (in: List[Int], fn: Int => Boolean) => - in.forall(fn) == (Aggregator.forall(fn)(in)) - } + forAll((in: List[Int], fn: Int => Boolean) => in.forall(fn) == (Aggregator.forall(fn)(in))) } property("Aggregator.head is like List.head") { - forAll { (in: List[Int]) => - in.headOption == (Aggregator.head.applyOption(in)) - } + forAll((in: List[Int]) => in.headOption == (Aggregator.head.applyOption(in))) } property("Aggregator.last is like List.last") { - forAll { (in: List[Int]) => - in.lastOption == (Aggregator.last.applyOption(in)) - } + forAll((in: List[Int]) => in.lastOption == (Aggregator.last.applyOption(in))) } property("Aggregator.maxBy is like List.maxBy") { forAll { (head: Int, in: List[Int], fn: Int => Int) => @@ -201,9 +191,7 @@ class AggregatorLaws extends CheckProperties { } } property("Aggregator.toList is identity on lists") { - forAll { (in: List[Int]) => - in == Aggregator.toList(in) - } + forAll((in: List[Int]) => in == Aggregator.toList(in)) } property("MonoidAggregator.sumBefore is correct") { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala index 45b73c7fa..343c28e0e 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala @@ -5,7 +5,7 @@ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec class AppendAggregatorTest extends AnyWordSpec with Matchers { - val data = Vector.fill(100) { scala.util.Random.nextInt(100) } + val data = Vector.fill(100)(scala.util.Random.nextInt(100)) val mpty = Vector.empty[Int] // test the methods that appendSemigroup method defines or overrides @@ -20,9 +20,7 @@ class AppendAggregatorTest extends AnyWordSpec with Matchers { val (half1, half2) = data.splitAt(n / 2) val lhs = agg1.appendAll(agg1.prepare(half1.head), half1.tail) - data.foreach { e => - agg1.prepare(e) should be(agg2.prepare(e)) - } + data.foreach(e => agg1.prepare(e) should be(agg2.prepare(e))) 
agg1.present(lhs) should be(agg2.present(lhs)) @@ -31,9 +29,7 @@ class AppendAggregatorTest extends AnyWordSpec with Matchers { agg1.applyOption(data) should be(agg2.applyOption(data)) agg1.applyOption(empty) should be(agg2.applyOption(empty)) - half2.foreach { e => - agg1.append(lhs, e) should be(agg2.append(lhs, e)) - } + half2.foreach(e => agg1.append(lhs, e) should be(agg2.append(lhs, e))) agg1.appendAll(lhs, half2) should be(agg2.appendAll(lhs, half2)) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala index 5d32bb0ac..5a38df194 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala @@ -51,8 +51,8 @@ class ApproximateLaws extends CheckProperties { } } - val trueGen = choose(0.0, 1.0).map { ApproximateBoolean(true, _) } - val falseGen = choose(0.0, 1.0).map { ApproximateBoolean(false, _) } + val trueGen = choose(0.0, 1.0).map(ApproximateBoolean(true, _)) + val falseGen = choose(0.0, 1.0).map(ApproximateBoolean(false, _)) implicit val approxArb: Arbitrary[ApproximateBoolean] = Arbitrary( diff --git a/algebird-test/src/test/scala/com/twitter/algebird/AveragedValueLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/AveragedValueLaws.scala index 01fbf2ae5..7aade0133 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/AveragedValueLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AveragedValueLaws.scala @@ -25,9 +25,7 @@ class AveragedValueLaws extends CheckProperties { } property("AveragedValue.aggregator returns the average") { - forAll { v: NonEmptyVector[Double] => - approxEq(1e-10)(avg(v.items), AveragedValue.aggregator(v.items)) - } + forAll { v: NonEmptyVector[Double] => approxEq(1e-10)(avg(v.items), AveragedValue.aggregator(v.items)) } } property("AveragedValue instances subtract") { @@ -39,9 +37,7 @@ class AveragedValueLaws extends CheckProperties { } property("AveragedValue can absorb numbers directly") { - forAll { (base: AveragedValue, x: BigInt) => - (base + AveragedValue(x)) == (base + x) - } + forAll((base: AveragedValue, x: BigInt) => (base + AveragedValue(x)) == (base + x)) } property("AveragedValue works by + or sumOption") { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala index f2e867337..7e83d5ca0 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala @@ -54,9 +54,7 @@ class BatchedLaws extends CheckProperties { class BatchedTests extends AnyPropSpec with Matchers with ScalaCheckPropertyChecks { property(".iterator works") { - forAll { (x: Int, xs: List[Int]) => - Batched(x).append(xs).iterator.toList shouldBe (x :: xs) - } + forAll((x: Int, xs: List[Int]) => Batched(x).append(xs).iterator.toList shouldBe (x :: xs)) } property(".iterator and .reverseIterator agree") { @@ -67,8 +65,6 @@ class BatchedTests extends AnyPropSpec with Matchers with ScalaCheckPropertyChec } property(".toList works") { - forAll { (b: Batched[Int]) => - b.toList shouldBe b.iterator.toList - } + forAll((b: Batched[Int]) => b.toList shouldBe b.iterator.toList) } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala index 234405b2b..bfaf377b6 
100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala @@ -40,16 +40,10 @@ class BloomFilterLaws extends CheckProperties { implicit val bfGen: Arbitrary[BF[String]] = Arbitrary { - val item = Gen.choose(0, 10000).map { v => - bfMonoid.create(v.toString) - } + val item = Gen.choose(0, 10000).map(v => bfMonoid.create(v.toString)) val zero = Gen.const(bfMonoid.zero) - val sparse = Gen.listOf(item).map { its => - toSparse(bfMonoid.sum(its)) - } - val dense = Gen.listOf(item).map { its => - toDense(bfMonoid.sum(its)) - } + val sparse = Gen.listOf(item).map(its => toSparse(bfMonoid.sum(its))) + val dense = Gen.listOf(item).map(its => toDense(bfMonoid.sum(its))) Gen.frequency((1, zero), (5, item), (10, sparse), (10, dense)) } @@ -58,24 +52,18 @@ class BloomFilterLaws extends CheckProperties { } property("++ is the same as plus") { - forAll { (a: BF[String], b: BF[String]) => - Equiv[BF[String]].equiv(a ++ b, bfMonoid.plus(a, b)) - } + forAll((a: BF[String], b: BF[String]) => Equiv[BF[String]].equiv(a ++ b, bfMonoid.plus(a, b))) } property("the distance between a filter and itself should be 0") { - forAll { (a: BF[String]) => - a.hammingDistance(a) == 0 - } + forAll((a: BF[String]) => a.hammingDistance(a) == 0) } property( "the distance between a filter and an empty filter should be the number of bits" + "set in the existing filter" ) { - forAll { (a: BF[String]) => - a.hammingDistance(bfMonoid.zero) == a.numBits - } + forAll((a: BF[String]) => a.hammingDistance(bfMonoid.zero) == a.numBits) } property("all equivalent filters should have 0 Hamming distance") { @@ -90,9 +78,7 @@ class BloomFilterLaws extends CheckProperties { } property("distance between filters should be symmetrical") { - forAll { (a: BF[String], b: BF[String]) => - a.hammingDistance(b) == b.hammingDistance(a) - } + forAll((a: BF[String], b: BF[String]) => a.hammingDistance(b) == b.hammingDistance(a)) } property("+ is the same as adding with create") { @@ -102,15 +88,11 @@ class BloomFilterLaws extends CheckProperties { } property("maybeContains is consistent with contains") { - forAll { (a: BF[String], b: String) => - a.maybeContains(b) == a.contains(b).isTrue - } + forAll((a: BF[String], b: String) => a.maybeContains(b) == a.contains(b).isTrue) } property("after + maybeContains is true") { - forAll { (a: BF[String], b: String) => - (a + b).maybeContains(b) - } + forAll((a: BF[String], b: String) => (a + b).maybeContains(b)) } property("checkAndAdd works like check the add") { @@ -124,14 +106,10 @@ class BloomFilterLaws extends CheckProperties { } property(".dense returns an equivalent BF") { - forAll { (a: BF[String]) => - Equiv[BF[String]].equiv(toSparse(a).dense, a) - } + forAll((a: BF[String]) => Equiv[BF[String]].equiv(toSparse(a).dense, a)) } property("a ++ a = a for BF") { - forAll { (a: BF[String]) => - Equiv[BF[String]].equiv(a ++ a, a) - } + forAll((a: BF[String]) => Equiv[BF[String]].equiv(a ++ a, a)) } } @@ -149,11 +127,7 @@ class BFHashIndices extends CheckProperties { } property("Indices are non negative") { - forAll { (hash: BFHash[String], v: Long) => - hash.apply(v.toString).forall { e => - e >= 0 - } - } + forAll((hash: BFHash[String], v: Long) => hash.apply(v.toString).forall(e => e >= 0)) } /** @@ -313,9 +287,7 @@ class BloomFilterTest extends AnyWordSpec with Matchers { val entries = (0 until numEntries).map(_ => RAND.nextInt.toString) val bf = bfMonoid.create(entries: _*) - entries.foreach { i => - 
assert(bf.contains(i.toString).isTrue) - } + entries.foreach(i => assert(bf.contains(i.toString).isTrue)) } } @@ -348,7 +320,7 @@ class BloomFilterTest extends AnyWordSpec with Matchers { "approximate cardinality" in { val bfMonoid = BloomFilterMonoid[String](10, 100000) Seq(10, 100, 1000, 10000).foreach { exactCardinality => - val items = (1 until exactCardinality).map { _.toString } + val items = (1 until exactCardinality).map(_.toString) val bf = bfMonoid.create(items: _*) val size = bf.size @@ -365,9 +337,7 @@ class BloomFilterTest extends AnyWordSpec with Matchers { val entries = (0 until numEntries).map(_ => RAND.nextInt.toString) val bf = aggregator(entries) - entries.foreach { i => - assert(bf.contains(i.toString).isTrue) - } + entries.foreach(i => assert(bf.contains(i.toString).isTrue)) } } @@ -414,9 +384,7 @@ class BloomFilterTest extends AnyWordSpec with Matchers { val entries = (0 until numEntries).map(_ => RAND.nextInt.toString) val bf = bfMonoid.create(entries: _*) entries - .map { entry => - (entry, bfMonoid.create(entry)) - } + .map(entry => (entry, bfMonoid.create(entry))) .foldLeft((bfMonoid.zero, bfMonoid.zero)) { case ((left, leftAlt), (entry, _)) => val (newLeftAlt, contained) = leftAlt.checkAndAdd(entry) @@ -424,9 +392,7 @@ class BloomFilterTest extends AnyWordSpec with Matchers { (left + entry, newLeftAlt) } - entries.foreach { i => - assert(bf.contains(i.toString).isTrue) - } + entries.foreach(i => assert(bf.contains(i.toString).isTrue)) } } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CheckProperties.scala b/algebird-test/src/test/scala/com/twitter/algebird/CheckProperties.scala index e6e44467d..ea092aaff 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CheckProperties.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CheckProperties.scala @@ -9,5 +9,5 @@ import org.scalatest.propspec.AnyPropSpec trait CheckProperties extends AnyPropSpec with Checkers { def property(testName: String, testTags: org.scalatest.Tag*)(testFun: org.scalacheck.Prop): Unit = - super.property(testName, testTags: _*) { check { testFun } } + super.property(testName, testTags: _*)(check(testFun)) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala b/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala index d9575b169..9da06f938 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala @@ -55,7 +55,7 @@ class CollectionSpecification extends CheckProperties { } implicit def arbSeq[T: Arbitrary]: Arbitrary[Seq[T]] = - Arbitrary { implicitly[Arbitrary[List[T]]].arbitrary.map { _.toSeq } } + Arbitrary(implicitly[Arbitrary[List[T]]].arbitrary.map(_.toSeq)) property("Seq plus") { forAll { (a: Seq[Int], b: Seq[Int]) => @@ -88,24 +88,18 @@ class CollectionSpecification extends CheckProperties { val mv = implicitly[Monoid[V]] implicitly[Arbitrary[Map[K, V]]].arbitrary .map { - _.filter { kv => - mv.isNonZero(kv._2) - } + _.filter(kv => mv.isNonZero(kv._2)) } } implicit def scMapArb[K: Arbitrary, V: Arbitrary: Monoid] = Arbitrary { mapArb[K, V].arbitrary - .map { map: Map[K, V] => - map: ScMap[K, V] - } + .map { map: Map[K, V] => map: ScMap[K, V] } } implicit def mMapArb[K: Arbitrary, V: Arbitrary: Monoid] = Arbitrary { mapArb[K, V].arbitrary - .map { map: Map[K, V] => - MMap(map.toSeq: _*): MMap[K, V] - } + .map { map: Map[K, V] => MMap(map.toSeq: _*): MMap[K, V] } } def 
mapPlusTimesKeys[M <: ScMap[Int, Int]](implicit rng: Ring[ScMap[Int, Int]], arbMap: Arbitrary[M]) = @@ -113,9 +107,7 @@ class CollectionSpecification extends CheckProperties { // Subsets because zeros are removed from the times/plus values ((rng.times(a, b)).keys.toSet.subsetOf((a.keys.toSet & b.keys.toSet)) && (rng.plus(a, b)).keys.toSet.subsetOf((a.keys.toSet | b.keys.toSet)) && - (rng.plus(a, a).keys == (a.filter { kv => - (kv._2 + kv._2) != 0 - }).keys)) + (rng.plus(a, a).keys == a.filter(kv => (kv._2 + kv._2) != 0).keys)) } property("Map plus/times keys") { @@ -187,7 +179,7 @@ class CollectionSpecification extends CheckProperties { implicit def arbIndexedSeq[T: Arbitrary]: Arbitrary[IndexedSeq[T]] = Arbitrary { - implicitly[Arbitrary[List[T]]].arbitrary.map { _.toIndexedSeq } + implicitly[Arbitrary[List[T]]].arbitrary.map(_.toIndexedSeq) } property("IndexedSeq (of a Semigroup) is a semigroup") { @@ -205,15 +197,11 @@ class CollectionSpecification extends CheckProperties { } property("MapAlgebra.removeZeros works") { - forAll { (m: Map[Int, Int]) => - (MapAlgebra.removeZeros(m).values.toSet.contains(0) == false) - } + forAll((m: Map[Int, Int]) => (MapAlgebra.removeZeros(m).values.toSet.contains(0) == false)) } property("Monoid.sum performs w/ or w/o MapAlgebra.removeZeros") { - forAll { (m: Map[Int, Int]) => - (Monoid.sum(m) == Monoid.sum(MapAlgebra.removeZeros(m))) - } + forAll((m: Map[Int, Int]) => (Monoid.sum(m) == Monoid.sum(MapAlgebra.removeZeros(m)))) } property("MapAlgebra.sumByKey works") { @@ -221,11 +209,9 @@ class CollectionSpecification extends CheckProperties { import com.twitter.algebird.Operators._ val tupList = keys.zip(values) val expected = tupList - .groupBy { _._1 } - .mapValues { v => - v.map { _._2 }.sum - } - .filter { _._2 != 0 } + .groupBy(_._1) + .mapValues(v => v.map(_._2).sum) + .filter(_._2 != 0) .toMap MapAlgebra.sumByKey(tupList) == expected && tupList.sumByKey == expected } @@ -244,9 +230,7 @@ class CollectionSpecification extends CheckProperties { forAll { (m1: Map[Int, Int], m2: Map[Int, Int]) => // .toList below is to make sure we don't remove duplicate values (MapAlgebra.dot(m1, m2) == - (m1.keySet ++ m2.keySet).toList.map { k => - m1.getOrElse(k, 0) * m2.getOrElse(k, 0) - }.sum) + (m1.keySet ++ m2.keySet).toList.map(k => m1.getOrElse(k, 0) * m2.getOrElse(k, 0)).sum) } } @@ -257,9 +241,7 @@ class CollectionSpecification extends CheckProperties { .toIterator .flatMap { case (k, sv) => - sv.map { v => - (k, v) - } + sv.map(v => (k, v)) } .toSet == l) } @@ -296,18 +278,14 @@ class CollectionSpecification extends CheckProperties { forAll { (m1: Map[Int, Int], m2: Map[Int, Int]) => val m3 = MapAlgebra.join(m1, m2) val m1after = m3 - .mapValues { vw => - vw._1 - } - .filter { _._2.isDefined } - .mapValues { _.get } + .mapValues(vw => vw._1) + .filter(_._2.isDefined) + .mapValues(_.get) .toMap val m2after = m3 - .mapValues { vw => - vw._2 - } - .filter { _._2.isDefined } - .mapValues { _.get } + .mapValues(vw => vw._2) + .filter(_._2.isDefined) + .mapValues(_.get) .toMap val m1Orm2 = (m1.keySet | m2.keySet) ((m1after == m1) && (m2after == m2) && (m3.keySet == m1Orm2)) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala index ac382e405..374255987 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala @@ -23,9 +23,7 @@ import org.scalacheck.Arbitrary 
class CombinatorTest extends CheckProperties { private def fold(m: Max[Int], l: List[Int]): List[Int] = { - val sortfn = { (i: Int) => - i % (scala.math.sqrt(m.get.toLong - Int.MinValue).toInt + 1) - } + val sortfn = { (i: Int) => i % (scala.math.sqrt(m.get.toLong - Int.MinValue).toInt + 1) } l.sortWith { (l, r) => val (sl, sr) = (sortfn(l), sortfn(r)) if (sl == sr) l < r else sl < sr @@ -54,7 +52,7 @@ class CombinatorTest extends CheckProperties { // Now test the expected use case: top-K by appearances: implicit val monTopK: Monoid[(Map[Int, Int], Set[Int])] = - new MonoidCombinator({ (m: Map[Int, Int], top: Set[Int]) => + new MonoidCombinator((m: Map[Int, Int], top: Set[Int]) => top.toList .sortWith { (l, r) => val lc = m(l) @@ -64,17 +62,15 @@ class CombinatorTest extends CheckProperties { } .take(40) .toSet - }) + ) // Make sure the sets start sorted: implicit def topKArb: Arbitrary[(Map[Int, Int], Set[Int])] = Arbitrary { for (s <- Arbitrary.arbitrary[List[Int]]; - smallvals = s.map { _ % 31 }; + smallvals = s.map(_ % 31); m = smallvals - .groupBy { s => - s - } - .mapValues { _.size }) + .groupBy(s => s) + .mapValues(_.size)) yield monTopK.plus(monTopK.zero, (m.toMap, smallvals.toSet)) } property("MonoidCombinator with top-K forms a Monoid") { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala index 47b3d1b71..b00099762 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala @@ -193,7 +193,9 @@ class CMSContraMapSpec extends AnyWordSpec with Matchers with ScalaCheckDrivenPr // then the result should be a CMSHasher[L]... val targetHasher: CMSHasher[Seq[Byte]] = sourceHasher.contramap((d: Seq[Byte]) => f(d)) - targetHasher shouldBe an[CMSHasher[_]] // Can't test CMSHasher[Seq[Byte]] specifically because of type erasure. + targetHasher shouldBe an[CMSHasher[ + _ + ]] // Can't test CMSHasher[Seq[Byte]] specifically because of type erasure. // ...and hashing should work correctly (this is only a smoke test). 
val a = 4 @@ -371,9 +373,7 @@ class CmsInnerProductProperty[K: CMSHasher: Gen] extends CmsProperty[K] { def exactResult(lists: (Vector[K], Vector[K]), input: Unit) = { val counts1 = lists._1.groupBy(identity).mapValues(_.size) val counts2 = lists._2.groupBy(identity).mapValues(_.size) - (counts1.keys.toSet & counts2.keys.toSet).toSeq.map { k => - counts1(k) * counts2(k) - }.sum + (counts1.keys.toSet & counts2.keys.toSet).toSeq.map(k => counts1(k) * counts2(k)).sum } def approximateResult(cmses: (CMS[K], CMS[K]), input: Unit) = @@ -443,7 +443,7 @@ abstract class CMSTest[K: CMSHasher](toK: Int => K) def exactHeavyHitters(data: Seq[K], heavyHittersPct: Double): Set[K] = { val counts = data.groupBy(x => x).mapValues(_.size) val totalCount = counts.values.sum - counts.filter { _._2 >= heavyHittersPct * totalCount }.keys.toSet + counts.filter(_._2 >= heavyHittersPct * totalCount).keys.toSet } /** @@ -455,9 +455,7 @@ abstract class CMSTest[K: CMSHasher](toK: Int => K) */ def createRandomStream(size: Int, range: Int, rnd: Random = RAND): Seq[K] = { require(size > 0) - (1 to size).map { _ => - toK(rnd.nextInt(range)) - } + (1 to size).map(_ => toK(rnd.nextInt(range))) } "A Count-Min sketch implementing CMSCounting" should { @@ -539,15 +537,9 @@ abstract class CMSTest[K: CMSHasher](toK: Int => K) "estimate heavy hitters" in { // Simple way of making some elements appear much more often than others. - val data1 = (1 to 3000).map { _ => - toK(RAND.nextInt(3)) - } - val data2 = (1 to 3000).map { _ => - toK(RAND.nextInt(10)) - } - val data3 = (1 to 1450).map { _ => - toK(-1) - } // element close to being a 20% heavy hitter + val data1 = (1 to 3000).map(_ => toK(RAND.nextInt(3))) + val data2 = (1 to 3000).map(_ => toK(RAND.nextInt(10))) + val data3 = (1 to 1450).map(_ => toK(-1)) // element close to being a 20% heavy hitter val data = data1 ++ data2 ++ data3 // Find elements that appear at least 20% of the time. @@ -564,11 +556,9 @@ abstract class CMSTest[K: CMSHasher](toK: Int => K) // (heavyHittersPct - eps) * totalCount is claimed as a heavy hitter. 
val minHhCount = (heavyHittersPct - cms.eps) * cms.totalCount val infrequent = data - .groupBy { x => - x - } - .mapValues { _.size } - .filter { _._2 < minHhCount } + .groupBy(x => x) + .mapValues(_.size) + .filter(_._2 < minHhCount) .keys .toSet infrequent.intersect(estimatedHhs) should be('empty) @@ -935,9 +925,7 @@ abstract class CMSTest[K: CMSHasher](toK: Int => K) val heavyHittersN = 2 val data = Seq(1, 2, 2, 3, 3, 3, 6, 6, 6, 6, 6, 6) - .flatMap { i => - Seq((4, i), (7, i + 2)) - } + .flatMap(i => Seq((4, i), (7, i + 2))) .map(pairToK) val monoid = ScopedTopNCMS.monoid[K, K](EPS, DELTA, SEED, heavyHittersN) val cms = monoid.create(data) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/DecayedValueLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/DecayedValueLaws.scala index 2f2f2073b..60477dd43 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/DecayedValueLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/DecayedValueLaws.scala @@ -42,9 +42,7 @@ class DecayedValueLaws extends CheckProperties { } yield Params(x, hl, c, n) } - averageApproxEq { (dv, params) => - dv.average(params.halfLife) - } + averageApproxEq((dv, params) => dv.average(params.halfLife)) } property("for large HL but small count, averageFrom(f(t)=x)=x") { @@ -57,9 +55,7 @@ class DecayedValueLaws extends CheckProperties { } yield Params(x, hl, c, n) } - averageApproxEq { (dv, params) => - dv.averageFrom(params.halfLife, 0, params.count) - } + averageApproxEq((dv, params) => dv.averageFrom(params.halfLife, 0, params.count)) } property("for small HL but large count, discreteAverage(f(t)=x)=x") { @@ -72,8 +68,6 @@ class DecayedValueLaws extends CheckProperties { } yield Params(x, hl, c, n) } - averageApproxEq { (dv, params) => - dv.discreteAverage(params.halfLife) - } + averageApproxEq((dv, params) => dv.discreteAverage(params.halfLife)) } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala index 7bd13259e..4e4710e29 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala @@ -182,9 +182,7 @@ class EventuallyAggregatorLaws extends AnyPropSpec with ScalaCheckPropertyChecks def mustConvert(o: List[Int]) = pred(o) val leftSemigroup = Semigroup.doubleSemigroup - def rightAggregator = rightAg.andThenPresent { _ => - "Right" - } + def rightAggregator = rightAg.andThenPresent(_ => "Right") } property("EventuallyAggregator converts correctly") { @@ -198,9 +196,7 @@ class EventuallyAggregatorLaws extends AnyPropSpec with ScalaCheckPropertyChecks * For HLL/Set, which is the common example, this is lawful. */ forAll { (in: List[Int], thresh: Int, rightAg: Aggregator[Int, List[Int], Int]) => - val pred = { x: List[Int] => - x.lengthCompare(thresh) > 0 - } + val pred = { x: List[Int] => x.lengthCompare(thresh) > 0 } val eventuallyAg = eventuallyAggregator(rightAg)(pred) eventuallyAg.semigroup diff --git a/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala index d0ac10995..98d4dfb17 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala @@ -120,17 +120,13 @@ class ExpHistLaws extends AnyPropSpec with ScalaCheckPropertyChecks { } // every histogram's relative error stays within bounds. 
- histograms.foreach { e => - assert(e.relativeError <= e.conf.epsilon) - } + histograms.foreach(e => assert(e.relativeError <= e.conf.epsilon)) } } property("Invariant 2: bucket sizes are nondecreasing powers of two") { forAll { e: ExpHist => - assert(e.buckets.forall { b => - isPowerOfTwo(b.size) - }) + assert(e.buckets.forall(b => isPowerOfTwo(b.size))) // sizes are nondecreasing: val sizes = e.buckets.map(_.size) @@ -139,15 +135,11 @@ class ExpHistLaws extends AnyPropSpec with ScalaCheckPropertyChecks { } property("Total tracked by e is the sum of all bucket sizes") { - forAll { e: ExpHist => - assert(e.buckets.map(_.size).sum == e.total) - } + forAll { e: ExpHist => assert(e.buckets.map(_.size).sum == e.total) } } property("ExpHist bucket sizes are the l-canonical rep of the tracked total") { - forAll { e: ExpHist => - assert(e.buckets.map(_.size) == Canonical.bucketsFromLong(e.total, e.conf.l)) - } + forAll { e: ExpHist => assert(e.buckets.map(_.size) == Canonical.bucketsFromLong(e.total, e.conf.l)) } } property("adding i results in upperBoundSum == i") { @@ -188,9 +180,7 @@ class ExpHistLaws extends AnyPropSpec with ScalaCheckPropertyChecks { } property("step(t) == add(0, t)") { - forAll { (expHist: ExpHist, ts: Timestamp) => - assert(expHist.step(ts) == expHist.add(0, ts)) - } + forAll((expHist: ExpHist, ts: Timestamp) => assert(expHist.step(ts) == expHist.add(0, ts))) } property("add(i) and inc i times should generate the same EH") { @@ -245,9 +235,7 @@ class ExpHistLaws extends AnyPropSpec with ScalaCheckPropertyChecks { assert(rebucketed.map(_.size) == desired) // all bucket sizes are now powers of two. - assert(rebucketed.forall { b => - isPowerOfTwo(b.size) - }) + assert(rebucketed.forall(b => isPowerOfTwo(b.size))) } } } @@ -267,9 +255,7 @@ class CanonicalLaws extends AnyPropSpec with ScalaCheckPropertyChecks { } property("canonical representation round-trips") { - forAll { (i: PosNum[Long], l: PosNum[Short]) => - assert(fromLong(i.value, l.value).toLong == i.value) - } + forAll((i: PosNum[Long], l: PosNum[Short]) => assert(fromLong(i.value, l.value).toLong == i.value)) } property("fromLong(i, k).sum == # of buckets required to encode i") { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/FirstLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/FirstLaws.scala index ab1580aed..03a856206 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/FirstLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/FirstLaws.scala @@ -14,16 +14,12 @@ class FirstLaws extends CheckProperties { } property("First.+ should work") { - forAll { (l: First[Int], r: First[Int]) => - l + r == l - } + forAll((l: First[Int], r: First[Int]) => l + r == l) } property("First.aggregator returns the first item") { - forAll { v: NonEmptyVector[Int] => - v.items.head == First.aggregator(v.items) - } + forAll { v: NonEmptyVector[Int] => v.items.head == First.aggregator(v.items) } } - property("First[Int] is a semigroup") { semigroupLaws[First[Int]] } + property("First[Int] is a semigroup")(semigroupLaws[First[Int]]) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala index 303806393..ee65b33b1 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala @@ -19,16 +19,17 @@ class FoldTest extends AnyWordSpec { } def run[I, O](fold: Fold[I, O], cases: Case[I, O]*): Unit = - cases.foreach { 
c => - assert(c.runCase(fold) === c.expected) - } + cases.foreach(c => assert(c.runCase(fold) === c.expected)) "Fold" should { "foldLeft" in { - run[String, String](Fold.foldLeft("") { (a, b) => - a ++ b - }, Zero(""), One("1", "1"), Many(Seq("1", "2", "3"), "123")) + run[String, String]( + Fold.foldLeft("")((a, b) => a ++ b), + Zero(""), + One("1", "1"), + Many(Seq("1", "2", "3"), "123") + ) } "seq" in { @@ -92,13 +93,16 @@ class FoldTest extends AnyWordSpec { } "average" in { - run[Int, Double](Fold.sum[Int].joinWith(Fold.size) { (s, c) => - s.toDouble / c - }, One(1, 1.0), Many(Seq(1, 2, 3), 2.0), Many(Seq(2, 1, 3), 2.0)) + run[Int, Double]( + Fold.sum[Int].joinWith(Fold.size)((s, c) => s.toDouble / c), + One(1, 1.0), + Many(Seq(1, 2, 3), 2.0), + Many(Seq(2, 1, 3), 2.0) + ) } "sequence" in { - run[Int, Seq[Long]](Fold.sequence(Seq(Fold.count { _ < 0 }, Fold.count { + run[Int, Seq[Long]](Fold.sequence(Seq(Fold.count(_ < 0), Fold.count { _ >= 0 })), Zero(Seq(0, 0)), One(1, Seq(0, 1)), Many(Seq(-2, -1, 0, 1, 2), Seq(2, 3))) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/FunctionMonoidTests.scala b/algebird-test/src/test/scala/com/twitter/algebird/FunctionMonoidTests.scala index e82c94727..26927ead4 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/FunctionMonoidTests.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/FunctionMonoidTests.scala @@ -8,9 +8,7 @@ class FunctionMonoidTests extends CheckProperties { // TODO: switch the scope of the quantification? Prop.forAll { (n: Int) => implicit val eq: Equiv[Function1[Int, Int]] = - Equiv.fromFunction { (f1, f2) => - f1(n) == f2(n) - } + Equiv.fromFunction((f1, f2) => f1(n) == f2(n)) monoidLaws[Function1[Int, Int]] } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogSeriesTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogSeriesTest.scala index 0a0d36b17..7c670116d 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogSeriesTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogSeriesTest.scala @@ -31,9 +31,7 @@ class HyperLogLogSeriesLaws extends CheckProperties { } property("HyperLogLogSeries is commutative") { - Prop.forAll { (h: HLLSeries, ts: List[Timestamp]) => - absorb(h, ts) == absorb(h, ts.reverse) - } + Prop.forAll((h: HLLSeries, ts: List[Timestamp]) => absorb(h, ts) == absorb(h, ts.reverse)) } property("series.approximateSizeSince(start) = h.since(t).toHLL.approximateSize") { @@ -43,9 +41,7 @@ class HyperLogLogSeriesLaws extends CheckProperties { } property("h.insert(bs, t) = m.plus(h, m.create(bs, t))") { - Prop.forAll { (h: HLLSeries, ts: List[Timestamp]) => - absorb(h, ts) == directAbsorb(h, ts) - } + Prop.forAll((h: HLLSeries, ts: List[Timestamp]) => absorb(h, ts) == directAbsorb(h, ts)) } // this is a deterministic test to ensure that our rates are staying @@ -67,9 +63,7 @@ class HyperLogLogSeriesLaws extends CheckProperties { // possible future regressions (where the error rate gets worse // than expected). 
val cardinalities = List(1024, 2048, 4096, 8192, 16384, 32768, 65536) - cardinalities.forall { n => - verify(n, 0.1) - } + cardinalities.forall(n => verify(n, 0.1)) } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala index 3a719c11a..6b9136dfe 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala @@ -18,10 +18,8 @@ object ReferenceHyperLogLog { def bytesToBitSet(in: Array[Byte]): BitSet = BitSet( in.zipWithIndex - .map { bi => - (bi._1, bi._2 * 8) - } - .flatMap { byteToIndicator(_) }: _* + .map(bi => (bi._1, bi._2 * 8)) + .flatMap(byteToIndicator(_)): _* ) def byteToIndicator(bi: (Byte, Int)): Seq[Int] = (0 to 7).flatMap { i => @@ -34,8 +32,8 @@ object ReferenceHyperLogLog { def jRhoW(in: Array[Byte], bits: Int): (Int, Byte) = { val onBits = bytesToBitSet(in) - val j = onBits.filter { _ < bits }.map { 1 << _ }.sum - val rhow = onBits.find { _ >= bits }.map { _ - bits + 1 }.getOrElse(0) + val j = onBits.filter(_ < bits).map(1 << _).sum + val rhow = onBits.find(_ >= bits).map(_ - bits + 1).getOrElse(0) (j, rhow.toByte) } @@ -77,9 +75,7 @@ class HyperLogLogLaws extends CheckProperties { * serialized HLLs */ property("HyperLogLog.hash matches reference") { - Prop.forAll { a: Array[Byte] => - HyperLogLog.hash(a).toSeq == ReferenceHyperLogLog.hash(a).toSeq - } + Prop.forAll { a: Array[Byte] => HyperLogLog.hash(a).toSeq == ReferenceHyperLogLog.hash(a).toSeq } } property("HyperLogLog.j and rhow match reference") { @@ -106,9 +102,7 @@ class jRhoWMatchTest extends AnyPropSpec with ScalaCheckPropertyChecks with Matc } property("jRhoW matches referenceJRhoW") { - forAll { (in: Array[Byte], bits: Int) => - assert(jRhoW(in, bits) == ReferenceHyperLogLog.jRhoW(in, bits)) - } + forAll((in: Array[Byte], bits: Int) => assert(jRhoW(in, bits) == ReferenceHyperLogLog.jRhoW(in, bits))) } } @@ -152,7 +146,7 @@ class HLLIntersectionProperty[T: Hash128: Gen](bits: Int, numHlls: Int) extends type Input = Unit type Result = Long - def makeApproximate(it: Seq[Seq[T]]) = it.map { iterableToHLL(_) } + def makeApproximate(it: Seq[Seq[T]]) = it.map(iterableToHLL(_)) def exactGenerator: Gen[Seq[Seq[T]]] = { val vectorGenerator: Gen[Seq[T]] = @@ -298,18 +292,16 @@ class HyperLogLogTest extends AnyWordSpec with Matchers { def exactCount[T](it: Iterable[T]): Int = it.toSet.size def approxCount[T <% Array[Byte]](bits: Int, it: Iterable[T]) = { val hll = new HyperLogLogMonoid(bits) - hll.sizeOf(hll.sum(it.map { hll.create(_) })).estimate.toDouble + hll.sizeOf(hll.sum(it.map(hll.create(_)))).estimate.toDouble } def aveErrorOf(bits: Int): Double = 1.04 / scala.math.sqrt(1 << bits) def testDownsize(dataSize: Int)(oldBits: Int, newBits: Int): Unit = { - val data = (0 until dataSize).map { _ => - r.nextLong - } + val data = (0 until dataSize).map(_ => r.nextLong) val exact = exactCount(data).toDouble val hll = new HyperLogLogMonoid(oldBits) - val oldHll = hll.sum(data.map { hll.create(_) }) + val oldHll = hll.sum(data.map(hll.create(_))) val newHll = oldHll.downsize(newBits) assert(scala.math.abs(exact - newHll.estimatedSize) / exact < 3.5 * aveErrorOf(newBits)) } @@ -342,12 +334,8 @@ class HyperLogLogTest extends AnyWordSpec with Matchers { } "be consistent for sparse vs. 
dense" in { val mon = new HyperLogLogMonoid(12) - val data = (1 to 100).map { _ => - r.nextLong - } - val partialSums = data.foldLeft(Seq(mon.zero)) { (seq, value) => - seq :+ (seq.last + mon.create(value)) - } + val data = (1 to 100).map(_ => r.nextLong) + val partialSums = data.foldLeft(Seq(mon.zero))((seq, value) => seq :+ (seq.last + mon.create(value))) // Now the ith entry of partialSums (0-based) is an HLL structure for i underlying elements partialSums.foreach { hll => assert(hll.isInstanceOf[SparseHLL]) @@ -360,12 +348,8 @@ class HyperLogLogTest extends AnyWordSpec with Matchers { } "properly convert to dense" in { val mon = new HyperLogLogMonoid(10) - val data = (1 to 200).map { _ => - r.nextLong - } - val partialSums = data.foldLeft(Seq(mon.zero)) { (seq, value) => - seq :+ (seq.last + mon.create(value)) - } + val data = (1 to 200).map(_ => r.nextLong) + val partialSums = data.foldLeft(Seq(mon.zero))((seq, value) => seq :+ (seq.last + mon.create(value))) partialSums.foreach { hll => if (hll.size - hll.zeroCnt <= 64) { assert(hll.isInstanceOf[SparseHLL]) @@ -376,23 +360,17 @@ class HyperLogLogTest extends AnyWordSpec with Matchers { } "properly do a batch create" in { val mon = new HyperLogLogMonoid(10) - val data = (1 to 200).map { _ => - r.nextLong - } + val data = (1 to 200).map(_ => r.nextLong) val partialSums = data.foldLeft(IndexedSeq(mon.zero)) { (seq, value) => seq :+ (seq.last + mon.create(value)) } - (1 to 200).map { n => - assert(partialSums(n) == mon.batchCreate(data.slice(0, n))) - } + (1 to 200).map(n => assert(partialSums(n) == mon.batchCreate(data.slice(0, n)))) } "work as an Aggregator and return a HLL" in { List(5, 7, 8, 10).foreach { bits => val aggregator = HyperLogLogAggregator(bits) - val data = (0 to 10000).map { _ => - r.nextInt(1000) - } + val data = (0 to 10000).map(_ => r.nextInt(1000)) val exact = exactCount(data).toDouble val approxCount = @@ -404,9 +382,7 @@ class HyperLogLogTest extends AnyWordSpec with Matchers { "work as an Aggregator and return size" in { List(5, 7, 8, 10).foreach { bits => val aggregator = HyperLogLogAggregator.sizeAggregator(bits) - val data = (0 to 10000).map { _ => - r.nextInt(1000) - } + val data = (0 to 10000).map(_ => r.nextInt(1000)) val exact = exactCount(data).toDouble val estimate = aggregator(data.map(int2Bytes(_))) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala index cc5fd65ea..266f44c40 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala @@ -70,27 +70,19 @@ class IntervalLaws extends CheckProperties { } property("[x, x + 1) does not contain x + 1") { - forAll { x: Int => - !Interval.leftClosedRightOpen(x, x + 1).contains(x + 1) - } + forAll { x: Int => !Interval.leftClosedRightOpen(x, x + 1).contains(x + 1) } } property("(x, x + 1] does not contain x") { - forAll { x: Int => - !Interval.leftOpenRightClosed(x, x + 1).contains(x) - } + forAll { x: Int => !Interval.leftOpenRightClosed(x, x + 1).contains(x) } } property("[x, x) is empty") { - forAll { x: Int => - Interval.leftClosedRightOpen(x, x).isEmpty - } + forAll { x: Int => Interval.leftClosedRightOpen(x, x).isEmpty } } property("(x, x] is empty") { - forAll { x: Int => - Interval.leftOpenRightClosed(x, x).isEmpty - } + forAll { x: Int => Interval.leftOpenRightClosed(x, x).isEmpty } } property("[x, y).isEmpty == (x >= y)") { @@ -135,33 +127,23 @@ class 
IntervalLaws extends CheckProperties { } property("[n, inf) and (-inf, n] intersect") { - forAll { (n: Long) => - InclusiveLower(n).intersects(InclusiveUpper(n)) - } + forAll((n: Long) => InclusiveLower(n).intersects(InclusiveUpper(n))) } property("(x, inf) and (-inf, y) intersects if and only if y > x") { - forAll { (x: Long, y: Long) => - ((y > x) == ExclusiveLower(x).intersects(ExclusiveUpper(y))) - } + forAll((x: Long, y: Long) => ((y > x) == ExclusiveLower(x).intersects(ExclusiveUpper(y)))) } property("(x, inf) and (-inf, y] intersect if and only if y > x") { - forAll { (x: Long, y: Long) => - ((y > x) == ExclusiveLower(x).intersects(InclusiveUpper(y))) - } + forAll((x: Long, y: Long) => ((y > x) == ExclusiveLower(x).intersects(InclusiveUpper(y)))) } property("[x, inf) and (-inf, y) intersect if and only if y > x") { - forAll { (x: Long, y: Long) => - ((y > x) == InclusiveLower(x).intersects(ExclusiveUpper(y))) - } + forAll((x: Long, y: Long) => ((y > x) == InclusiveLower(x).intersects(ExclusiveUpper(y)))) } property("[x, inf) and (-inf, y] intersect if and only if y >= x") { - forAll { (x: Long, y: Long) => - ((y >= x) == InclusiveLower(x).intersects(InclusiveUpper(y))) - } + forAll((x: Long, y: Long) => ((y >= x) == InclusiveLower(x).intersects(InclusiveUpper(y)))) } def lowerUpperIntersection(low: Lower[Long], upper: Upper[Long], items: List[Long]) = @@ -182,9 +164,7 @@ class IntervalLaws extends CheckProperties { } else { // nothing is in both low.least.map(upper.contains(_) == false).getOrElse(true) && - items.forall { i => - (low.contains(i) && upper.contains(i)) == false - } && + items.forall(i => (low.contains(i) && upper.contains(i)) == false) && (low && upper match { case Empty() => true case _ => false @@ -210,9 +190,7 @@ class IntervalLaws extends CheckProperties { .map { case Intersection(InclusiveLower(low), ExclusiveUpper(high)) => val intr2 = Interval.leftClosedRightOpen(low, high) - tests.forall { t => - intr(t) == intr2(t) - } + tests.forall(t => intr(t) == intr2(t)) } .getOrElse(true)) // none means this can't be expressed as this kind of interval } @@ -220,7 +198,7 @@ class IntervalLaws extends CheckProperties { property("least is the smallest") { forAll { (lower: Lower[Long]) => - ((for { + (for { le <- lower.least ple <- Predecessible.prev(le) } yield lower.contains(le) && !lower.contains(ple)) @@ -229,13 +207,13 @@ class IntervalLaws extends CheckProperties { case InclusiveLower(l) => l == Long.MinValue case ExclusiveLower(l) => l == Long.MaxValue } - }) + } } } property("greatest is the biggest") { forAll { (upper: Upper[Long]) => - ((for { + (for { gr <- upper.greatest ngr <- Successible.next(gr) } yield upper.contains(gr) && !upper.contains(ngr)) @@ -244,7 +222,7 @@ class IntervalLaws extends CheckProperties { case InclusiveUpper(l) => l == Long.MaxValue case ExclusiveUpper(l) => l == Long.MinValue } - }) + } } } @@ -279,9 +257,7 @@ class IntervalLaws extends CheckProperties { forAll { (intr: Interval[Long], i: Long, rest: List[Long]) => intr.boundedLeast match { case Some(l) => - (i :: rest).forall { v => - !intr(v) || (l <= v) - } + (i :: rest).forall(v => !intr(v) || (l <= v)) case None => true } } @@ -290,9 +266,7 @@ class IntervalLaws extends CheckProperties { forAll { (intr: Interval[Long], i: Long, rest: List[Long]) => intr.boundedGreatest match { case Some(u) => - (i :: rest).forall { v => - !intr(v) || (v <= u) - } + (i :: rest).forall(v => !intr(v) || (v <= u)) case None => true } } diff --git 
a/algebird-test/src/test/scala/com/twitter/algebird/JavaBoxedTests.scala b/algebird-test/src/test/scala/com/twitter/algebird/JavaBoxedTests.scala index 85b8cce92..0685866da 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/JavaBoxedTests.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/JavaBoxedTests.scala @@ -54,7 +54,7 @@ class JavaBoxedTests extends CheckProperties { // TODO add testing with JFloat/JDouble but check for approximate equals, pain in the ass. implicit def jlist[T: Arbitrary] = Arbitrary { - implicitly[Arbitrary[List[T]]].arbitrary.map { _.asJava } + implicitly[Arbitrary[List[T]]].arbitrary.map(_.asJava) } property("JList is a Monoid") { @@ -63,9 +63,7 @@ class JavaBoxedTests extends CheckProperties { implicit def jmap[K: Arbitrary, V: Arbitrary: Semigroup] = Arbitrary { implicitly[Arbitrary[Map[K, V]]].arbitrary.map { - _.filter { kv => - isNonZero[V](kv._2) - }.asJava + _.filter(kv => isNonZero[V](kv._2)).asJava } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/LastLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/LastLaws.scala index 85ad8f66e..49b54dce2 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/LastLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/LastLaws.scala @@ -14,16 +14,12 @@ class LastLaws extends CheckProperties { } property("Last.+ should work") { - forAll { (l: Last[Int], r: Last[Int]) => - l + r == r - } + forAll((l: Last[Int], r: Last[Int]) => l + r == r) } property("Last.aggregator returns the last item") { - forAll { v: NonEmptyVector[Int] => - v.items.last == Last.aggregator(v.items) - } + forAll { v: NonEmptyVector[Int] => v.items.last == Last.aggregator(v.items) } } - property("Last[Int] is a Semigroup") { semigroupLaws[Last[Int]] } + property("Last[Int] is a Semigroup")(semigroupLaws[Last[Int]]) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MaxLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/MaxLaws.scala index debbfbc45..bf7e36818 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MaxLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MaxLaws.scala @@ -15,7 +15,7 @@ class MaxLaws extends CheckProperties { def maxSemiGroupTest[T: Arbitrary: Ordering] = forAll { v: NonEmptyVector[T] => - val maxItems = v.items.map { Max(_) } + val maxItems = v.items.map(Max(_)) v.items.max == Max.semigroup[T].combineAllOption(maxItems).get.get } @@ -27,12 +27,10 @@ class MaxLaws extends CheckProperties { val sgString = implicitly[Semigroup[Max[String]]] val monoidString = implicitly[Monoid[Max[String]]] - property("Max.{ +, max } works on ints") { maxTest[Int] } + property("Max.{ +, max } works on ints")(maxTest[Int]) property("Max.aggregator returns the maximum item") { - forAll { v: NonEmptyVector[Int] => - v.items.max == Max.aggregator[Int].apply(v.items) - } + forAll { v: NonEmptyVector[Int] => v.items.max == Max.aggregator[Int].apply(v.items) } } property("Max.semigroup[Int] returns the maximum item") { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala index ec4abfe63..1dd77b2aa 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala @@ -57,19 +57,11 @@ class MinHasherSpec extends AnyWordSpec with Matchers { def approxSimilarity[T, H](mh: MinHasher[H], x: Set[T], y: Set[T]) = { val sig1 = x - .map { l => - 
mh.init(l.toString) - } - .reduce { (a, b) => - mh.plus(a, b) - } + .map(l => mh.init(l.toString)) + .reduce((a, b) => mh.plus(a, b)) val sig2 = y - .map { l => - mh.init(l.toString) - } - .reduce { (a, b) => - mh.plus(a, b) - } + .map(l => mh.init(l.toString)) + .reduce((a, b) => mh.plus(a, b)) mh.similarity(sig1, sig2) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MinLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/MinLaws.scala index cd9a0958a..fb9a7ba01 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MinLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MinLaws.scala @@ -15,23 +15,21 @@ class MinLaws extends CheckProperties { def minSemigroupTest[T: Arbitrary: Ordering] = forAll { v: NonEmptyVector[T] => - val minItems = v.items.map { Min(_) } + val minItems = v.items.map(Min(_)) v.items.min == Min.semigroup[T].combineAllOption(minItems).get.get } // Test equiv import. val equiv = implicitly[Equiv[Min[Int]]] - property("Min.{ +, min } works on ints") { minTest[Int] } + property("Min.{ +, min } works on ints")(minTest[Int]) property("Min should work on non-monoid types like String") { minTest[String] } property("Min.aggregator returns the minimum item") { - forAll { v: NonEmptyVector[Int] => - v.items.min == Min.aggregator[Int].apply(v.items) - } + forAll { v: NonEmptyVector[Int] => v.items.min == Min.aggregator[Int].apply(v.items) } } property("Min.semigroup[Int] returns the minimum item") { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala index 2ea91ac31..625f67d17 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala @@ -31,9 +31,7 @@ class MomentsTest extends AnyWordSpec with Matchers { * the list's central moments. 
*/ def getMoments(xs: List[Double]): Moments = - xs.foldLeft(MomentsGroup.zero) { (m, x) => - MomentsGroup.plus(m, Moments(x)) - } + xs.foldLeft(MomentsGroup.zero)((m, x) => MomentsGroup.plus(m, Moments(x))) "Moments should count" in { val m1 = getMoments(List(1, 2, 3, 4, 5)) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MonadInstanceLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/MonadInstanceLaws.scala index 21e13c48e..3e4a8f748 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MonadInstanceLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MonadInstanceLaws.scala @@ -86,9 +86,7 @@ class MonadInstanceLaws extends CheckProperties { bigReader(m1) // This should be the same as this loop: - fns.foreach { fn => - m2.inc(fn(m2.item)) - } + fns.foreach(fn => m2.inc(fn(m2.item))) m1.item == m2.item } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala b/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala index cf9057da4..441375259 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala @@ -40,7 +40,7 @@ class NumericSpecification extends AnyPropSpec with ScalaCheckPropertyChecks wit (a == mon.plus(mon.zero, a)) && (a == mon.plus(a, mon.zero)) && (a == grp.minus(a, grp.zero)) && - (mon.nonZeroOption(a) == Some(a).filter { _ != num.zero }) + (mon.nonZeroOption(a) == Some(a).filter(_ != num.zero)) ) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/PredecessibleProperties.scala b/algebird-test/src/test/scala/com/twitter/algebird/PredecessibleProperties.scala index 876e384c8..8023122a2 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/PredecessibleProperties.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/PredecessibleProperties.scala @@ -18,9 +18,9 @@ package com.twitter.algebird class PredecessibleProperties extends CheckProperties { import com.twitter.algebird.PredecessibleLaws.{predecessibleLaws => laws} - property("Int is Predecessible") { laws[Int] } - property("Long is Predecessible") { laws[Long] } - property("BigInt is Predecessible") { laws[BigInt] } + property("Int is Predecessible")(laws[Int]) + property("Long is Predecessible")(laws[Long]) + property("BigInt is Predecessible")(laws[BigInt]) property("Predecessible.fromPrevOrd[Int] is Predecessible") { implicit val pred = Predecessible.fromPrevOrd[Int](IntegralPredecessible.prev(_)) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/PreparerLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/PreparerLaws.scala index 073fa1aef..28cf1f452 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/PreparerLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/PreparerLaws.scala @@ -45,9 +45,7 @@ class PreparerLaws extends CheckProperties { property("split with two aggregators is correct") { forAll { (in: List[Int], ag1: Aggregator[Int, Set[Int], Int], ag2: Aggregator[Int, Unit, String]) => - val c = Preparer[Int].split { p => - (p.aggregate(ag1), p.aggregate(ag2)) - } + val c = Preparer[Int].split(p => (p.aggregate(ag1), p.aggregate(ag2))) in.isEmpty || c(in) == ((ag1(in), ag2(in))) } } @@ -76,17 +74,22 @@ class PreparerLaws extends CheckProperties { } property("map, flatMap, and split all together are correct") { - forAll { (in: List[Int], mapFn: (Int => Int), flatMapFn: (Int => List[Int]), ag1: MonoidAggregator[Int, Int, 
Int], ag2: MonoidAggregator[Int, Int, Int]) => - val ag = - Preparer[Int] - .map(mapFn) - .flatMap(flatMapFn) - .split { a => - (a.aggregate(ag1), a.aggregate(ag2)) - } + forAll { + ( + in: List[Int], + mapFn: (Int => Int), + flatMapFn: (Int => List[Int]), + ag1: MonoidAggregator[Int, Int, Int], + ag2: MonoidAggregator[Int, Int, Int] + ) => + val ag = + Preparer[Int] + .map(mapFn) + .flatMap(flatMapFn) + .split(a => (a.aggregate(ag1), a.aggregate(ag2))) - val preSplit = in.map(mapFn).flatMap(flatMapFn) - in.isEmpty || ag(in) == ((ag1(preSplit), ag2(preSplit))) + val preSplit = in.map(mapFn).flatMap(flatMapFn) + in.isEmpty || ag(in) == ((ag1(preSplit), ag2(preSplit))) } } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala index cb1accf5b..bf916cc1b 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala @@ -42,7 +42,7 @@ class QTreeTest extends AnyWordSpec with Matchers { def buildQTree(k: Int, list: Seq[Double]) = { val qtSemigroup = new QTreeSemigroup[Double](k) - qtSemigroup.sumOption(list.map { QTree(_) }).get + qtSemigroup.sumOption(list.map(QTree(_))).get } def trueQuantile[T: Ordering](list: Seq[T], q: Double): T = { @@ -52,7 +52,7 @@ class QTreeTest extends AnyWordSpec with Matchers { } def trueRangeSum(list: Seq[Double], from: Double, to: Double) = - list.filter { _ >= from }.filter { _ < to }.sum + list.filter(_ >= from).filter(_ < to).sum for (k <- Seq(3, 11, 51, 101)) { s"QTree with elements (1 to $k)" should { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/RightFolded2Test.scala b/algebird-test/src/test/scala/com/twitter/algebird/RightFolded2Test.scala index 3af7bf4d9..4b9f181e1 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/RightFolded2Test.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/RightFolded2Test.scala @@ -57,7 +57,7 @@ class RightFolded2Test extends CheckProperties { case RightFoldedValue2(v, _, _) => { Some( l.dropRight(1) - .flatMap { _.asInstanceOf[RightFoldedToFold2[In]].in } + .flatMap(_.asInstanceOf[RightFoldedToFold2[In]].in) .foldRight(v)(foldfn) ) } @@ -74,7 +74,7 @@ class RightFolded2Test extends CheckProperties { val chunks = chunk(l)(notIsVal) val grp = implicitly[Group[Acc]] - val vals = chunks.map { fold(_)(foldfn).map(mapfn).getOrElse(grp.zero) } + val vals = chunks.map(fold(_)(foldfn).map(mapfn).getOrElse(grp.zero)) grp.sum(vals) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/RightFoldedTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/RightFoldedTest.scala index 04da4952e..80d3d03ce 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/RightFoldedTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/RightFoldedTest.scala @@ -19,9 +19,7 @@ class RightFoldedTest extends CheckProperties { Gen.oneOf(rightFoldedValue[Out].arbitrary, rightFoldedToFold[In].arbitrary) } - implicit val rightFoldedMonoid = RightFolded.monoid[Int, Long] { (i, l) => - l + i.toLong - } + implicit val rightFoldedMonoid = RightFolded.monoid[Int, Long]((i, l) => l + i.toLong) property("RightFolded is a monoid") { monoidLaws[RightFolded[Int, Long]] diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SGDTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SGDTest.scala index a6996eb8e..3dc45c328 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SGDTest.scala +++ 
b/algebird-test/src/test/scala/com/twitter/algebird/SGDTest.scala @@ -52,9 +52,7 @@ class SGDLaws extends CheckProperties { } property("Gradient at x=0 has zero first component") { - forAll { (w: SGDWeights, y: Double) => - (SGD.linearGradient(w.weights, (y, IndexedSeq(0.0)))(0) == 0.0) - } + forAll((w: SGDWeights, y: Double) => (SGD.linearGradient(w.weights, (y, IndexedSeq(0.0)))(0) == 0.0)) } property("Zero-step leaves Weights unchanged") { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala index 1ebb5729d..d920df00a 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala @@ -23,9 +23,7 @@ class SetDiffTest extends AnyWordSpec with Matchers with Checkers { check(BaseProperties.monoidLaws[SetDiff[Int]]) } "be idempotent" in { - check { (d: SetDiff[Int]) => - d.merge(d) == d - } + check((d: SetDiff[Int]) => d.merge(d) == d) } /** @@ -46,40 +44,26 @@ class SetDiffTest extends AnyWordSpec with Matchers with Checkers { } } "+ is the same as SetDiff.add" in { - check { (d: SetDiff[Int], inc: Int) => - d + inc == (d.merge(SetDiff.add(inc))) - } + check((d: SetDiff[Int], inc: Int) => d + inc == (d.merge(SetDiff.add(inc)))) } "- is the same as SetDiff.remove" in { - check { (d: SetDiff[Int], dec: Int) => - d - dec == (d.merge(SetDiff.remove(dec))) - } + check((d: SetDiff[Int], dec: Int) => d - dec == (d.merge(SetDiff.remove(dec)))) } "++ is the same as SetDiff.addAll" in { - check { (d: SetDiff[Int], inc: Set[Int]) => - d ++ inc == (d.merge(SetDiff.addAll(inc))) - } + check((d: SetDiff[Int], inc: Set[Int]) => d ++ inc == (d.merge(SetDiff.addAll(inc)))) } "-- is the same as SetDiff.removeAll" in { - check { (d: SetDiff[Int], dec: Set[Int]) => - d -- dec == (d.merge(SetDiff.removeAll(dec))) - } + check((d: SetDiff[Int], dec: Set[Int]) => d -- dec == (d.merge(SetDiff.removeAll(dec)))) } "+ then - is the same as -" in { - check { (i: Int) => - (SetDiff.add(i).merge(SetDiff.remove(i))) == SetDiff.remove(i) - } + check((i: Int) => (SetDiff.add(i).merge(SetDiff.remove(i))) == SetDiff.remove(i)) } "- then + is the same as +" in { - check { (i: Int) => - (SetDiff.remove(i).merge(SetDiff.add(i))) == SetDiff.add(i) - } + check((i: Int) => (SetDiff.remove(i).merge(SetDiff.add(i))) == SetDiff.add(i)) } "apply diffs between sets" in { - check { (oldSet: Set[String], newSet: Set[String]) => - SetDiff.of(oldSet, newSet)(oldSet) == newSet - } + check((oldSet: Set[String], newSet: Set[String]) => SetDiff.of(oldSet, newSet)(oldSet) == newSet) } "create proper diffs" in { @@ -100,9 +84,7 @@ class SetDiffTest extends AnyWordSpec with Matchers with Checkers { } "apply distributes over merge" in { - check { (init: Set[Int], a: SetDiff[Int], b: SetDiff[Int]) => - a.merge(b)(init) == b(a(init)) - } + check((init: Set[Int], a: SetDiff[Int], b: SetDiff[Int]) => a.merge(b)(init) == b(a(init))) } "strict application fails if the diff tries to remove extra items" in { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala index e72b485c0..855f95cb7 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala @@ -48,9 +48,7 @@ class SketchMapTest extends AnyWordSpec with Matchers { "count total number of elements in a stream" in { val 
totalCount = 1243 val range = 234 - val data = (0 to (totalCount - 1)).map { _ => - (RAND.nextInt(range), 1L) - } + val data = (0 to (totalCount - 1)).map(_ => (RAND.nextInt(range), 1L)) val sm = MONOID.create(data) assert(sm.totalValue == totalCount) } @@ -123,7 +121,7 @@ class SketchMapTest extends AnyWordSpec with Matchers { // Ordering that orders from biggest to smallest (so that HeavyHitters // are the smallest numbers). - val smallerOrdering: Ordering[Long] = Ordering.by[Long, Long] { -_ } + val smallerOrdering: Ordering[Long] = Ordering.by[Long, Long](-_) val monoid = SketchMap.monoid[Int, Long](PARAMS)(smallerOrdering, smallerMonoid) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala index 86885a17b..0c4fc72e4 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala @@ -19,9 +19,8 @@ class SpaceSaverLaws extends CheckProperties { forAll(Gen.choose(1, 100)) { range => // need a non-uniform distro implicit val ssGenOne: Arbitrary[SSOne[Int]] = Arbitrary { - for (key <- Gen.frequency((1 to range).map { x => - (x * x, x: Gen[Int]) - }: _*)) yield SpaceSaver(capacity, key).asInstanceOf[SSOne[Int]] + for (key <- Gen.frequency((1 to range).map(x => (x * x, x: Gen[Int])): _*)) + yield SpaceSaver(capacity, key).asInstanceOf[SSOne[Int]] } implicit def ssGen(implicit sg: Semigroup[SpaceSaver[Int]]): Arbitrary[SpaceSaver[Int]] = Arbitrary { @@ -34,9 +33,7 @@ class SpaceSaverLaws extends CheckProperties { } implicit def equiv[T]: Equiv[SpaceSaver[T]] = - Equiv.fromFunction { (left, right) => - (left.consistentWith(right)) && (right.consistentWith(left)) - } + Equiv.fromFunction((left, right) => (left.consistentWith(right)) && (right.consistentWith(left))) commutativeSemigroupLaws[SpaceSaver[Int]] } @@ -71,9 +68,7 @@ class SpaceSaverTest extends AnyWordSpec with Matchers { "SpaceSaver" should { "produce a top 20 with exact bounds" in { - val gen = Gen.frequency((1 to 100).map { x => - (x * x, x: Gen[Int]) - }: _*) + val gen = Gen.frequency((1 to 100).map(x => (x * x, x: Gen[Int])): _*) val items = (1 to 1000).map(_ => gen.sample.get) val exactCounts = items.groupBy(identity).mapValues(_.size) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SuccessibleProperties.scala b/algebird-test/src/test/scala/com/twitter/algebird/SuccessibleProperties.scala index 7cd47ac97..901305cea 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SuccessibleProperties.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SuccessibleProperties.scala @@ -21,9 +21,9 @@ import org.scalacheck.Prop.forAll class SuccessibleProperties extends CheckProperties { import com.twitter.algebird.SuccessibleLaws.{successibleLaws => laws} - property("Int is Successible") { laws[Int] } - property("Long is Successible") { laws[Long] } - property("BigInt is Successible") { laws[BigInt] } + property("Int is Successible")(laws[Int]) + property("Long is Successible")(laws[Long]) + property("BigInt is Successible")(laws[BigInt]) property("Successible.fromNextOrd[Int] is Successible") { implicit val succ = Successible.fromNextOrd[Int](IntegralSuccessible.next(_)) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala index 8180c8d7c..c0a8f7fe4 100644 --- 
a/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala @@ -30,9 +30,7 @@ object SummingIteratorTest { zl.forall { case (k, v) => zr.get(k) - .map { rv => - Equiv[V].equiv(rv, v) - } + .map(rv => Equiv[V].equiv(rv, v)) .getOrElse(false) } } @@ -51,7 +49,7 @@ class SummingIteratorTest extends AnyPropSpec with ScalaCheckPropertyChecks with property("With Maps is preserved[(Short,Int)]") { forAll { (cap: Capacity, items: List[(Short, Int)]) => - val mitems = items.map { Map(_) } + val mitems = items.map(Map(_)) val qit = SummingIterator[Map[Short, Int]](SummingQueue[Map[Short, Int]](cap.c), mitems.iterator) val qitc = @@ -62,7 +60,7 @@ class SummingIteratorTest extends AnyPropSpec with ScalaCheckPropertyChecks with property("With Maps is preserved[(Short,String)]") { forAll { (cap: Capacity, items: List[(Short, String)]) => - val mitems = items.map { Map(_) } + val mitems = items.map(Map(_)) val qit = SummingIterator(SummingQueue[Map[Short, String]](cap.c), mitems.iterator) val qitc = SummingIterator(SummingCache[Short, String](cap.c), mitems.iterator) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SummingQueueTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SummingQueueTest.scala index 0170444fb..78c661ad7 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SummingQueueTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SummingQueueTest.scala @@ -38,22 +38,18 @@ class SummingCacheTest extends CheckProperties { // Maps are tricky to compare equality for since zero values are often removed def test[K, V: Monoid](c: Capacity, items: List[(K, V)]) = { val sc = newCache[K, V](c) - val mitems = items.map { Map(_) } + val mitems = items.map(Map(_)) implicit val mapEq = mapEquiv[K, V] StatefulSummerLaws.sumIsPreserved(sc, mitems) && StatefulSummerLaws.isFlushedIsConsistent(sc, mitems) } property("puts are like sums (Int, Int)") { - forAll { (c: Capacity, items: List[(Int, Int)]) => - test(c, items) - } + forAll((c: Capacity, items: List[(Int, Int)]) => test(c, items)) } // String is not commutative: property("puts are like sums (Int, List[Int])") { - forAll { (c: Capacity, items: List[(Int, List[Int])]) => - test(c, items) - } + forAll((c: Capacity, items: List[(Int, List[Int])]) => test(c, items)) } } @@ -71,8 +67,8 @@ class SummingWithHitsCacheTest extends SummingCacheTest { def getHits[K, V: Monoid](c: Capacity, items: List[(K, V)]) = { val sc = SummingWithHitsCache[K, V](c.cap) - val mitems = items.map { Map(_) } - mitems.map { sc.putWithHits(_)._1 }.tail + val mitems = items.map(Map(_)) + mitems.map(sc.putWithHits(_)._1).tail } property("hit rates will always be 1 for stream with the same key") { @@ -80,7 +76,7 @@ class SummingWithHitsCacheTest extends SummingCacheTest { // Only run this when we have at least 2 items and non-zero cap (values.size > 1 && c.cap > 1) ==> { val key = RAND.nextInt - val items = values.map { (key, _) } + val items = values.map((key, _)) val keyHits = getHits(c, items) !keyHits.exists(_ != 1) } @@ -113,17 +109,13 @@ class SummingQueueTest extends CheckProperties { val zeroCapQueue = SummingQueue[Int](0) // passes all through property("0 capacity always returns") { - forAll { i: Int => - zeroCapQueue(i) == Some(i) - } + forAll { i: Int => zeroCapQueue(i) == Some(i) } } val sb = SummingQueue[Int](3) // buffers three at a time property("puts are like sums") { - forAll { (items: List[Int]) => - 
StatefulSummerLaws.sumIsPreserved(sb, items) - } + forAll((items: List[Int]) => StatefulSummerLaws.sumIsPreserved(sb, items)) } property("puts are like sums(String)") { @@ -134,21 +126,17 @@ class SummingQueueTest extends CheckProperties { } property("isFlushed is consistent") { - forAll { (items: List[Int]) => - StatefulSummerLaws.isFlushedIsConsistent(sb, items) - } + forAll((items: List[Int]) => StatefulSummerLaws.isFlushedIsConsistent(sb, items)) } property("puts return None sometimes") { forAll { (items: List[Int]) => // Should be: true, true, true, false, true, true, true, false sb.flush - val empties = items.map { sb.put(_).isEmpty } + val empties = items.map(sb.put(_).isEmpty) val correct = Stream .continually(Stream(true, true, true, false)) - .flatMap { s => - s - } + .flatMap(s => s) .take(empties.size) .toList empties == correct diff --git a/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala b/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala index bacce91ab..e4e15932e 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala @@ -30,7 +30,7 @@ class TopKTests extends CheckProperties { implicit def qmonoid = new PriorityQueueMonoid[Int](SIZE) implicit def queueArb = Arbitrary { - implicitly[Arbitrary[List[Int]]].arbitrary.map { qmonoid.build(_) } + implicitly[Arbitrary[List[Int]]].arbitrary.map(qmonoid.build(_)) } def q2l(q: PriorityQueue[Int]): List[Int] = q.iterator.asScala.toList.sorted @@ -40,15 +40,11 @@ class TopKTests extends CheckProperties { def pqIsCorrect(items: List[List[Int]]): Boolean = { val correct = items.flatten.sorted.take(SIZE) // Have to do this last since this monoid is mutating inputs - q2l(Monoid.sum(items.map { l => - qmonoid.build(l) - })) == correct + q2l(Monoid.sum(items.map(l => qmonoid.build(l)))) == correct } property("PriorityQueueMonoid works") { - forAll { (items: List[List[Int]]) => - pqIsCorrect(items) - } + forAll((items: List[List[Int]]) => pqIsCorrect(items)) } /** @@ -67,7 +63,7 @@ class TopKTests extends CheckProperties { implicit def tkmonoid = new TopKMonoid[Int](SIZE) implicit def topkArb = Arbitrary { - implicitly[Arbitrary[List[Int]]].arbitrary.map { tkmonoid.build(_) } + implicitly[Arbitrary[List[Int]]].arbitrary.map(tkmonoid.build(_)) } property("TopKMonoid works") { @@ -75,9 +71,7 @@ class TopKTests extends CheckProperties { val correct = its.flatten.sorted.take(SIZE) Equiv[List[Int]].equiv( Monoid - .sum(its.map { l => - tkmonoid.build(l) - }) + .sum(its.map(l => tkmonoid.build(l))) .items, correct ) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala index cc8b8098d..b82d32c63 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala @@ -11,7 +11,7 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { val data = List(1, 3, 2, 0, 5, 6) val MinAgg = Aggregator.min[Int] - val longData = data.map { _.toLong } + val longData = data.map(_.toLong) val SizeAgg = Aggregator.size "GeneratedTupleAggregators" should { @@ -2086,7 +2086,7 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { "MapAggregator" should { - val MinLongAgg = Aggregator.min[Int].andThenPresent { _.toLong } + val MinLongAgg = Aggregator.min[Int].andThenPresent(_.toLong) "Create an aggregator 
from 1 (key, aggregator) pair" in { val agg: MapMonoidAggregator[Int, Long, String, Long] = diff --git a/algebird-test/src/test/scala/com/twitter/algebird/VectorSpaceProperties.scala b/algebird-test/src/test/scala/com/twitter/algebird/VectorSpaceProperties.scala index 537e9a9b2..1665a292e 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/VectorSpaceProperties.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/VectorSpaceProperties.scala @@ -32,7 +32,7 @@ class VectorSpaceProperties extends CheckProperties { } } - implicit val genDouble = Arbitrary { Gen.choose(-1.0e50, 1.0e50) } + implicit val genDouble = Arbitrary(Gen.choose(-1.0e50, 1.0e50)) property("map int double scaling") { vectorSpaceLaws[Double, ({ type x[a] = Map[Int, a] })#x](mapEqFn(_, _)) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/WindowLawsTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/WindowLawsTest.scala index 60d221acb..aaf772f75 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/WindowLawsTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/WindowLawsTest.scala @@ -19,7 +19,7 @@ class WindowLaws extends CheckProperties { } yield Window.fromIterable(as) } - property("Window obeys monoid laws using a group") { monoidLaws[Window[Int]] } + property("Window obeys monoid laws using a group")(monoidLaws[Window[Int]]) property("Window obeys monoid laws using a monoid") { implicit val mon = Window.monoid[String](5) monoidLaws[Window[String]] @@ -62,7 +62,7 @@ class WindowTest extends CheckProperties { forAll { (ts0: List[Int], pn: PosNum[Int]) => val n = pn.value val mon = Window.monoid[Int](n) - val got = mon.sumOption(ts0.map { Window(_) }) + val got = mon.sumOption(ts0.map(Window(_))) val trunc = Queue(ts0.takeRight(n): _*) val expected = if (ts0.size == 0) None else Some(Window(trunc.sum, trunc)) expected == got diff --git a/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala b/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala index 428c92be9..825f8cccf 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala @@ -12,14 +12,14 @@ class StatisticsRingLaws extends CheckProperties with Matchers { implicit val statsRing = new StatisticsRing[Int] implicit val arb = Arbitrary(for (v <- choose(0, 1 << 30)) yield v) - property("StatisticsRing is a Ring") { ringLaws[Int] } + property("StatisticsRing is a Ring")(ringLaws[Int]) } class StatisticsMonoidLaws extends CheckProperties with Matchers { implicit val statsMonoid = new StatisticsMonoid[Int] implicit val arb = Arbitrary(for (v <- choose(0, 1 << 14)) yield v) - property("StatisticsMonoid is a Monoid") { monoidLaws[Int] } + property("StatisticsMonoid is a Monoid")(monoidLaws[Int]) } class StatisticsTest extends AnyWordSpec with Matchers { diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala index 491e2e9c2..268c8e6a4 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala @@ -29,9 +29,7 @@ class PromiseLinkMonoid[V](monoid: Monoid[V]) extends Monoid[PromiseLink[V]] { / def plus(older: PromiseLink[V], newer: PromiseLink[V]): PromiseLink[V] = { val (PromiseLink(p1, v1), 
PromiseLink(p2, v2)) = (older, newer) - p2.foreach { futureV => - Tunnel.properPromiseUpdate(p1, monoid.plus(futureV, v2)) - } + p2.foreach(futureV => Tunnel.properPromiseUpdate(p1, monoid.plus(futureV, v2))) PromiseLink(p2, monoid.plus(v1, v2)) } diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/TunnelMonoid.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/TunnelMonoid.scala index 49504b067..67047fe73 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/TunnelMonoid.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/TunnelMonoid.scala @@ -38,7 +38,7 @@ class TunnelMonoid[V] extends Monoid[Tunnel[V]] { def plus(older: Tunnel[V], newer: Tunnel[V]): Tunnel[V] = { val (Tunnel(f1, p1), Tunnel(f2, p2)) = (older, newer) - f2.foreach { Tunnel.properPromiseUpdate(p1, _) } + f2.foreach(Tunnel.properPromiseUpdate(p1, _)) Tunnel(f1, p2) } } @@ -78,7 +78,7 @@ object Tunnel { */ def toIncrement[V](v: V)(implicit monoid: Monoid[V]) = { val promise = new Promise[V] - Tunnel(promise.map { monoid.plus(_, v) }, promise) + Tunnel(promise.map(monoid.plus(_, v)), promise) } /** diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala index b5596afc9..12a90551c 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala @@ -46,7 +46,7 @@ class AsyncListMMapSum[Key, Value]( protected override val emptyResult = Map.empty[Key, Value] - override def isFlushed: Boolean = mutex.synchronized { presentTuples == 0 } + override def isFlushed: Boolean = mutex.synchronized(presentTuples == 0) override def flush: Future[Map[Key, Value]] = workPool { diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala index 78da84c9c..6eae7e13c 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala @@ -97,9 +97,7 @@ class AsyncListSum[Key, Value]( }.toSeq) lFuts .map(_.toMap) - .foreach { r => - tuplesOut.incrBy(r.size) - } + .foreach(r => tuplesOut.incrBy(r.size)) }.flatten @annotation.tailrec diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala index f3f00b4f7..e5b1635f5 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala @@ -33,9 +33,7 @@ trait AsyncSummer[T, +M <: Iterable[T]] { self => new AsyncSummerProxy[T, M] { override val self = oldSelf override def cleanup = - oldSelf.cleanup.flatMap { _ => - cleanupFn() - } + oldSelf.cleanup.flatMap(_ => cleanupFn()) } } } diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala index 31177f5e0..ba6fc0bfc 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala @@ -52,9 +52,7 @@ class 
ApproxHHTracker(hhPct: HeavyHittersPercent, updateFreq: UpdateFrequency, r private[this] final val hashes: IndexedSeq[CMSHash[Long]] = { val r = new scala.util.Random(5) - (0 until DEPTH).map { _ => - CMSHash[Long](r.nextInt, 0, WIDTH) - } + (0 until DEPTH).map(_ => CMSHash[Long](r.nextInt, 0, WIDTH)) }.toIndexedSeq @inline @@ -249,9 +247,7 @@ class HeavyHittersCachingSummer[K, V]( def addAll(vals: TraversableOnce[T]): Future[Iterable[T]] = { //todo not sure if need to increment as backing summer may already be doing it insertOp.incr - val (hh, nonHH) = approxHH.splitTraversableOnce(vals, { t: T => - t._1.hashCode - }) + val (hh, nonHH) = approxHH.splitTraversableOnce(vals, { t: T => t._1.hashCode }) if (!hh.isEmpty) { backingSummer.addAll(hh).map { fResp => diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/SyncSummingQueue.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/SyncSummingQueue.scala index df927e668..77a6bbf29 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/SyncSummingQueue.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/SyncSummingQueue.scala @@ -106,5 +106,5 @@ class CustomSummingQueue[V](capacity: Int, sizeIncr: Incrementor, putCalls: Incr queue.drainTo(toSum.asJava) Semigroup.sumOption(toSum) } - def isFlushed: Boolean = queueOption.map { _.size == 0 }.getOrElse(true) + def isFlushed: Boolean = queueOption.map(_.size == 0).getOrElse(true) } diff --git a/algebird-util/src/test/scala/com/twitter/algebird/util/PromiseLinkMonoidProperties.scala b/algebird-util/src/test/scala/com/twitter/algebird/util/PromiseLinkMonoidProperties.scala index 6aad562f8..e0d079470 100644 --- a/algebird-util/src/test/scala/com/twitter/algebird/util/PromiseLinkMonoidProperties.scala +++ b/algebird-util/src/test/scala/com/twitter/algebird/util/PromiseLinkMonoidProperties.scala @@ -23,7 +23,7 @@ class PromiseLinkMonoidProperties extends CheckProperties { def makeTunnel(seed: Int) = PromiseLink.toPromiseLink(seed) def collapseFinalValues(finalTunnel: PromiseLink[Int], tunnels: Seq[PromiseLink[Int]], toFeed: Int) = { finalTunnel.completeWithStartingValue(toFeed) - finalTunnel.promise +: tunnels.map { _.promise } + finalTunnel.promise +: tunnels.map(_.promise) } TunnelMonoidProperties.testTunnelMonoid(identity, makeTunnel, collapseFinalValues) diff --git a/algebird-util/src/test/scala/com/twitter/algebird/util/TunnelMonoidProperties.scala b/algebird-util/src/test/scala/com/twitter/algebird/util/TunnelMonoidProperties.scala index b2dd85ed2..3ce5a973a 100644 --- a/algebird-util/src/test/scala/com/twitter/algebird/util/TunnelMonoidProperties.scala +++ b/algebird-util/src/test/scala/com/twitter/algebird/util/TunnelMonoidProperties.scala @@ -27,9 +27,7 @@ object TunnelMonoidProperties { collapseFinalValues: (V, Seq[V], I) => Seq[Future[I]] ) = { val r = new Random - val numbers = (1 to 40).map { _ => - makeRandomInput(r.nextInt) - } + val numbers = (1 to 40).map(_ => makeRandomInput(r.nextInt)) def helper(seeds: Seq[I], toFeed: I) = { val tunnels = seeds.map(makeTunnel) @annotation.tailrec @@ -56,7 +54,7 @@ object TunnelMonoidProperties { b2 <- f2 } yield b1 == b2 } - Await.result(Future.collect(finalResults).map { _.forall(identity) }) + Await.result(Future.collect(finalResults).map(_.forall(identity))) } } } @@ -72,7 +70,7 @@ class TunnelMonoidPropertiesextends extends CheckProperties { property("associative") { def makeTunnel(seed: Int) = Tunnel.toIncrement(seed) def collapseFinalValues(finalTunnel: 
Tunnel[Int], tunnels: Seq[Tunnel[Int]], toFeed: Int) = - finalTunnel(toFeed) +: tunnels.map { _.future } + finalTunnel(toFeed) +: tunnels.map(_.future) testTunnelMonoid[Int, Tunnel[Int]](identity, makeTunnel, collapseFinalValues) } diff --git a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncSummerLaws.scala b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncSummerLaws.scala index a1139fc34..557e6e681 100644 --- a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncSummerLaws.scala +++ b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncSummerLaws.scala @@ -32,33 +32,25 @@ object AsyncSummerLaws { implicit def arbFlushFreq = Arbitrary { Gen .choose(1, 4000) - .map { x: Int => - FlushFrequency(Duration.fromMilliseconds(x)) - } + .map { x: Int => FlushFrequency(Duration.fromMilliseconds(x)) } } implicit def arbBufferSize = Arbitrary { Gen .choose(1, 10) - .map { x => - BufferSize(x) - } + .map(x => BufferSize(x)) } implicit def arbMemoryFlushPercent = Arbitrary { Gen .choose(80.0f, 90.0f) - .map { x => - MemoryFlushPercent(x) - } + .map(x => MemoryFlushPercent(x)) } implicit def arbCompactSize = Arbitrary { Gen .choose(1, 10) - .map { x => - CompactionSize(x) - } + .map(x => CompactionSize(x)) } def sample[T: Arbitrary]: T = Arbitrary.arbitrary[T].sample.get diff --git a/build.sbt b/build.sbt index 86f71e747..22e268ad3 100644 --- a/build.sbt +++ b/build.sbt @@ -81,9 +81,7 @@ val sharedSettings = Seq( releaseVersionBump := sbtrelease.Version.Bump.Minor, // need to tweak based on mima results publishMavenStyle := true, publishArtifact in Test := false, - pomIncludeRepository := { x => - false - }, + pomIncludeRepository := { x => false }, releaseProcess := Seq[ReleaseStep]( checkSnapshotDependencies, inquireVersions, @@ -92,7 +90,9 @@ val sharedSettings = Seq( setReleaseVersion, commitReleaseVersion, tagRelease, - releaseStepCommandAndRemaining("+publishSigned"), // formerly publishArtifacts, here to deal with algebird-spark + releaseStepCommandAndRemaining( + "+publishSigned" + ), // formerly publishArtifacts, here to deal with algebird-spark ReleaseStep(action = releaseStepCommand("sonatypeBundleRelease")), setNextVersion, commitNextVersion, @@ -212,9 +212,7 @@ val noBinaryCompatCheck = Set[String]("benchmark", "caliper", "generic", "spark" def previousVersion(subProj: String) = Some(subProj) .filterNot(noBinaryCompatCheck.contains) - .map { s => - "com.twitter" %% ("algebird-" + s) % "0.13.5" - } + .map(s => "com.twitter" %% ("algebird-" + s) % "0.13.5") lazy val algebird = Project(id = "algebird", base = file(".")) .settings(sharedSettings) @@ -313,7 +311,9 @@ lazy val algebirdSpark = module("spark") .settings( libraryDependencies += "org.apache.spark" %% "spark-core" % sparkVersion % "provided", scalacOptions := scalacOptions.value - .filterNot(_.contains("inline")) // Disable optimizations for now: https://github.com/scala/bug/issues/11247 + .filterNot( + _.contains("inline") + ) // Disable optimizations for now: https://github.com/scala/bug/issues/11247 ) .dependsOn(algebirdCore, algebirdTest % "test->test") From 3a27bdf43bbf92bb4f896708615bd2bd8ab04f91 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 9 Mar 2020 14:13:56 +0100 Subject: [PATCH 040/306] Update util-core to 20.3.0 (#795) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 22e268ad3..582db08bf 100644 --- a/build.sbt +++ b/build.sbt @@ -12,7 
+12,7 @@ val scalaTestVersion = "3.1.1" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.1.4" -val utilVersion = "20.1.0" +val utilVersion = "20.3.0" val sparkVersion = "2.4.5" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = From ccccc00703f7bb1059a1ccc2f1892499274fde82 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 10 Mar 2020 13:15:24 +0100 Subject: [PATCH 041/306] Update sbt-microsites to 1.1.3 (#796) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index dc0680efc..6e740b798 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,7 +5,7 @@ resolvers ++= Seq( ) ) -addSbtPlugin("com.47deg" % "sbt-microsites" % "1.1.2") +addSbtPlugin("com.47deg" % "sbt-microsites" % "1.1.3") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") From 26082606b6d79376f567d1c75f760536f22a61f9 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 19 Mar 2020 14:30:44 +0100 Subject: [PATCH 042/306] Update sbt-scalafix to 0.9.12 (#797) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 6e740b798..c3a42303c 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -15,4 +15,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.8.1") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.11") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.12") From 3e535546211a0f8ac55b52c770e1df8d6a18b2dd Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 29 Mar 2020 15:43:33 +0200 Subject: [PATCH 043/306] Update sbt-sonatype to 3.9.2 (#800) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index c3a42303c..ed76ebc25 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -13,6 +13,6 @@ addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.7.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.8.1") +addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.2") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.12") From d8071fd11a81512f4b577a5529a796dd83660a14 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 29 Mar 2020 16:00:25 +0200 Subject: [PATCH 044/306] Update sbt-microsites to 1.1.5 (#798) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index ed76ebc25..92b59f41c 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,7 +5,7 @@ resolvers ++= Seq( ) ) -addSbtPlugin("com.47deg" % "sbt-microsites" % "1.1.3") +addSbtPlugin("com.47deg" % "sbt-microsites" % "1.1.5") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") 
addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") From 10aff1804f9c7e7dc6eeb9c0b966a430f882ebd3 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 10 Apr 2020 18:51:40 +0200 Subject: [PATCH 045/306] Update sbt-scalafmt to 2.3.4 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 92b59f41c..0a61d202f 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -9,7 +9,7 @@ addSbtPlugin("com.47deg" % "sbt-microsites" % "1.1.5") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.2") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.4") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.7.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") From 3c673e0e560c55cd3ad2c4743dc94e6eb97f3c6c Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 31 Mar 2020 20:56:03 +0200 Subject: [PATCH 046/306] Update sbt to 1.3.9 --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index a919a9b5f..06703e34d 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.8 +sbt.version=1.3.9 From bf9e618fd6c1919fe991f4dd6daf7c511a691430 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 10 Apr 2020 19:33:35 +0200 Subject: [PATCH 047/306] Update sbt-scalafix to 0.9.13 (#801) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 0a61d202f..c98e0114e 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -15,4 +15,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.2") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.12") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.13") From 5b48fb720654b27d297f1f5b7918296a3e629b2b Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 10 Apr 2020 19:34:03 +0200 Subject: [PATCH 048/306] Update util-core to 20.4.0 (#803) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 582db08bf..b37462926 100644 --- a/build.sbt +++ b/build.sbt @@ -12,7 +12,7 @@ val scalaTestVersion = "3.1.1" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.1.4" -val utilVersion = "20.3.0" +val utilVersion = "20.4.0" val sparkVersion = "2.4.5" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = From dc72b19fb458cc27664f1633dd0ead42c21c6abb Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 15 Apr 2020 22:23:49 +0200 Subject: [PATCH 049/306] Update sbt to 1.3.10 (#807) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 06703e34d..797e7ccfd 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ 
-sbt.version=1.3.9 +sbt.version=1.3.10 From 57ec9ceb88dad34b9094de053c60adc28df426be Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 16 Apr 2020 19:49:40 +0200 Subject: [PATCH 050/306] Update scala-collection-compat to 2.1.5 (#808) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index b37462926..a74152cee 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.1.1" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" -val scalaCollectionCompat = "2.1.4" +val scalaCollectionCompat = "2.1.5" val utilVersion = "20.4.0" val sparkVersion = "2.4.5" From b34d4cabfd70ee9aa8ecc7f3c6838079488820a1 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 16 Apr 2020 19:50:01 +0200 Subject: [PATCH 051/306] Update sbt-scalafix to 0.9.14 (#806) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index c98e0114e..980dc44a2 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -15,4 +15,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.2") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.13") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.14") From 2c2a2d77a497e84b7608e93905fb57146dba6243 Mon Sep 17 00:00:00 2001 From: Erik Osheim Date: Fri, 17 Apr 2020 23:19:52 -0400 Subject: [PATCH 052/306] Add DecayingCMS[K] type. (#809) * Add DecayingCMS[K] type. This type represents a count-min sketch whose values decay over time according to a provided half-life. It uses a module-style approach, where the half-life, width, depth, and other parameters are set in a module. The actual CMS values are path-dependent types on the DecayingCMS[K] module value. This means that we don't have to carry around or serialize anything besides the count-min values themselves, and also means we can be sure that CMS values to be combined are aligned. It's a slightly different pattern that people may be used to, but I think it works much better for this kind of thing. (I could imagine creating a non-decaying variant with a similar design, but that's outside the scope of this PR.) * Fix flaky test. I had increased eps from 1e-5 to 1e-6 before submitting. Since ScalaTest only runs 10 iterations by default it seemed OK to me but ended up being flaky. I've confirmed that 1e-5 works with thousands of runs. * Formatting! * Remove stray SBT setting and format again. * Fix SBT formatting. 
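For illustration, here is a minimal usage sketch of the module pattern
(the seed, half-life, depth, width, key, and counts below are arbitrary
placeholder values, not defaults from this patch, and it assumes an
implicit CMSHasher[String] is in scope, which algebird provides for
common key types):

    import com.twitter.algebird.DecayingCMS
    import scala.concurrent.duration._

    // build the module once; its parameters fix the decay rate and sketch size
    val module = DecayingCMS[String](seed = 42L, halfLife = 1.hour, depth = 5, width = 100)

    val now = System.currentTimeMillis
    // CMS is a path-dependent type on `module`; start from the empty sketch
    val cms = module.empty
      .add(now, "user-123", 1.0)
      .add(now + 5000L, "user-456", 2.0)

    // get returns a DoubleAt; evaluate it at a concrete timestamp to read the decayed count
    val approxCount: Double = cms.get("user-123").at(now + 5000L)

Because CMS is path-dependent on the module value, two sketches can only
be combined through that module's monoid (module.monoid), which is what
guarantees that the half-life, depth, and width of the operands are
aligned.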
Co-authored-by: Erik Osheim --- .../com/twitter/algebird/DecayingCMS.scala | 688 ++++++++++++++++++ .../twitter/algebird/DecayingCMSTest.scala | 435 +++++++++++ 2 files changed, 1123 insertions(+) create mode 100644 algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala create mode 100644 algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala diff --git a/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala b/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala new file mode 100644 index 000000000..56e6b4469 --- /dev/null +++ b/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala @@ -0,0 +1,688 @@ +package com.twitter.algebird + +import java.lang.Double.{compare => cmp} +import java.lang.Math +import java.util.Arrays.deepHashCode +import scala.concurrent.duration.Duration +import scala.util.Random + +/** + * DecayingCMS is a module to build count-min sketch instances whose + * counts decay exponentially. + * + * Similar to a Map[K, com.twitter.algebird.DecayedValue], each key is + * associated with a single count value that decays over time. Unlike + * a map, the decyaing CMS is an approximate count -- in exchange for + * the possibility of over-counting, we can bound its size in memory. + * + * The intended use case is for metrics or machine learning where + * exact values aren't needed. + * + * You can expect the keys with the biggest values to be fairly + * accurate but the very small values (rare keys or very old keys) to + * be lost in the noise. For both metrics and ML this should be fine: + * you can't learn too much from very rare values. + * + * We recommend depth of at least 5, and width of at least 100, but + * you should do some experiments to determine the smallest parameters + * that will work for your use case. + */ +final class DecayingCMS[K]( + seed: Long, + val halfLife: Duration, + val depth: Int, // number of hashing functions + val width: Int, // number of table cells per hashing function + hasher: CMSHasher[K] +) extends Serializable { module => + + override def toString: String = + s"DecayingCMS(seed=$seed, halfLife=$halfLife, depth=$depth, width=$width)" + + @inline private def getNextLogScale( + logScale: Double, + oldTimeInHL: Double, + nowInHL: Double + ): Double = + if (nowInHL == oldTimeInHL) logScale else logScale + (nowInHL - oldTimeInHL) * log2 + + @inline private def getScale(logScale: Double, oldTimeInHL: Double, nowInHL: Double): Double = { + val logScale1 = getNextLogScale(logScale, oldTimeInHL, nowInHL) + Math.exp(-logScale1) + } + + val empty: CMS = + new CMS(Array.fill(depth)(Vector.fill[Double](width)(0.0)), 0.0, Double.NegativeInfinity) + + /** + * Represents a decaying scalar value at a particular point in time. + * + * The value decays according to halfLife. Another way to think + * about DoubleAt is that it represents a particular decay curve + * (and in particular, a point along that curve). Two DoubleAt + * values may be equivalent if they are two points on the same curve. + * + * The `timeToZero` and `timeToUnit` methods can be used to + * "normalize" DoubleAt values. If two DoubleAt values do not + * produce the same (approximate) Double values from these methods, + * they represent different curves. + */ + class DoubleAt private[algebird] (val value: Double, val timeInHL: Double) extends Serializable { + lhs => + + // this is not public because it's not safe in general -- you need + // to run a function that is time-commutative. 
+ private[algebird] def map(f: Double => Double): DoubleAt = + new DoubleAt(f(value), timeInHL) + + // this is not public because it's not safe in general -- you need + // to run a function that is time-commutative. + private[algebird] def map2(rhs: DoubleAt)(f: (Double, Double) => Double): DoubleAt = + if (lhs.timeInHL < rhs.timeInHL) { + val x = lhs.scaledAt(rhs.timeInHL) + new DoubleAt(f(x, rhs.value), rhs.timeInHL) + } else if (lhs.timeInHL == rhs.timeInHL) { + new DoubleAt(f(lhs.value, rhs.value), rhs.timeInHL) + } else { + val y = rhs.scaledAt(lhs.timeInHL) + new DoubleAt(f(lhs.value, y), lhs.timeInHL) + } + + def unary_- : DoubleAt = new DoubleAt(-value, timeInHL) + def abs: DoubleAt = new DoubleAt(Math.abs(value), timeInHL) + def *(n: Double): DoubleAt = new DoubleAt(value * n, timeInHL) + + def +(rhs: DoubleAt): DoubleAt = map2(rhs)(_ + _) + def -(rhs: DoubleAt): DoubleAt = map2(rhs)(_ - _) + def min(rhs: DoubleAt): DoubleAt = map2(rhs)(Math.min) + def max(rhs: DoubleAt): DoubleAt = map2(rhs)(Math.max) + + def /(rhs: DoubleAt): Double = map2(rhs)(_ / _).value + + /** + * We consider two DoubleAt values equal not just if their + * elements are equal, but also if they represent the same value + * at different points of decay. + */ + def compare(rhs: DoubleAt): Int = { + val vc = cmp(lhs.value, rhs.value) + val tc = cmp(lhs.timeInHL, rhs.timeInHL) + if (vc == tc) vc + else if (tc == 0) vc + else if (vc == 0) tc + else if (tc < 0) cmp(lhs.scaledAt(rhs.timeInHL), rhs.value) + else cmp(lhs.value, rhs.scaledAt(lhs.timeInHL)) + } + + /** + * Time when this value will reach the smallest double value + * bigger than zero, unless we are already at zero in which + * case we return the current time + */ + def timeToZero: Double = + if (java.lang.Double.isNaN(value)) Double.NaN + else if (java.lang.Double.isInfinite(value)) Double.PositiveInfinity + else if (value == 0.0) timeInHL + else timeToUnit + DoubleAt.TimeFromUnitToZero + + /** + * This is the scaled time when the current value will reach + * 1 (or -1 for negative values) + * + * This method is a way of collapsing a DoubleAt into a single + * value (the time in the past or future where its value would be + * 1, the unit value). 
+ */ + def timeToUnit: Double = + if (java.lang.Double.isNaN(value)) Double.NaN + else if (java.lang.Double.isInfinite(value)) Double.PositiveInfinity + else if (value == 0.0) Double.NegativeInfinity + else { + // solve for result: + // + // 1 = value * module.getScale(0.0, timeInHL, result) + // 1 = value * Math.exp(-getNextLogScale(0.0, timeInHL, result)) + // 1 / value = Math.exp(-getNextLogScale(0.0, timeInHL, result)) + // log(1 / value) = -getNextLogScale(0.0, timeInHL, result) + // -log(1 / value) = getNextLogScale(0.0, timeInHL, result) + // log(value) = getNextLogScale(0.0, timeInHL, result) + // log(value) = if (result == timeInHL) 0 else 0 + (result - timeInHL) * log2 + // log(value) = if (result == timeInHL) 0 else (result - timeInHL) * log2 + // + // log(value) = (result - timeInHL) * log2 + // log(value) / log2 = result - timeInHL + // log(value) / log2 + timeInHL = result + Math.log(Math.abs(value)) / log2 + timeInHL + } + + override def equals(that: Any): Boolean = + that match { + case d: DoubleAt => compare(d) == 0 + case _ => false + } + + override def hashCode: Int = + timeToUnit.## + + override def toString: String = + s"DoubleAt($value, $timeInHL)" + + def <(rhs: DoubleAt): Boolean = (lhs.compare(rhs)) < 0 + def <=(rhs: DoubleAt): Boolean = (lhs.compare(rhs)) <= 0 + def >(rhs: DoubleAt): Boolean = (lhs.compare(rhs)) > 0 + def >=(rhs: DoubleAt): Boolean = (lhs.compare(rhs)) >= 0 + + def time: Long = + toTimestamp(timeInHL) + + private def scaledAt(t: Double): Double = + if (value == 0.0) 0.0 + else value * module.getScale(0.0, timeInHL, t) + + def at(time: Long): Double = + if (value == 0.0) 0.0 + else value * module.getScale(0.0, timeInHL, fromTimestamp(time)) + } + + object DoubleAt { + def apply(x: Double, t: Long): DoubleAt = + new DoubleAt(x, fromTimestamp(t)) + + val zero: DoubleAt = + new DoubleAt(0.0, Double.NegativeInfinity) + + private val TimeFromUnitToZero: Double = + -Math.log(Double.MinPositiveValue) / log2 + } + + val totalCells: Int = depth * width + + val halfLifeSecs: Double = + halfLife.toMillis.toDouble / 1000.0 + + // TODO: consider a smaller number? + // we are trading accuracy for possible performence + private[this] val maxLogScale: Double = 20.0 + + /** + * Allocate an empty array of row. + * + * The elements start as null. It's an important optimization _not_ + * to allocate vectors here, since we're often building up cells + * mutably. + */ + private def allocCells(): Array[Vector[Double]] = + new Array[Vector[Double]](depth) + + def toTimestamp(t: Double): Long = + (t * halfLifeSecs * 1000.0).toLong + + def fromTimestamp(t: Long): Double = + (t.toDouble / 1000.0) / halfLifeSecs + + val hashFns: Array[K => Int] = { + val rng = new Random(seed) + def genPos(): Int = + rng.nextInt match { + case 0 => genPos() + case n => n & 0x7fffffff + } + + (0 until depth).map { _ => + val n = genPos() + (k: K) => hasher.hash(n, 0, width)(k) + }.toArray + } + + private final val log2 = Math.log(2.0) + + /** + * The idealized formula for the updating current value for a key + * (y0 -> y1) is given as: + * + * delta = (t1 - t0) / halflife + * y1 = y0 * 2^(-delta) + n + * + * However, we want to avoid having to rescale every single cell + * every time we update; i.e. a cell with a zero value should + * continue to have a zero value when n=0. 
+ * + * Therefore, we introduce a change of variable to cell values (z) + * along with a scale factor (scale), and the following formula: + * + * (1) zN = yN * scaleN + * + * Our constraint is expressed as: + * + * (2) If n=0, z1 = z0 + * + * In that case: + * + * (3) If n=0, (y1 * scale1) = (y0 * scale0) + * (4) Substituting for y1, (y0 * 2^(-delta) + 0) * scale1 = y0 * scale0 + * (5) 2^(-delta) * scale1 = scale0 + * (6) scale1 = scale0 * 2^(delta) + * + * Also, to express z1 in terms of z0, we say: + * + * (7) z1 = y1 * scale1 + * (8) z1 = (y0 * 2^(-delta) + n) * scale1 + * (9) z1 = ((z0 / scale0) * 2^(-delta) + n) * scale1 + * (10) z1 / scale1 = (z0 / (scale1 * 2^(-delta))) * 2^(-delta) + n + * (11) z1 / scale1 = z0 / scale1 + n + * (12) z1 = z0 + n * scale1 + * + * So, for cells where n=0, we just update scale0 to scale1, and for + * cells where n is non-zero, we update z1 in terms of z0 and + * scale1. + * + * If we convert scale to logscale, we have: + * + * (13) logscale1 = logscale0 + delta * log(2) + * (14) z1 = z0 + n * exp(logscale1) + * + * When logscale1 gets big, we start to distort z1. For example, + * exp(36) is close to 2^53. We can measure when n * exp(logscale1) + * gets big, and in those cases we can rescale all our cells (set + * each z to its corresponding y) and set the logscale to 0. + * + * (15) y1 = z1 / scale1 + * (16) y1 = z1 / exp(logscale1) + * (17) y1 = z1 * exp(-logscale1) + */ + final class CMS( + val cells: Array[Vector[Double]], + val logScale: Double, + val timeInHL: Double + ) extends Serializable { + + @inline private def scale: Double = + Math.exp(-logScale) + + override def toString: String = { + val s = cells.iterator.map(_.toString).mkString("Array(", ", ", ")") + s"CMS($s, $logScale, $timeInHL)" + } + + override def hashCode: Int = + deepHashCode(cells.asInstanceOf[Array[Object]]) * 59 + + logScale.## * 17 + + timeInHL.## * 37 + + 19 + + // unfortunately we can't check the path-dependent type of this + // CMS, which we signal by using a type projection here. + override def equals(any: Any): Boolean = + any match { + case that: DecayingCMS[_]#CMS => + this.logScale == that.logScale && + this.timeInHL == that.timeInHL && + this.cells.length == that.cells.length && { + var i = 0 + while (i < depth) { + if (this.cells(i) != that.cells(i)) return false + i += 1 + } + true + } + case _ => + false + } + + def lastUpdateTime: Long = + toTimestamp(timeInHL) + + /** + * Provide lower and upper bounds on values returned for any + * possible key. + * + * The first value is a lower bound: even keys that have never + * been counted will return this value or greater. This will be + * zero unless the CMS is saturated. + * + * The second value is an upper bound: the key with the largest + * cardinality will not be reported as being larger than this + * value (though it might be reported as being smaller). + * + * Together these values indicate how saturated and skewed the CMS + * might be. 
+ */ + def range: (DoubleAt, DoubleAt) = { + var minMinimum = Double.PositiveInfinity + var minMaximum = Double.PositiveInfinity + var i = 0 + while (i < cells.length) { + val it = cells(i).iterator + var localMax = it.next // we know it doesn't start empty + if (localMax < minMinimum) minMinimum = localMax + while (it.hasNext) { + val n = it.next + if (n > localMax) localMax = n + else if (n < minMinimum) minMinimum = n + } + if (localMax < minMaximum) minMaximum = localMax + i += 1 + } + + val s = scale + def sc(x: Double): DoubleAt = + new DoubleAt(if (x == 0.0) 0.0 else x * s, timeInHL) + + (sc(minMinimum), sc(minMaximum)) + } + + /** + * Returns the square-root of the inner product of two decaying + * CMSs. + * + * We want the result to decay at the same rate as the CMS for + * this method to be valid. Taking the square root ensures that + * this is true. Without it, we would violate the following + * equality (assuming we had at() on a CMS): + * + * x.innerProduct(y).at(t) = x.at(t).innerProduct(y.at(t)) + * + * This is why we don't support innerProduct, only + * innerProductRoot. + */ + def innerProductRoot(that: CMS): DoubleAt = { + var i = 0 + var res = Double.PositiveInfinity + val t = Math.max(this.timeInHL, that.timeInHL) + val scale = this.getScale(t) * that.getScale(t) + while (i < depth) { + var sum = 0.0 + val it0 = this.cells(i).iterator + val it1 = that.cells(i).iterator + while (it0.hasNext) { + val x = it0.next * it1.next + if (x != 0.0) sum += x + } + if (sum < res) res = sum + i += 1 + } + val x = if (res != 0.0) Math.sqrt(res * scale) else 0.0 + new DoubleAt(x, t) + } + + def l2Norm: DoubleAt = + innerProductRoot(this) + + def scale(x: Double): CMS = + if (java.lang.Double.isNaN(x)) { + throw new IllegalArgumentException(s"invalid scale: $x") + } else if (x < 0.0) { + throw new IllegalArgumentException(s"negative scale is not allowed: $x") + } else if (x == 0.0) { + module.empty + } else { + val s = logScale + Math.log(x) + val c = new CMS(cells, s, timeInHL) + if (s > maxLogScale) c.rescaleTo(timeInHL) else c + } + + /** + * Get the total count of all items in the CMS. + * + * The total is the same as the l1Norm, since we don't allow + * negative values. + * + * Total is one of the few non-approximate statistics that + * DecayingCMS supports. We expect the total to be exact (except + * for floating-point error). + */ + def total: DoubleAt = { + val n = cells(0).sum + val x = if (n == 0.0) 0.0 else scale * n + new DoubleAt(x, timeInHL) + } + + def get(k: K): DoubleAt = { + var minValue = Double.PositiveInfinity + var didx = 0 + while (didx < depth) { + val i = hashFns(didx)(k) + val inner = cells(didx) + val value = inner(i) + if (value < minValue) minValue = value + didx += 1 + } + val x = if (minValue == 0.0) 0.0 else scale * minValue + new DoubleAt(x, timeInHL) + } + + def getScale(t: Double): Double = + module.getScale(logScale, timeInHL, t) + + private final def nextLogScale(t: Double): Double = + module.getNextLogScale(logScale, timeInHL, t) + + def +(other: CMS): CMS = { + val x = this + val y = other + val timeInHL = Math.max(x.timeInHL, y.timeInHL) + val cms = new CMS(allocCells, 0.0, timeInHL) + + val xscale = x.getScale(timeInHL) + val yscale = y.getScale(timeInHL) + + // a zero count is zero, no matter, how big the scale is. 
+ @inline def prod(x: Double, y: Double): Double = + if (x == 0.0) 0.0 else x * y + + var i = 0 + while (i < depth) { + val left = x.cells(i) + val right = y.cells(i) + var j = 0 + val bldr = rowBuilder() + while (j < width) { + bldr += prod(left(j), xscale) + prod(right(j), yscale) + j += 1 + } + cms.cells(i) = bldr.result + i += 1 + } + cms + } + + def add(t: Long, k: K, n: Double): CMS = + scaledAdd(fromTimestamp(t), k, n) + + // TODO: we could allocate a mutable scratch pad, write all the + // values into it, and then build a CMS out of it. if items is + // very small, this would be less efficient than what we're doing + // now. probably the "ideal" solution would be determine how many + // items there are. if we have fewer than ~width items, this + // approach is fine. for more, a scratch pad would be better + // (assuming we wrote that code). + // + // alternately, you could map items into (zero + item) and then + // use the monoid's sum to boil it down. + // + // we only use this in testing currently so the current code is + // fine until we rely on it in production. any change here should + // probably include benchmarks justifying the design. + def bulkAdd(items: Iterable[(Long, K, Double)]): CMS = + items.foldLeft(this) { case (c, (t, k, v)) => c.add(t, k, v) } + + private[algebird] def scaledAdd(ts1: Double, k: K, n: Double): CMS = + if (n < 0.0) { + val t = toTimestamp(ts1) + throw new IllegalArgumentException( + s"we can only add non-negative numbers to a CMS, got $n for key: $k at time: $t" + ) + } else if (n == 0.0) { + this + } else { + val logScale1 = nextLogScale(ts1) + if (logScale1 > maxLogScale) { + rescaleTo(ts1).scaledAdd(ts1, k, n) + } else { + val increment = n * Math.exp(logScale1) + val cells1 = allocCells() + var didx = 0 + while (didx < depth) { + val cell = cells(didx) + val w = hashFns(didx)(k) + cells1(didx) = cell.updated(w, cell(w) + increment) + didx += 1 + } + new CMS(cells1, logScale1, ts1) + } + } + + // Set the scale back to 0.0 + // input time is in half-lives + private[algebird] def rescaleTo(ts: Double): CMS = { + val logScale1 = nextLogScale(ts) + val expL = Math.exp(-logScale1) + if (expL == 0.0) { + new CMS(monoid.zero.cells, 0.0, ts) + } else { + val cms = new CMS(allocCells, 0.0, ts) + var i = 0 + while (i < depth) { + val ci = cells(i) + cms.cells(i) = ci.map(_ * expL) + i += 1 + } + cms + } + } + } + + private def rowBuilder() = { + val bldr = Vector.newBuilder[Double] + bldr.sizeHint(width) + bldr + } + + object CMS { + + implicit val monoidForCMS: Monoid[CMS] = + new Monoid[CMS] { + + def zero: CMS = module.empty + + def plus(x: CMS, y: CMS): CMS = + x + y + + /** + * Turn a flat array into an array of vectors. + */ + private def scratchToCells(scratch: Array[Double]): Array[Vector[Double]] = { + val cells = new Array[Vector[Double]](depth) + var i = 0 + while (i < depth) { + var j = i * width + val limit = j + width + val bldr = rowBuilder() + while (j < limit) { + bldr += scratch(j) + j += 1 + } + cells(i) = bldr.result + i += 1 + } + cells + } + + /** + * This method sums the first `num` items in `arr`. 
+ */ + private def innerSum(arr: Array[CMS], num: Int): CMS = + if (num == 0) zero + else if (num == 1) arr(0) + else if (num == 2) plus(arr(0), arr(1)) + else { + // start with zero + val scratch: Array[Double] = new Array(totalCells) + + val latestTimeInHL: Double = + arr.iterator.take(num).map(cms => cms.timeInHL).max + + var i = 0 + while (i < num) { + val cms = arr(i) + val scale = cms.getScale(latestTimeInHL) + var j = 0 + while (j < depth) { + val row = cms.cells(j) + val stride = j * width + var k = 0 + while (k < width) { + val n = row(k) + if (n > 0.0) { + scratch(stride + k) += scale * n + } + k += 1 + } + j += 1 + } + i += 1 + } + + val cells = scratchToCells(scratch) + + new CMS(cells, 0.0, latestTimeInHL) + } + + override def sumOption(xs: TraversableOnce[CMS]): Option[CMS] = { + + val it: Iterator[CMS] = xs.toIterator + val ChunkSize = 1000 + + // the idea here is that we read up to 1000 CMS values into + // a fixed array, crunch them down to a single CMS, store it + // in the first array index, read up to 999 more CMS values + // in, crunch them down, and so on. + var i = 0 + val arr = new Array[CMS](ChunkSize) + while (it.hasNext) { + while (it.hasNext && i < ChunkSize) { + arr(i) = it.next + i += 1 + } + if (i > 1) { + arr(0) = innerSum(arr, i) + } + i = 1 + } + if (i == 0) None else Some(arr(0)) + } + } + } + + val monoid: Monoid[CMS] = CMS.monoidForCMS +} + +object DecayingCMS { + + /** + * Construct a DecayingCMS module. + * + * The seed is used to initialize the hash families used by the + * count-min sketch. Using the same seed will always produce the + * same hash family. + * + * Half-life determines the rate at which values in the CMS decay. + * If a key was counted once at time t, by time (t + halfLife), the + * value for that key will be 0.5. After enough half lives the value + * will decay to zero. + * + * The size of the CMS in bytes is O(depth * width). + * + * Width controls the relative error due to over-counting + * (approximately 1/width). For 1% error, use width=100, for 0.1% + * error, use width=1000, etc. + * + * Depth controls the probability the error bounds are broken and + * that probability scales with exp(-alpha * depth) so, a small depth + * (e.g. 5-10) is fine. Each update requires O(depth) work so you + * want to keep this as small as possible. 
+ */ + def apply[K](seed: Long, halfLife: Duration, depth: Int, width: Int)( + implicit hasher: CMSHasher[K] + ): DecayingCMS[K] = + new DecayingCMS(seed, halfLife, depth, width, hasher) +} diff --git a/algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala new file mode 100644 index 000000000..c2099646e --- /dev/null +++ b/algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala @@ -0,0 +1,435 @@ +package com.twitter.algebird + +import org.scalacheck.{Arbitrary, Gen, Prop} +import scala.concurrent.duration.{DAYS, Duration, HOURS, MINUTES} + +import Prop.{forAllNoShrink => forAll} + +class DecayingCMSProperties extends CheckProperties { + + // uncomment to stress test (scalatest default is 10) + // override val generatorDrivenConfig = + // PropertyCheckConfiguration(minSuccessful = 1000) + + val eps = 1e-5 + + def close(a: Double, b: Double): Boolean = + if (a == b) { + true + } else { + val (aa, ab) = (Math.abs(a), Math.abs(b)) + if (aa < eps && ab < eps) true + else if (aa < ab) (b / a) < 1.0 + eps + else (a / b) < 1.0 + eps + } + + def fuzzyEq[K](module: DecayingCMS[K])(cms0: module.CMS, cms1: module.CMS): Boolean = { + val t = cms0.timeInHL.max(cms1.timeInHL) + val (x0, x1) = + if (t == Double.NegativeInfinity) (cms0, cms1) + else (cms0.rescaleTo(t), cms1.rescaleTo(t)) + + (0 until module.depth).forall { d => + (0 until module.width).forall(w => close(x0.cells(d)(w), x1.cells(d)(w))) + } + } + + def genModule[K: CMSHasher]: Gen[DecayingCMS[K]] = + for { + seed <- Gen.choose(1L, Long.MaxValue) + hl <- genDuration + d <- Gen.choose(1, 5) + w <- Gen.choose(1, 12) + } yield DecayingCMS[K](seed, hl, d, w) + + def genBigModule[K: CMSHasher]: Gen[DecayingCMS[K]] = + for { + seed <- Gen.choose(1L, Long.MaxValue) + hl <- genDuration + d <- Gen.const(6) + w <- Gen.choose(100, 200) + } yield DecayingCMS[K](seed, hl, d, w) + + implicit def arbitraryModule[K: CMSHasher]: Arbitrary[DecayingCMS[K]] = + Arbitrary(genModule) + + def genCms[K]( + module: DecayingCMS[K], + genk: Gen[K], + gent: Gen[Long], + genv: Gen[Double] + ): Gen[module.CMS] = { + + val genEmpty = Gen.const(module.monoid.zero) + val genItem = Gen.zip(gent, genk, genv) + + def genSeq(cms0: module.CMS): Gen[module.CMS] = + Gen.listOf(genItem).map { items => + items.foldLeft(cms0) { + case (cms, (t, k, n)) => + cms.add(t, k, n) + } + } + + val terminalGens: Gen[module.CMS] = genEmpty + + def gen(depth: Int): Gen[module.CMS] = + if (depth <= 0) terminalGens + else { + val recur = Gen.lzy(gen(depth - 1)) + Gen.frequency( + 1 -> genEmpty, + 7 -> recur.flatMap(genSeq(_)), + 2 -> Gen.zip(recur, recur).map { case (g0, g1) => module.monoid.plus(g0, g1) }, + 1 -> Gen.listOf(recur).map(module.monoid.sum(_)) + ) + } + + gen(2) + } + + val genDuration: Gen[Duration] = + Gen.oneOf(Duration(1, HOURS), Duration(1, DAYS), Duration(10, MINUTES), Duration(7, DAYS)) + + implicit val arbitraryDuration: Arbitrary[Duration] = + Arbitrary(genDuration) + + val stdKey: Gen[String] = + Gen.listOfN(2, Gen.choose('a', 'm')).map(_.mkString) + + val stdVal: Gen[Double] = + Gen.choose(0.0, 100.0) + + val stdItem: Gen[(String, Double)] = + Gen.zip(stdKey, stdVal) + + val stdItems: Gen[List[(String, Double)]] = + Gen.listOf(stdItem) + + def genTimestamp[K](module: DecayingCMS[K]): Gen[Long] = + Gen.choose(0L, module.halfLifeSecs.toLong * 10L) + + def genDoubleAt[K](module: DecayingCMS[K]): Gen[module.DoubleAt] = + Gen.zip(genTimestamp(module), stdVal).map { case 
(t, x) => module.DoubleAt(x, t) } + + def genItem(module: DecayingCMS[String]): Gen[(Long, String, Double)] = + Gen.zip(genTimestamp(module), stdKey, stdVal) + + def genItems(module: DecayingCMS[String]): Gen[List[(Long, String, Double)]] = + Gen.listOf(genItem(module)) + + def genValues(module: DecayingCMS[String]): Gen[List[(Long, Double)]] = + Gen.listOf(Gen.zip(genTimestamp(module), stdVal)) + + property("basic") { + val gk = Gen.identifier + val gt = Gen.choose(-30610206238000L, 32503698000000L) + val gn = Gen.choose(0, 0xffff).map(_.toDouble) + val gm = genModule[String] + forAll(gk, gt, gn, gm) { (key: String, t: Long, n: Double, module: DecayingCMS[String]) => + val cms0 = module.monoid.zero + val cms = cms0.add(t, key, n) + val got = cms.get(key).at(t) + Prop(got == n) :| s"$got == $n" + } + } + + property("sum(xs) = xs.foldLeft(zero)(plus)") { + forAll { (module: DecayingCMS[String]) => + val g = genCms(module, stdKey, genTimestamp(module), stdVal) + forAll(Gen.listOf(g)) { xs => + val left = module.monoid.sum(xs) + val right = xs.foldLeft(module.monoid.zero)(module.monoid.plus) + Prop(fuzzyEq(module)(left, right)) :| s"$left was not equal to $right" + } + } + } + + property("all rows should sum to the same value") { + forAll { (module: DecayingCMS[String]) => + val g = genCms(module, stdKey, genTimestamp(module), stdVal) + forAll(g) { cms => + val sum0 = cms.cells(0).sum + cms.cells.foldLeft(Prop(true)) { (res, row) => + val sum = row.sum + res && (Prop(close(sum, sum0)) :| s"close($sum, $sum0)") + } + } + } + } + + property("round-tripping timestamps works") { + forAll { (module: DecayingCMS[String]) => + val n = module.halfLifeSecs * 1000.0 + forAll(Gen.choose(-n, n)) { x => + val t = module.toTimestamp(x) + val y = module.fromTimestamp(t) + Prop(close(x, y)) :| s"close($x, $y)" + } + } + } + + property("total works reliably") { + forAll { (module: DecayingCMS[String]) => + val g = genCms(module, stdKey, genTimestamp(module), stdVal) + forAll(g, genItems(module)) { (cms0, items) => + val time = cms0.timeInHL + val cms1 = items.foldLeft(cms0) { + case (c, (_, k, v)) => + c.scaledAdd(time, k, v) + } + val got = cms1.total.value + val expected = cms0.total.value + items.map(_._3).sum + Prop(close(got, expected)) :| s"close($got, $expected) with cms1=$cms1" + } + } + } + + def timeCommutativeBinop(op: String)(f: (Double, Double) => Double): Unit = + property(s"DoubleAt operations are time-commutative with $op") { + forAll { (module: DecayingCMS[String]) => + val g = genDoubleAt(module) + forAll(g, g, genTimestamp(module)) { (x, y, t) => + val lhs = x.map2(y)(f).at(t) + val rhs = f(x.at(t), y.at(t)) + Prop(close(lhs, rhs)) :| s"close($lhs, $rhs)" + } + } + } + + def timeCommutativeUnop(op: String)(f: Double => Double): Unit = + property(s"DoubleAt operations are time-commutative with $op") { + forAll { (module: DecayingCMS[String]) => + val g = genDoubleAt(module) + forAll(g, genTimestamp(module)) { (x, t) => + val lhs = x.map(f).at(t) + val rhs = f(x.at(t)) + Prop(close(lhs, rhs)) :| s"close($lhs, $rhs)" + } + } + } + + // this idea here is that for a given operation (e.g. 
+) we want: + // + // (x + y).at(t) = x(t) + y(t) + // + timeCommutativeBinop("+")(_ + _) + timeCommutativeBinop("-")(_ - _) + timeCommutativeBinop("min")(_.min(_)) + timeCommutativeBinop("max")(_.max(_)) + + timeCommutativeUnop("abs")(_.abs) + timeCommutativeUnop("unary -")(-_) + timeCommutativeUnop("*2")(_ * 2.0) + + property("division is scale-independent") { + forAll { (module: DecayingCMS[String]) => + val g = genDoubleAt(module) + forAll(g, g, genTimestamp(module)) { (x, y, t) => + if (y.at(t) != 0) { + val lhs = (y * (x / y)).at(t) + val rhs = x.at(t) + Prop(close(lhs, rhs)) :| s"close($lhs, $rhs)" + } else { + Prop(true) + } + } + } + } + + property("timeToZero") { + forAll { (module: DecayingCMS[String]) => + val g = genDoubleAt(module) + forAll(g) { x => + val t = x.timeToZero + if (java.lang.Double.isFinite(t)) { + Prop(x.at(module.toTimestamp(t - 100.0)) != 0.0) && + Prop(x.at(module.toTimestamp(t + 100.0)) == 0.0) + } else { + Prop(true) + } + } + } + } + + property("timeToUnit") { + forAll { (module: DecayingCMS[String]) => + val g = genDoubleAt(module) + forAll(g) { x => + val timeInHL = x.timeToUnit + if (java.lang.Double.isFinite(timeInHL)) { + val time = module.toTimestamp(timeInHL) + val x0 = Math.abs(x.at(time - 100L)) + val x1 = Math.abs(x.at(time)) + val x2 = Math.abs(x.at(time + 100L)) + val p0 = Prop(x0 > 1.0) :| s"$x0 > 1.0" + val p1 = Prop(close(x1, 1.0)) :| s"$x1 == 1.0" + val p2 = Prop(x2 < 1.0) :| s"$x2 < 1.0" + p0 && p1 && p2 + } else { + Prop(true) + } + } + } + } + + property("((x compare y) = n) = ((x.at(t) compare y.at(t)) = n)") { + forAll { (module: DecayingCMS[String]) => + val g = genDoubleAt(module) + forAll(g, g, genTimestamp(module)) { (x, y, t) => + val got = x.compare(y) + val expected = x.at(t).compare(y.at(t)) + Prop(got == expected) + } + } + } + + property("range works reliably") { + forAll { (module: DecayingCMS[String]) => + val g = genCms(module, stdKey, genTimestamp(module), stdVal) + forAll(g, genItems(module)) { (cms0, items) => + val cms1 = cms0.bulkAdd(items) + val (minAt, maxAt) = cms1.range + val (xmin, xmax) = (minAt.value, maxAt.value) + items.iterator.map(_._2).foldLeft(Prop(true)) { (res, k) => + val x = cms1.get(k).value + res && + (Prop(xmin <= x) :| s"$xmin <= $x was false for key $k") && + (Prop(x <= xmax) :| s"$x <= $xmax was false for key $k") + } + } + } + } + + property("innerProductRoot(x, 0) = 0") { + forAll { (module: DecayingCMS[String]) => + val g = genCms(module, stdKey, genTimestamp(module), stdVal) + forAll(g) { cms => + val got = cms.innerProductRoot(module.empty) + Prop(got.value == 0.0) :| s"got $got expected 0" + } + } + } + + property("innerProductRoot(x, x) = x for singleton x") { + forAll { (module: DecayingCMS[String]) => + forAll(genItem(module)) { + case (t, k, v) => + val cms0 = module.empty.add(t, k, v) + val got = cms0.l2Norm.at(t) + val expected = v + Prop(close(got, expected)) :| s"got $got, expected $expected" + } + } + } + + property("innerProductRoot triangle inequality") { + // the triangle inequality is only approximately true. for a + // saturated CMS, it will stop being true. since normally we + // generate very small CMS structures, we choose to use a bigger + // module just for this test. 
+ forAll(genBigModule[String]) { module => + val g = genCms(module, stdKey, genTimestamp(module), stdVal) + forAll(g, g) { (x, y) => + // abs(x + y) <= abs(x) + abs(y) + val lhs = ((x + y).l2Norm).timeToUnit + val rhs = (x.l2Norm + y.l2Norm).timeToUnit + Prop(lhs <= rhs || close(lhs, rhs)) + } + } + } + + property("(a + b) + c ~= a + (b + c) and a + b ~= b + a and a + zero = a") { + forAll { (module: DecayingCMS[String]) => + val g = genCms(module, stdKey, genTimestamp(module), stdVal) + forAll(g, g, g) { (a, b, c) => + import module.monoid.plus + val p0 = { + val left = plus(plus(a, b), c) + val right = plus(a, plus(b, c)) + Prop(fuzzyEq(module)(left, right)) :| s"associate: $left was not equal to $right" + } + + val p1 = { + val left = plus(a, b) + val right = plus(b, a) + Prop(fuzzyEq(module)(left, right)) :| s"commute: $left was not equal to $right" + } + + val p2 = { + val left = plus(a, module.monoid.zero) + Prop(fuzzyEq(module)(left, a)) :| s"a + zero = a: $left was not equal to $a" + } + + p0 && p1 && p2 + } + } + } + + property("fixed key ~ decayedvalue") { + + def makeModule(halfLife: Duration): DecayingCMS[String] = { + val seed = "FIXED".hashCode.toLong + DecayingCMS[String](seed, halfLife, depth = 2, width = 3) + } + + val genTestCase: Gen[(DecayingCMS[String], String, List[(Long, Double)])] = + for { + halfLife <- genDuration + module = makeModule(halfLife) + key <- stdKey + values <- genValues(module) + } yield (module, key, values) + + def law(testCase: (DecayingCMS[String], String, List[(Long, Double)])): Prop = { + + val (module, key, inputs0) = testCase + val halfLife = module.halfLife + val inputs = inputs0.sorted + val halfLifeSecs = halfLife.toSeconds.toDouble + + if (inputs.nonEmpty) { + + val tlast = inputs.last._1 + + val dvm = new DecayedValueMonoid(0.0) + val dv = dvm.sum(inputs.map { + case (t, n) => + DecayedValue.build(n, (t.toDouble / 1000.0), halfLifeSecs) + }) + val expected = dvm.valueAsOf(dv, halfLifeSecs, (tlast.toDouble / 1000.0)) + + val cms0 = module.monoid.zero + val cmss = inputs.map { case (t, n) => cms0.add(t, key, n) } + val cms = module.monoid.sum(cmss) + + val got = cms.get(key).at(tlast) + + Prop(close(got, expected)) :| s"$got is not close to $expected" + } else { + Prop(true) + } + } + + forAll(genTestCase)(law _) + + val regressions = + List( + ( + "", + List((-7634593159529L, 1.0), (-10628114330964L, 0.0)), + Duration(1, HOURS) + ), + ( + "", + List((29617281175711L, 65534.0), (29614132054038L, 255.0)), + Duration(7, DAYS) + ) + ) + + regressions.foldLeft(Prop(true)) { + case (res, (k, items, hl)) => + res && law((makeModule(hl), k, items)) + } + } +} From ce2d24ca9a24a5fb7f5bb7d6c5402e986e0731d6 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 20 Apr 2020 17:34:14 +0200 Subject: [PATCH 053/306] Update scala-collection-compat to 2.1.6 (#810) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index a74152cee..64baf2688 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.1.1" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" -val scalaCollectionCompat = "2.1.5" +val scalaCollectionCompat = "2.1.6" val utilVersion = "20.4.0" val sparkVersion = "2.4.5" From 02807e76be87d826224f0019043513b99bab2b8a Mon Sep 17 00:00:00 2001 From: "P. 
Oscar Boykin" Date: Mon, 27 Apr 2020 16:23:06 -1000 Subject: [PATCH 054/306] Update README.md --- README.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/README.md b/README.md index 9be015c96..964558d75 100644 --- a/README.md +++ b/README.md @@ -98,3 +98,10 @@ Other projects built with Algebird, as compiled by the Scaladex: [![Scaladex Dep Copyright 2016 Twitter, Inc. Licensed under the [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0). + +### Thanks to Yourkit +YourKit supports open source projects with innovative and intelligent tools +for monitoring and profiling Java and .NET applications. +YourKit is the creator of YourKit Java Profiler, +YourKit .NET Profiler, +and YourKit YouMonitor. From 53c3c00e26aa8e2aa77ee02a629f9553eb5b6853 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 4 May 2020 14:28:46 +0200 Subject: [PATCH 055/306] Update util-core to 20.4.1 (#813) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 64baf2688..5641dc422 100644 --- a/build.sbt +++ b/build.sbt @@ -12,7 +12,7 @@ val scalaTestVersion = "3.1.1" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.1.6" -val utilVersion = "20.4.0" +val utilVersion = "20.4.1" val sparkVersion = "2.4.5" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = From 0520b615eee1f4adbc82457b9499b9a09777a4d4 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 4 May 2020 17:54:51 +0200 Subject: [PATCH 056/306] Update sbt-microsites to 1.2.0 (#811) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 980dc44a2..390fe5f04 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,7 +5,7 @@ resolvers ++= Seq( ) ) -addSbtPlugin("com.47deg" % "sbt-microsites" % "1.1.5") +addSbtPlugin("com.47deg" % "sbt-microsites" % "1.2.0") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") From 515fa2bc0a86c7c5e70afa91571cb297d8df2271 Mon Sep 17 00:00:00 2001 From: Neville Li Date: Tue, 5 May 2020 21:35:18 -0400 Subject: [PATCH 057/306] Bump scalafmt to 2.5.1 (#816) * Bump scalafmt to 2.5.1 * format sbt --- .scalafmt.conf | 4 +- .../bijection/AlgebirdBijections.scala | 4 +- .../com/twitter/algebird/AdaptiveCache.scala | 6 +- .../com/twitter/algebird/AdaptiveVector.scala | 6 +- .../com/twitter/algebird/Aggregator.scala | 32 +- .../com/twitter/algebird/Applicative.scala | 8 +- .../com/twitter/algebird/Approximate.scala | 4 +- .../com/twitter/algebird/AveragedValue.scala | 2 +- .../com/twitter/algebird/BloomFilter.scala | 2 +- .../com/twitter/algebird/DecayedVector.scala | 8 +- .../com/twitter/algebird/DecayingCMS.scala | 4 +- .../com/twitter/algebird/Eventually.scala | 16 +- .../scala/com/twitter/algebird/ExpHist.scala | 10 +- .../scala/com/twitter/algebird/Fold.scala | 28 +- .../algebird/GeneratedAbstractAlgebra.scala | 620 +++++++++--------- .../algebird/GeneratedProductAlgebra.scala | 612 ++++++++--------- .../com/twitter/algebird/HyperLogLog.scala | 2 +- .../twitter/algebird/HyperLogLogSeries.scala | 22 +- .../scala/com/twitter/algebird/Interval.scala | 4 +- .../com/twitter/algebird/MapAlgebra.scala | 17 +- .../com/twitter/algebird/MomentsGroup.scala | 4 +- 
.../scala/com/twitter/algebird/Monoid.scala | 2 +- .../com/twitter/algebird/MurmurHash.scala | 11 +- .../scala/com/twitter/algebird/QTree.scala | 15 +- .../com/twitter/algebird/RightFolded2.scala | 4 +- .../com/twitter/algebird/Semigroup.scala | 4 +- .../com/twitter/algebird/SketchMap.scala | 12 +- .../com/twitter/algebird/SpaceSaver.scala | 6 +- .../com/twitter/algebird/TopKMonoid.scala | 4 +- .../algebird/monad/StateWithError.scala | 4 +- .../algebird/generic/EquivOrdering.scala | 3 +- .../twitter/algebird/generic/Instances.scala | 12 +- .../twitter/algebird/spark/AlgebirdRDD.scala | 4 +- .../twitter/algebird/ApplicativeLaws.scala | 15 +- .../algebird/BaseVectorSpaceProperties.scala | 4 +- .../com/twitter/algebird/FunctorLaws.scala | 9 +- .../com/twitter/algebird/MonadLaws.scala | 23 +- .../twitter/algebird/StatefulSummerLaws.scala | 9 +- .../com/twitter/algebird/AggregatorLaws.scala | 18 +- .../algebird/CollectionSpecification.scala | 22 +- .../com/twitter/algebird/CombinatorTest.scala | 13 +- .../twitter/algebird/CountMinSketchTest.scala | 3 +- .../com/twitter/algebird/EventuallyTest.scala | 3 +- .../scala/com/twitter/algebird/FoldTest.scala | 16 +- .../GeneratedAbstractAlgebraLaws.scala | 45 +- .../GeneratedProductAlgebraLaws.scala | 45 +- .../com/twitter/algebird/PreparerLaws.scala | 8 +- .../twitter/algebird/RightFolded2Test.scala | 12 +- .../algebird/TupleAggregatorsTest.scala | 64 +- .../algebird/util/summer/NullSummer.scala | 4 +- project/GenTupleAggregators.scala | 2 +- 51 files changed, 919 insertions(+), 892 deletions(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index b7520854a..e605791d1 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,6 +1,8 @@ -version=2.4.2 +version=2.5.1 maxColumn = 110 docstrings = JavaDoc +newlines.alwaysBeforeMultilineDef = false newlines.penalizeSingleSelectMultiArgList = false align.openParenCallSite = false rewrite.rules = [AvoidInfix, SortImports, RedundantBraces, RedundantParens, PreferCurlyFors] +rewrite.redundantBraces.generalExpressions = false diff --git a/algebird-bijection/src/main/scala/com/twitter/algebird/bijection/AlgebirdBijections.scala b/algebird-bijection/src/main/scala/com/twitter/algebird/bijection/AlgebirdBijections.scala index a6a64d005..22f0a2bbb 100644 --- a/algebird-bijection/src/main/scala/com/twitter/algebird/bijection/AlgebirdBijections.scala +++ b/algebird-bijection/src/main/scala/com/twitter/algebird/bijection/AlgebirdBijections.scala @@ -55,8 +55,8 @@ class BijectedRing[T, U](implicit val ring: Ring[T], bij: ImplicitBijection[T, U } trait AlgebirdBijections { - implicit def semigroupBijection[T, U]( - implicit bij: ImplicitBijection[T, U] + implicit def semigroupBijection[T, U](implicit + bij: ImplicitBijection[T, U] ): Bijection[Semigroup[T], Semigroup[U]] = new AbstractBijection[Semigroup[T], Semigroup[U]] { override def apply(sg: Semigroup[T]) = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala index f481e95ea..b59c0a787 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala @@ -75,8 +75,10 @@ class AdaptiveCache[K, V: Semigroup](maxCapacity: Int, growthMargin: Double = 3. 
var ret = evicted - if (currentCapacity < maxCapacity && - sentinelCache.size > (currentCapacity * growthMargin)) { + if ( + currentCapacity < maxCapacity && + sentinelCache.size > (currentCapacity * growthMargin) + ) { currentCapacity = (currentCapacity * 2).min(maxCapacity) ret = (ret, summingCache.flush) match { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala index c7d3c74d5..f0a310c9d 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala @@ -186,9 +186,9 @@ sealed trait AdaptiveVector[V] extends IndexedSeq[V] { /** Iterator of indices and values of all non-sparse values */ def denseIterator: Iterator[(Int, V)] /* - * Note that IndexedSeq provides hashCode and equals that - * work correctly based on length and apply. - */ + * Note that IndexedSeq provides hashCode and equals that + * work correctly based on length and apply. + */ } case class DenseVector[V](iseq: Vector[V], override val sparseValue: V, override val denseCount: Int) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala b/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala index 7efe5da31..6dc5fb038 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala @@ -67,8 +67,8 @@ object Aggregator extends java.io.Serializable { * Obtain an [[Aggregator]] that uses an efficient append operation for faster aggregation. * Equivalent to {{{ appendSemigroup(prep, appnd, identity[T]_)(sg) }}} */ - def appendSemigroup[F, T](prep: F => T, appnd: (T, F) => T)( - implicit sg: Semigroup[T] + def appendSemigroup[F, T](prep: F => T, appnd: (T, F) => T)(implicit + sg: Semigroup[T] ): Aggregator[F, T, T] = appendSemigroup(prep, appnd, identity[T])(sg) @@ -84,8 +84,8 @@ object Aggregator extends java.io.Serializable { * @param sg The [[Semigroup]] type class * @note The functions 'appnd' and 'prep' are expected to obey the law: {{{ appnd(t, f) == sg.plus(t, prep(f)) }}} */ - def appendSemigroup[F, T, P](prep: F => T, appnd: (T, F) => T, pres: T => P)( - implicit sg: Semigroup[T] + def appendSemigroup[F, T, P](prep: F => T, appnd: (T, F) => T, pres: T => P)(implicit + sg: Semigroup[T] ): Aggregator[F, T, P] = new Aggregator[F, T, P] { override def semigroup: Semigroup[T] = sg @@ -130,8 +130,8 @@ object Aggregator extends java.io.Serializable { * @param m The [[Monoid]] type class * @note The function 'appnd' is expected to obey the law: {{{ appnd(t, f) == m.plus(t, appnd(m.zero, f)) }}} */ - def appendMonoid[F, T, P](appnd: (T, F) => T, pres: T => P)( - implicit m: Monoid[T] + def appendMonoid[F, T, P](appnd: (T, F) => T, pres: T => P)(implicit + m: Monoid[T] ): MonoidAggregator[F, T, P] = new MonoidAggregator[F, T, P] { override def monoid: Monoid[T] = m @@ -320,8 +320,8 @@ object Aggregator extends java.io.Serializable { * Returns the lower bound of a given percentile where the percentile is between (0,1] * The items that are iterated over cannot be negative. 
*/ - def approximatePercentile[T](percentile: Double, k: Int = QTreeAggregator.DefaultK)( - implicit num: Numeric[T] + def approximatePercentile[T](percentile: Double, k: Int = QTreeAggregator.DefaultK)(implicit + num: Numeric[T] ): QTreeAggregatorLowerBound[T] = QTreeAggregatorLowerBound[T](percentile, k) @@ -329,8 +329,8 @@ object Aggregator extends java.io.Serializable { * Returns the intersection of a bounded percentile where the percentile is between (0,1] * The items that are iterated over cannot be negative. */ - def approximatePercentileBounds[T](percentile: Double, k: Int = QTreeAggregator.DefaultK)( - implicit num: Numeric[T] + def approximatePercentileBounds[T](percentile: Double, k: Int = QTreeAggregator.DefaultK)(implicit + num: Numeric[T] ): QTreeAggregator[T] = QTreeAggregator[T](percentile, k) @@ -482,10 +482,14 @@ trait Aggregator[-A, B, +C] extends java.io.Serializable { self => * joining a Fold with an Aggregator to produce a Fold */ def toFold: Fold[A, Option[C]] = - Fold.fold[Option[B], A, Option[C]]({ - case (None, a) => Some(self.prepare(a)) - case (Some(b), a) => Some(self.append(b, a)) - }, None, _.map(self.present)) + Fold.fold[Option[B], A, Option[C]]( + { + case (None, a) => Some(self.prepare(a)) + case (Some(b), a) => Some(self.append(b, a)) + }, + None, + _.map(self.present) + ) def lift: MonoidAggregator[A, Option[B], Option[C]] = new MonoidAggregator[A, Option[B], Option[C]] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala b/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala index f705c1431..60e59ec11 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala @@ -87,12 +87,12 @@ object Applicative { app.join(mt, mu) def join[M[_], T1, T2, T3](m1: M[T1], m2: M[T2], m3: M[T3])(implicit app: Applicative[M]): M[(T1, T2, T3)] = app.join(m1, m2, m3) - def join[M[_], T1, T2, T3, T4](m1: M[T1], m2: M[T2], m3: M[T3], m4: M[T4])( - implicit app: Applicative[M] + def join[M[_], T1, T2, T3, T4](m1: M[T1], m2: M[T2], m3: M[T3], m4: M[T4])(implicit + app: Applicative[M] ): M[(T1, T2, T3, T4)] = app.join(m1, m2, m3, m4) - def join[M[_], T1, T2, T3, T4, T5](m1: M[T1], m2: M[T2], m3: M[T3], m4: M[T4], m5: M[T5])( - implicit app: Applicative[M] + def join[M[_], T1, T2, T3, T4, T5](m1: M[T1], m2: M[T2], m3: M[T3], m4: M[T4], m5: M[T5])(implicit + app: Applicative[M] ): M[(T1, T2, T3, T4, T5)] = app.join(m1, m2, m3, m4, m5) def sequence[M[_], T](ms: Seq[M[T]])(implicit app: Applicative[M]): M[Seq[T]] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Approximate.scala b/algebird-core/src/main/scala/com/twitter/algebird/Approximate.scala index 80d8a7f53..9a9adde99 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Approximate.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Approximate.scala @@ -71,8 +71,8 @@ object ApproximateBoolean { } // Note the probWithinBounds is a LOWER BOUND (at least this probability) -case class Approximate[N](min: N, estimate: N, max: N, probWithinBounds: Double)( - implicit val numeric: Numeric[N] +case class Approximate[N](min: N, estimate: N, max: N, probWithinBounds: Double)(implicit + val numeric: Numeric[N] ) extends ApproximateSet[N] { require(numeric.lteq(min, estimate) && numeric.lteq(estimate, max)) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala b/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala index 
c7018e8b7..beb263c9e 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala @@ -136,7 +136,7 @@ object AveragedGroup extends Group[AveragedValue] with CommutativeGroup[Averaged override val zero: AveragedValue = AveragedValue(0L, 0.0) - override def isNonZero(av: AveragedValue): Boolean = (av.count != 0L) + override def isNonZero(av: AveragedValue): Boolean = av.count != 0L override def negate(av: AveragedValue): AveragedValue = -av diff --git a/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala b/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala index ad6e5bc37..57e2c043d 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala @@ -299,7 +299,7 @@ object BF { var pos = 0 - override def hasNext: Boolean = (pos < len) + override def hasNext: Boolean = pos < len override def next: Int = { val n = uniqVs(pos) pos += 1 diff --git a/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala b/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala index 5222d6d52..d37f3c3e2 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala @@ -54,14 +54,14 @@ object DecayedVector extends CompatDecayedVector { )(implicit vs: VectorSpace[Double, ({ type x[a] = Map[K, a] })#x], metric: Metric[Map[K, Double]]) = monoidWithEpsilon[({ type x[a] = Map[K, a] })#x](eps) - implicit def mapMonoid[K]( - implicit vs: VectorSpace[Double, ({ type x[a] = Map[K, a] })#x], + implicit def mapMonoid[K](implicit + vs: VectorSpace[Double, ({ type x[a] = Map[K, a] })#x], metric: Metric[Map[K, Double]] ) = mapMonoidWithEpsilon(-1.0) - def scaledPlus[C[_]](newVal: DecayedVector[C], oldVal: DecayedVector[C], eps: Double)( - implicit vs: VectorSpace[Double, C], + def scaledPlus[C[_]](newVal: DecayedVector[C], oldVal: DecayedVector[C], eps: Double)(implicit + vs: VectorSpace[Double, C], metric: Metric[C[Double]] ): DecayedVector[C] = { implicit val mon: Monoid[C[Double]] = vs.group diff --git a/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala b/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala index 56e6b4469..b0181b4e7 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala @@ -681,8 +681,8 @@ object DecayingCMS { * (e.g. 5-10) is fine. Each update requires O(depth) work so you * want to keep this as small as possible. 
*/ - def apply[K](seed: Long, halfLife: Duration, depth: Int, width: Int)( - implicit hasher: CMSHasher[K] + def apply[K](seed: Long, halfLife: Duration, depth: Int, width: Int)(implicit + hasher: CMSHasher[K] ): DecayingCMS[K] = new DecayingCMS(seed, halfLife, depth, width, hasher) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala b/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala index 927b7e6ff..e9088f58e 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala @@ -37,8 +37,8 @@ import scala.collection.compat._ * @param E eventual type * @param O original type */ -class EventuallySemigroup[E, O](convert: O => E)(mustConvert: O => Boolean)( - implicit eventualSemigroup: Semigroup[E], +class EventuallySemigroup[E, O](convert: O => E)(mustConvert: O => Boolean)(implicit + eventualSemigroup: Semigroup[E], originalSemigroup: Semigroup[O] ) extends Semigroup[Either[E, O]] { @@ -124,8 +124,8 @@ class EventuallySemigroup[E, O](convert: O => E)(mustConvert: O => Boolean)( /** * @see EventuallySemigroup */ -class EventuallyMonoid[E, O](convert: O => E)(mustConvert: O => Boolean)( - implicit lSemigroup: Semigroup[E], +class EventuallyMonoid[E, O](convert: O => E)(mustConvert: O => Boolean)(implicit + lSemigroup: Semigroup[E], rMonoid: Monoid[O] ) extends EventuallySemigroup[E, O](convert)(mustConvert) with Monoid[Either[E, O]] { @@ -137,8 +137,8 @@ class EventuallyMonoid[E, O](convert: O => E)(mustConvert: O => Boolean)( /** * @see EventuallySemigroup */ -class EventuallyGroup[E, O](convert: O => E)(mustConvert: O => Boolean)( - implicit lGroup: Group[E], +class EventuallyGroup[E, O](convert: O => E)(mustConvert: O => Boolean)(implicit + lGroup: Group[E], rGroup: Group[O] ) extends EventuallyMonoid[E, O](convert)(mustConvert) with Group[Either[E, O]] { @@ -156,8 +156,8 @@ class EventuallyGroup[E, O](convert: O => E)(mustConvert: O => Boolean)( /** * @see EventuallySemigroup */ -class EventuallyRing[E, O](convert: O => E)(mustConvert: O => Boolean)( - implicit lRing: Ring[E], +class EventuallyRing[E, O](convert: O => E)(mustConvert: O => Boolean)(implicit + lRing: Ring[E], rRing: Ring[O] ) extends EventuallyGroup[E, O](convert)(mustConvert) with Ring[Either[E, O]] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala b/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala index 3237f7923..c0bb6fe32 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala @@ -101,9 +101,13 @@ case class ExpHist( * into this exponential histogram instance. */ def fold: Fold[Bucket, ExpHist] = - Fold.foldMutable[Builder[Bucket, Vector[Bucket]], Bucket, ExpHist]({ - case (b, bucket) => b += bucket - }, _ => Vector.newBuilder[Bucket], x => addAll(x.result)) + Fold.foldMutable[Builder[Bucket, Vector[Bucket]], Bucket, ExpHist]( + { + case (b, bucket) => b += bucket + }, + _ => Vector.newBuilder[Bucket], + x => addAll(x.result) + ) // This internal method assumes that the instance is stepped forward // already, and does NOT try to step internally. 
It also assumes diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala b/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala index 0ef7a5e62..43393b6a1 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala @@ -83,11 +83,15 @@ sealed trait Fold[-I, +O] extends Serializable { override def build(): FoldState[X, I2, Q] = { val first = self.build() val second = other.build() - new FoldState({ - case ((x, y), i) => (first.add(x, i), second.add(y, i)) - }, (first.start, second.start), { - case (x, y) => f(first.end(x), second.end(y)) - }) + new FoldState( + { + case ((x, y), i) => (first.add(x, i), second.add(y, i)) + }, + (first.start, second.start), + { + case (x, y) => f(first.end(x), second.end(y)) + } + ) } } } @@ -231,7 +235,7 @@ object Fold extends CompatFold { val starts: Seq[Any] = bs.map(_.start) val add: (Seq[Any], I) => Seq[Any] = { (xs, i) => adds.zip(xs).map { case (f, x) => f(x, i) } } - val end: (Seq[Any] => Seq[O]) = { xs => ends.zip(xs).map { case (f, x) => f(x) } } + val end: (Seq[Any] => Seq[O]) = { xs => ends.zip(xs).map { case (f, x) => f(x) } } new FoldState(add, starts, end) } } @@ -276,9 +280,9 @@ object Fold extends CompatFold { */ def max[I](implicit ordering: Ordering[I]): Fold[I, Option[I]] = Fold.foldLeft[I, Option[I]](None) { - case (None, i) => Some(i) - case (Some(y), i) if (ordering.compare(y, i) < 0) => Some(i) - case (x, _) => x + case (None, i) => Some(i) + case (Some(y), i) if ordering.compare(y, i) < 0 => Some(i) + case (x, _) => x } /** @@ -286,9 +290,9 @@ object Fold extends CompatFold { */ def min[I](implicit ordering: Ordering[I]): Fold[I, Option[I]] = Fold.foldLeft[I, Option[I]](None) { - case (None, i) => Some(i) - case (Some(y), i) if (ordering.compare(y, i) > 0) => Some(i) - case (x, _) => x + case (None, i) => Some(i) + case (Some(y), i) if ordering.compare(y, i) > 0 => Some(i) + case (x, _) => x } /** diff --git a/algebird-core/src/main/scala/com/twitter/algebird/GeneratedAbstractAlgebra.scala b/algebird-core/src/main/scala/com/twitter/algebird/GeneratedAbstractAlgebra.scala index c9713e928..8da148093 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/GeneratedAbstractAlgebra.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/GeneratedAbstractAlgebra.scala @@ -54,8 +54,8 @@ class Tuple2Ring[A, B](implicit aring: Ring[A], bring: Ring[B]) extends Tuple2Gr /** * Combine 3 semigroups into a product semigroup */ -class Tuple3Semigroup[A, B, C]( - implicit asemigroup: Semigroup[A], +class Tuple3Semigroup[A, B, C](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C] ) extends Semigroup[(A, B, C)] { @@ -107,8 +107,8 @@ class Tuple3Ring[A, B, C](implicit aring: Ring[A], bring: Ring[B], cring: Ring[C /** * Combine 4 semigroups into a product semigroup */ -class Tuple4Semigroup[A, B, C, D]( - implicit asemigroup: Semigroup[A], +class Tuple4Semigroup[A, B, C, D](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D] @@ -138,8 +138,8 @@ class Tuple4Semigroup[A, B, C, D]( /** * Combine 4 monoids into a product monoid */ -class Tuple4Monoid[A, B, C, D]( - implicit amonoid: Monoid[A], +class Tuple4Monoid[A, B, C, D](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D] @@ -174,8 +174,8 @@ class Tuple4Ring[A, B, C, D](implicit aring: Ring[A], bring: Ring[B], cring: Rin /** * Combine 5 semigroups into a 
product semigroup */ -class Tuple5Semigroup[A, B, C, D, E]( - implicit asemigroup: Semigroup[A], +class Tuple5Semigroup[A, B, C, D, E](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -208,8 +208,8 @@ class Tuple5Semigroup[A, B, C, D, E]( /** * Combine 5 monoids into a product monoid */ -class Tuple5Monoid[A, B, C, D, E]( - implicit amonoid: Monoid[A], +class Tuple5Monoid[A, B, C, D, E](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -223,8 +223,8 @@ class Tuple5Monoid[A, B, C, D, E]( /** * Combine 5 groups into a product group */ -class Tuple5Group[A, B, C, D, E]( - implicit agroup: Group[A], +class Tuple5Group[A, B, C, D, E](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -246,8 +246,8 @@ class Tuple5Group[A, B, C, D, E]( /** * Combine 5 rings into a product ring */ -class Tuple5Ring[A, B, C, D, E]( - implicit aring: Ring[A], +class Tuple5Ring[A, B, C, D, E](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -268,8 +268,8 @@ class Tuple5Ring[A, B, C, D, E]( /** * Combine 6 semigroups into a product semigroup */ -class Tuple6Semigroup[A, B, C, D, E, F]( - implicit asemigroup: Semigroup[A], +class Tuple6Semigroup[A, B, C, D, E, F](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -305,8 +305,8 @@ class Tuple6Semigroup[A, B, C, D, E, F]( /** * Combine 6 monoids into a product monoid */ -class Tuple6Monoid[A, B, C, D, E, F]( - implicit amonoid: Monoid[A], +class Tuple6Monoid[A, B, C, D, E, F](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -321,8 +321,8 @@ class Tuple6Monoid[A, B, C, D, E, F]( /** * Combine 6 groups into a product group */ -class Tuple6Group[A, B, C, D, E, F]( - implicit agroup: Group[A], +class Tuple6Group[A, B, C, D, E, F](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -353,8 +353,8 @@ class Tuple6Group[A, B, C, D, E, F]( /** * Combine 6 rings into a product ring */ -class Tuple6Ring[A, B, C, D, E, F]( - implicit aring: Ring[A], +class Tuple6Ring[A, B, C, D, E, F](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -378,8 +378,8 @@ class Tuple6Ring[A, B, C, D, E, F]( /** * Combine 7 semigroups into a product semigroup */ -class Tuple7Semigroup[A, B, C, D, E, F, G]( - implicit asemigroup: Semigroup[A], +class Tuple7Semigroup[A, B, C, D, E, F, G](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -429,8 +429,8 @@ class Tuple7Semigroup[A, B, C, D, E, F, G]( /** * Combine 7 monoids into a product monoid */ -class Tuple7Monoid[A, B, C, D, E, F, G]( - implicit amonoid: Monoid[A], +class Tuple7Monoid[A, B, C, D, E, F, G](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -446,8 +446,8 @@ class Tuple7Monoid[A, B, C, D, E, F, G]( /** * Combine 7 groups into a product group */ -class Tuple7Group[A, B, C, D, E, F, G]( - implicit agroup: Group[A], +class Tuple7Group[A, B, C, D, E, F, G](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -481,8 +481,8 @@ class Tuple7Group[A, B, C, D, E, F, G]( /** * Combine 7 rings into a product ring */ -class Tuple7Ring[A, B, C, D, E, F, G]( - implicit aring: Ring[A], +class Tuple7Ring[A, B, C, D, E, F, G](implicit + aring: Ring[A], bring: Ring[B], cring: 
Ring[C], dring: Ring[D], @@ -508,8 +508,8 @@ class Tuple7Ring[A, B, C, D, E, F, G]( /** * Combine 8 semigroups into a product semigroup */ -class Tuple8Semigroup[A, B, C, D, E, F, G, H]( - implicit asemigroup: Semigroup[A], +class Tuple8Semigroup[A, B, C, D, E, F, G, H](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -563,8 +563,8 @@ class Tuple8Semigroup[A, B, C, D, E, F, G, H]( /** * Combine 8 monoids into a product monoid */ -class Tuple8Monoid[A, B, C, D, E, F, G, H]( - implicit amonoid: Monoid[A], +class Tuple8Monoid[A, B, C, D, E, F, G, H](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -590,8 +590,8 @@ class Tuple8Monoid[A, B, C, D, E, F, G, H]( /** * Combine 8 groups into a product group */ -class Tuple8Group[A, B, C, D, E, F, G, H]( - implicit agroup: Group[A], +class Tuple8Group[A, B, C, D, E, F, G, H](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -628,8 +628,8 @@ class Tuple8Group[A, B, C, D, E, F, G, H]( /** * Combine 8 rings into a product ring */ -class Tuple8Ring[A, B, C, D, E, F, G, H]( - implicit aring: Ring[A], +class Tuple8Ring[A, B, C, D, E, F, G, H](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -657,8 +657,8 @@ class Tuple8Ring[A, B, C, D, E, F, G, H]( /** * Combine 9 semigroups into a product semigroup */ -class Tuple9Semigroup[A, B, C, D, E, F, G, H, I]( - implicit asemigroup: Semigroup[A], +class Tuple9Semigroup[A, B, C, D, E, F, G, H, I](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -721,8 +721,8 @@ class Tuple9Semigroup[A, B, C, D, E, F, G, H, I]( /** * Combine 9 monoids into a product monoid */ -class Tuple9Monoid[A, B, C, D, E, F, G, H, I]( - implicit amonoid: Monoid[A], +class Tuple9Monoid[A, B, C, D, E, F, G, H, I](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -750,8 +750,8 @@ class Tuple9Monoid[A, B, C, D, E, F, G, H, I]( /** * Combine 9 groups into a product group */ -class Tuple9Group[A, B, C, D, E, F, G, H, I]( - implicit agroup: Group[A], +class Tuple9Group[A, B, C, D, E, F, G, H, I](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -794,8 +794,8 @@ class Tuple9Group[A, B, C, D, E, F, G, H, I]( /** * Combine 9 rings into a product ring */ -class Tuple9Ring[A, B, C, D, E, F, G, H, I]( - implicit aring: Ring[A], +class Tuple9Ring[A, B, C, D, E, F, G, H, I](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -828,8 +828,8 @@ class Tuple9Ring[A, B, C, D, E, F, G, H, I]( /** * Combine 10 semigroups into a product semigroup */ -class Tuple10Semigroup[A, B, C, D, E, F, G, H, I, J]( - implicit asemigroup: Semigroup[A], +class Tuple10Semigroup[A, B, C, D, E, F, G, H, I, J](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -897,8 +897,8 @@ class Tuple10Semigroup[A, B, C, D, E, F, G, H, I, J]( /** * Combine 10 monoids into a product monoid */ -class Tuple10Monoid[A, B, C, D, E, F, G, H, I, J]( - implicit amonoid: Monoid[A], +class Tuple10Monoid[A, B, C, D, E, F, G, H, I, J](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -928,8 +928,8 @@ class Tuple10Monoid[A, B, C, D, E, F, G, H, I, J]( /** * Combine 10 groups into a product group */ -class Tuple10Group[A, B, C, D, E, F, G, H, I, J]( - 
implicit agroup: Group[A], +class Tuple10Group[A, B, C, D, E, F, G, H, I, J](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -975,8 +975,8 @@ class Tuple10Group[A, B, C, D, E, F, G, H, I, J]( /** * Combine 10 rings into a product ring */ -class Tuple10Ring[A, B, C, D, E, F, G, H, I, J]( - implicit aring: Ring[A], +class Tuple10Ring[A, B, C, D, E, F, G, H, I, J](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -1022,8 +1022,8 @@ class Tuple10Ring[A, B, C, D, E, F, G, H, I, J]( /** * Combine 11 semigroups into a product semigroup */ -class Tuple11Semigroup[A, B, C, D, E, F, G, H, I, J, K]( - implicit asemigroup: Semigroup[A], +class Tuple11Semigroup[A, B, C, D, E, F, G, H, I, J, K](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -1095,8 +1095,8 @@ class Tuple11Semigroup[A, B, C, D, E, F, G, H, I, J, K]( /** * Combine 11 monoids into a product monoid */ -class Tuple11Monoid[A, B, C, D, E, F, G, H, I, J, K]( - implicit amonoid: Monoid[A], +class Tuple11Monoid[A, B, C, D, E, F, G, H, I, J, K](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -1128,8 +1128,8 @@ class Tuple11Monoid[A, B, C, D, E, F, G, H, I, J, K]( /** * Combine 11 groups into a product group */ -class Tuple11Group[A, B, C, D, E, F, G, H, I, J, K]( - implicit agroup: Group[A], +class Tuple11Group[A, B, C, D, E, F, G, H, I, J, K](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -1178,8 +1178,8 @@ class Tuple11Group[A, B, C, D, E, F, G, H, I, J, K]( /** * Combine 11 rings into a product ring */ -class Tuple11Ring[A, B, C, D, E, F, G, H, I, J, K]( - implicit aring: Ring[A], +class Tuple11Ring[A, B, C, D, E, F, G, H, I, J, K](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -1228,8 +1228,8 @@ class Tuple11Ring[A, B, C, D, E, F, G, H, I, J, K]( /** * Combine 12 semigroups into a product semigroup */ -class Tuple12Semigroup[A, B, C, D, E, F, G, H, I, J, K, L]( - implicit asemigroup: Semigroup[A], +class Tuple12Semigroup[A, B, C, D, E, F, G, H, I, J, K, L](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -1305,8 +1305,8 @@ class Tuple12Semigroup[A, B, C, D, E, F, G, H, I, J, K, L]( /** * Combine 12 monoids into a product monoid */ -class Tuple12Monoid[A, B, C, D, E, F, G, H, I, J, K, L]( - implicit amonoid: Monoid[A], +class Tuple12Monoid[A, B, C, D, E, F, G, H, I, J, K, L](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -1340,8 +1340,8 @@ class Tuple12Monoid[A, B, C, D, E, F, G, H, I, J, K, L]( /** * Combine 12 groups into a product group */ -class Tuple12Group[A, B, C, D, E, F, G, H, I, J, K, L]( - implicit agroup: Group[A], +class Tuple12Group[A, B, C, D, E, F, G, H, I, J, K, L](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -1393,8 +1393,8 @@ class Tuple12Group[A, B, C, D, E, F, G, H, I, J, K, L]( /** * Combine 12 rings into a product ring */ -class Tuple12Ring[A, B, C, D, E, F, G, H, I, J, K, L]( - implicit aring: Ring[A], +class Tuple12Ring[A, B, C, D, E, F, G, H, I, J, K, L](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -1446,8 +1446,8 @@ class Tuple12Ring[A, B, C, D, E, F, G, H, I, J, K, L]( /** * Combine 13 semigroups into a product semigroup */ -class Tuple13Semigroup[A, B, C, D, E, F, G, H, I, J, K, 
L, M]( - implicit asemigroup: Semigroup[A], +class Tuple13Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -1528,8 +1528,8 @@ class Tuple13Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M]( /** * Combine 13 monoids into a product monoid */ -class Tuple13Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M]( - implicit amonoid: Monoid[A], +class Tuple13Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -1565,8 +1565,8 @@ class Tuple13Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M]( /** * Combine 13 groups into a product group */ -class Tuple13Group[A, B, C, D, E, F, G, H, I, J, K, L, M]( - implicit agroup: Group[A], +class Tuple13Group[A, B, C, D, E, F, G, H, I, J, K, L, M](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -1621,8 +1621,8 @@ class Tuple13Group[A, B, C, D, E, F, G, H, I, J, K, L, M]( /** * Combine 13 rings into a product ring */ -class Tuple13Ring[A, B, C, D, E, F, G, H, I, J, K, L, M]( - implicit aring: Ring[A], +class Tuple13Ring[A, B, C, D, E, F, G, H, I, J, K, L, M](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -1677,8 +1677,8 @@ class Tuple13Ring[A, B, C, D, E, F, G, H, I, J, K, L, M]( /** * Combine 14 semigroups into a product semigroup */ -class Tuple14Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N]( - implicit asemigroup: Semigroup[A], +class Tuple14Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -1763,8 +1763,8 @@ class Tuple14Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N]( /** * Combine 14 monoids into a product monoid */ -class Tuple14Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N]( - implicit amonoid: Monoid[A], +class Tuple14Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -1802,8 +1802,8 @@ class Tuple14Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N]( /** * Combine 14 groups into a product group */ -class Tuple14Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N]( - implicit agroup: Group[A], +class Tuple14Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -1863,8 +1863,8 @@ class Tuple14Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N]( /** * Combine 14 rings into a product ring */ -class Tuple14Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N]( - implicit aring: Ring[A], +class Tuple14Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -1922,8 +1922,8 @@ class Tuple14Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N]( /** * Combine 15 semigroups into a product semigroup */ -class Tuple15Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( - implicit asemigroup: Semigroup[A], +class Tuple15Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -2012,8 +2012,8 @@ class Tuple15Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( /** * Combine 15 monoids into a product monoid */ -class Tuple15Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( - implicit amonoid: Monoid[A], +class Tuple15Monoid[A, B, C, D, E, F, G, H, 
I, J, K, L, M, N, O](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -2053,8 +2053,8 @@ class Tuple15Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( /** * Combine 15 groups into a product group */ -class Tuple15Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( - implicit agroup: Group[A], +class Tuple15Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -2117,8 +2117,8 @@ class Tuple15Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( /** * Combine 15 rings into a product ring */ -class Tuple15Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( - implicit aring: Ring[A], +class Tuple15Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -2179,8 +2179,8 @@ class Tuple15Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( /** * Combine 16 semigroups into a product semigroup */ -class Tuple16Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( - implicit asemigroup: Semigroup[A], +class Tuple16Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -2274,8 +2274,8 @@ class Tuple16Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( /** * Combine 16 monoids into a product monoid */ -class Tuple16Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( - implicit amonoid: Monoid[A], +class Tuple16Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -2317,8 +2317,8 @@ class Tuple16Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( /** * Combine 16 groups into a product group */ -class Tuple16Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( - implicit agroup: Group[A], +class Tuple16Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -2384,8 +2384,8 @@ class Tuple16Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( /** * Combine 16 rings into a product ring */ -class Tuple16Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( - implicit aring: Ring[A], +class Tuple16Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -2449,8 +2449,8 @@ class Tuple16Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( /** * Combine 17 semigroups into a product semigroup */ -class Tuple17Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( - implicit asemigroup: Semigroup[A], +class Tuple17Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -2548,8 +2548,8 @@ class Tuple17Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( /** * Combine 17 monoids into a product monoid */ -class Tuple17Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( - implicit amonoid: Monoid[A], +class Tuple17Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -2593,8 +2593,8 @@ class Tuple17Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( /** * Combine 17 groups into a product group */ -class Tuple17Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( - implicit agroup: 
Group[A], +class Tuple17Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -2663,8 +2663,8 @@ class Tuple17Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( /** * Combine 17 rings into a product ring */ -class Tuple17Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( - implicit aring: Ring[A], +class Tuple17Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -2731,8 +2731,8 @@ class Tuple17Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( /** * Combine 18 semigroups into a product semigroup */ -class Tuple18Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( - implicit asemigroup: Semigroup[A], +class Tuple18Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -2834,8 +2834,8 @@ class Tuple18Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( /** * Combine 18 monoids into a product monoid */ -class Tuple18Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( - implicit amonoid: Monoid[A], +class Tuple18Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -2881,8 +2881,8 @@ class Tuple18Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( /** * Combine 18 groups into a product group */ -class Tuple18Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( - implicit agroup: Group[A], +class Tuple18Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -2954,8 +2954,8 @@ class Tuple18Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( /** * Combine 18 rings into a product ring */ -class Tuple18Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( - implicit aring: Ring[A], +class Tuple18Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -3025,8 +3025,8 @@ class Tuple18Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( /** * Combine 19 semigroups into a product semigroup */ -class Tuple19Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( - implicit asemigroup: Semigroup[A], +class Tuple19Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -3133,8 +3133,8 @@ class Tuple19Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( /** * Combine 19 monoids into a product monoid */ -class Tuple19Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( - implicit amonoid: Monoid[A], +class Tuple19Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -3182,8 +3182,8 @@ class Tuple19Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( /** * Combine 19 groups into a product group */ -class Tuple19Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( - implicit agroup: Group[A], +class Tuple19Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -3258,8 +3258,8 
@@ class Tuple19Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( /** * Combine 19 rings into a product ring */ -class Tuple19Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( - implicit aring: Ring[A], +class Tuple19Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -3332,8 +3332,8 @@ class Tuple19Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( /** * Combine 20 semigroups into a product semigroup */ -class Tuple20Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( - implicit asemigroup: Semigroup[A], +class Tuple20Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -3444,8 +3444,8 @@ class Tuple20Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, /** * Combine 20 monoids into a product monoid */ -class Tuple20Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( - implicit amonoid: Monoid[A], +class Tuple20Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -3495,8 +3495,8 @@ class Tuple20Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( /** * Combine 20 groups into a product group */ -class Tuple20Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( - implicit agroup: Group[A], +class Tuple20Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -3574,8 +3574,8 @@ class Tuple20Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( /** * Combine 20 rings into a product ring */ -class Tuple20Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( - implicit aring: Ring[A], +class Tuple20Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -3651,8 +3651,8 @@ class Tuple20Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( /** * Combine 21 semigroups into a product semigroup */ -class Tuple21Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( - implicit asemigroup: Semigroup[A], +class Tuple21Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -3767,8 +3767,8 @@ class Tuple21Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, /** * Combine 21 monoids into a product monoid */ -class Tuple21Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( - implicit amonoid: Monoid[A], +class Tuple21Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -3820,8 +3820,8 @@ class Tuple21Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, /** * Combine 21 groups into a product group */ -class Tuple21Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( - implicit agroup: Group[A], +class Tuple21Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -3902,8 +3902,8 @@ class Tuple21Group[A, B, C, D, E, F, G, H, I, J, 
K, L, M, N, O, P, Q, R, S, T, U /** * Combine 21 rings into a product ring */ -class Tuple21Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( - implicit aring: Ring[A], +class Tuple21Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -3982,8 +3982,8 @@ class Tuple21Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] /** * Combine 22 semigroups into a product semigroup */ -class Tuple22Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( - implicit asemigroup: Semigroup[A], +class Tuple22Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4103,8 +4103,8 @@ class Tuple22Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, /** * Combine 22 monoids into a product monoid */ -class Tuple22Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( - implicit amonoid: Monoid[A], +class Tuple22Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -4158,8 +4158,8 @@ class Tuple22Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, /** * Combine 22 groups into a product group */ -class Tuple22Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( - implicit agroup: Group[A], +class Tuple22Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -4243,8 +4243,8 @@ class Tuple22Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U /** * Combine 22 rings into a product ring */ -class Tuple22Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( - implicit aring: Ring[A], +class Tuple22Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -4324,29 +4324,29 @@ class Tuple22Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, } trait GeneratedSemigroupImplicits { - implicit def semigroup2[A, B]( - implicit asemigroup: Semigroup[A], + implicit def semigroup2[A, B](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B] ): Semigroup[(A, B)] = new Tuple2Semigroup[A, B]()(asemigroup, bsemigroup) - implicit def semigroup3[A, B, C]( - implicit asemigroup: Semigroup[A], + implicit def semigroup3[A, B, C](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C] ): Semigroup[(A, B, C)] = new Tuple3Semigroup[A, B, C]()(asemigroup, bsemigroup, csemigroup) - implicit def semigroup4[A, B, C, D]( - implicit asemigroup: Semigroup[A], + implicit def semigroup4[A, B, C, D](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D] ): Semigroup[(A, B, C, D)] = new Tuple4Semigroup[A, B, C, D]()(asemigroup, bsemigroup, csemigroup, dsemigroup) - implicit def semigroup5[A, B, C, D, E]( - implicit asemigroup: Semigroup[A], + implicit def semigroup5[A, B, C, D, E](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4354,8 +4354,8 @@ trait GeneratedSemigroupImplicits { ): Semigroup[(A, B, C, D, E)] = new Tuple5Semigroup[A, B, C, D, E]()(asemigroup, bsemigroup, csemigroup, 
dsemigroup, esemigroup) - implicit def semigroup6[A, B, C, D, E, F]( - implicit asemigroup: Semigroup[A], + implicit def semigroup6[A, B, C, D, E, F](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4371,8 +4371,8 @@ trait GeneratedSemigroupImplicits { fsemigroup ) - implicit def semigroup7[A, B, C, D, E, F, G]( - implicit asemigroup: Semigroup[A], + implicit def semigroup7[A, B, C, D, E, F, G](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4390,8 +4390,8 @@ trait GeneratedSemigroupImplicits { gsemigroup ) - implicit def semigroup8[A, B, C, D, E, F, G, H]( - implicit asemigroup: Semigroup[A], + implicit def semigroup8[A, B, C, D, E, F, G, H](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4411,8 +4411,8 @@ trait GeneratedSemigroupImplicits { hsemigroup ) - implicit def semigroup9[A, B, C, D, E, F, G, H, I]( - implicit asemigroup: Semigroup[A], + implicit def semigroup9[A, B, C, D, E, F, G, H, I](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4434,8 +4434,8 @@ trait GeneratedSemigroupImplicits { isemigroup ) - implicit def semigroup10[A, B, C, D, E, F, G, H, I, J]( - implicit asemigroup: Semigroup[A], + implicit def semigroup10[A, B, C, D, E, F, G, H, I, J](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4459,8 +4459,8 @@ trait GeneratedSemigroupImplicits { jsemigroup ) - implicit def semigroup11[A, B, C, D, E, F, G, H, I, J, K]( - implicit asemigroup: Semigroup[A], + implicit def semigroup11[A, B, C, D, E, F, G, H, I, J, K](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4486,8 +4486,8 @@ trait GeneratedSemigroupImplicits { ksemigroup ) - implicit def semigroup12[A, B, C, D, E, F, G, H, I, J, K, L]( - implicit asemigroup: Semigroup[A], + implicit def semigroup12[A, B, C, D, E, F, G, H, I, J, K, L](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4515,8 +4515,8 @@ trait GeneratedSemigroupImplicits { lsemigroup ) - implicit def semigroup13[A, B, C, D, E, F, G, H, I, J, K, L, M]( - implicit asemigroup: Semigroup[A], + implicit def semigroup13[A, B, C, D, E, F, G, H, I, J, K, L, M](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4546,8 +4546,8 @@ trait GeneratedSemigroupImplicits { msemigroup ) - implicit def semigroup14[A, B, C, D, E, F, G, H, I, J, K, L, M, N]( - implicit asemigroup: Semigroup[A], + implicit def semigroup14[A, B, C, D, E, F, G, H, I, J, K, L, M, N](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4579,8 +4579,8 @@ trait GeneratedSemigroupImplicits { nsemigroup ) - implicit def semigroup15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( - implicit asemigroup: Semigroup[A], + implicit def semigroup15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4614,8 +4614,8 @@ trait GeneratedSemigroupImplicits { osemigroup ) - implicit def semigroup16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( - implicit asemigroup: Semigroup[A], + implicit def 
semigroup16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4651,8 +4651,8 @@ trait GeneratedSemigroupImplicits { psemigroup ) - implicit def semigroup17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( - implicit asemigroup: Semigroup[A], + implicit def semigroup17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4690,8 +4690,8 @@ trait GeneratedSemigroupImplicits { qsemigroup ) - implicit def semigroup18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( - implicit asemigroup: Semigroup[A], + implicit def semigroup18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4731,8 +4731,8 @@ trait GeneratedSemigroupImplicits { rsemigroup ) - implicit def semigroup19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( - implicit asemigroup: Semigroup[A], + implicit def semigroup19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4774,8 +4774,8 @@ trait GeneratedSemigroupImplicits { ssemigroup ) - implicit def semigroup20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( - implicit asemigroup: Semigroup[A], + implicit def semigroup20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4819,8 +4819,8 @@ trait GeneratedSemigroupImplicits { tsemigroup ) - implicit def semigroup21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( - implicit asemigroup: Semigroup[A], + implicit def semigroup21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4866,8 +4866,8 @@ trait GeneratedSemigroupImplicits { usemigroup ) - implicit def semigroup22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( - implicit asemigroup: Semigroup[A], + implicit def semigroup22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4921,23 +4921,23 @@ trait GeneratedMonoidImplicits { implicit def monoid2[A, B](implicit amonoid: Monoid[A], bmonoid: Monoid[B]): Monoid[(A, B)] = new Tuple2Monoid[A, B]()(amonoid, bmonoid) - implicit def monoid3[A, B, C]( - implicit amonoid: Monoid[A], + implicit def monoid3[A, B, C](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C] ): Monoid[(A, B, C)] = new Tuple3Monoid[A, B, C]()(amonoid, bmonoid, cmonoid) - implicit def monoid4[A, B, C, D]( - implicit amonoid: Monoid[A], + implicit def monoid4[A, B, C, D](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D] ): Monoid[(A, B, C, D)] = new Tuple4Monoid[A, B, C, D]()(amonoid, bmonoid, cmonoid, dmonoid) - implicit def monoid5[A, B, C, D, E]( - implicit amonoid: Monoid[A], + implicit def monoid5[A, B, C, D, E](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -4945,8 +4945,8 @@ trait GeneratedMonoidImplicits { ): Monoid[(A, B, C, D, E)] = new 
Tuple5Monoid[A, B, C, D, E]()(amonoid, bmonoid, cmonoid, dmonoid, emonoid) - implicit def monoid6[A, B, C, D, E, F]( - implicit amonoid: Monoid[A], + implicit def monoid6[A, B, C, D, E, F](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -4955,8 +4955,8 @@ trait GeneratedMonoidImplicits { ): Monoid[(A, B, C, D, E, F)] = new Tuple6Monoid[A, B, C, D, E, F]()(amonoid, bmonoid, cmonoid, dmonoid, emonoid, fmonoid) - implicit def monoid7[A, B, C, D, E, F, G]( - implicit amonoid: Monoid[A], + implicit def monoid7[A, B, C, D, E, F, G](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -4966,8 +4966,8 @@ trait GeneratedMonoidImplicits { ): Monoid[(A, B, C, D, E, F, G)] = new Tuple7Monoid[A, B, C, D, E, F, G]()(amonoid, bmonoid, cmonoid, dmonoid, emonoid, fmonoid, gmonoid) - implicit def monoid8[A, B, C, D, E, F, G, H]( - implicit amonoid: Monoid[A], + implicit def monoid8[A, B, C, D, E, F, G, H](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -4987,8 +4987,8 @@ trait GeneratedMonoidImplicits { hmonoid ) - implicit def monoid9[A, B, C, D, E, F, G, H, I]( - implicit amonoid: Monoid[A], + implicit def monoid9[A, B, C, D, E, F, G, H, I](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5010,8 +5010,8 @@ trait GeneratedMonoidImplicits { imonoid ) - implicit def monoid10[A, B, C, D, E, F, G, H, I, J]( - implicit amonoid: Monoid[A], + implicit def monoid10[A, B, C, D, E, F, G, H, I, J](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5035,8 +5035,8 @@ trait GeneratedMonoidImplicits { jmonoid ) - implicit def monoid11[A, B, C, D, E, F, G, H, I, J, K]( - implicit amonoid: Monoid[A], + implicit def monoid11[A, B, C, D, E, F, G, H, I, J, K](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5062,8 +5062,8 @@ trait GeneratedMonoidImplicits { kmonoid ) - implicit def monoid12[A, B, C, D, E, F, G, H, I, J, K, L]( - implicit amonoid: Monoid[A], + implicit def monoid12[A, B, C, D, E, F, G, H, I, J, K, L](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5091,8 +5091,8 @@ trait GeneratedMonoidImplicits { lmonoid ) - implicit def monoid13[A, B, C, D, E, F, G, H, I, J, K, L, M]( - implicit amonoid: Monoid[A], + implicit def monoid13[A, B, C, D, E, F, G, H, I, J, K, L, M](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5122,8 +5122,8 @@ trait GeneratedMonoidImplicits { mmonoid ) - implicit def monoid14[A, B, C, D, E, F, G, H, I, J, K, L, M, N]( - implicit amonoid: Monoid[A], + implicit def monoid14[A, B, C, D, E, F, G, H, I, J, K, L, M, N](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5155,8 +5155,8 @@ trait GeneratedMonoidImplicits { nmonoid ) - implicit def monoid15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( - implicit amonoid: Monoid[A], + implicit def monoid15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5190,8 +5190,8 @@ trait GeneratedMonoidImplicits { omonoid ) - implicit def monoid16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( - implicit amonoid: Monoid[A], + implicit def monoid16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: 
Monoid[D], @@ -5227,8 +5227,8 @@ trait GeneratedMonoidImplicits { pmonoid ) - implicit def monoid17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( - implicit amonoid: Monoid[A], + implicit def monoid17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5266,8 +5266,8 @@ trait GeneratedMonoidImplicits { qmonoid ) - implicit def monoid18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( - implicit amonoid: Monoid[A], + implicit def monoid18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5307,8 +5307,8 @@ trait GeneratedMonoidImplicits { rmonoid ) - implicit def monoid19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( - implicit amonoid: Monoid[A], + implicit def monoid19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5350,8 +5350,8 @@ trait GeneratedMonoidImplicits { smonoid ) - implicit def monoid20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( - implicit amonoid: Monoid[A], + implicit def monoid20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5395,8 +5395,8 @@ trait GeneratedMonoidImplicits { tmonoid ) - implicit def monoid21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( - implicit amonoid: Monoid[A], + implicit def monoid21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5442,8 +5442,8 @@ trait GeneratedMonoidImplicits { umonoid ) - implicit def monoid22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( - implicit amonoid: Monoid[A], + implicit def monoid22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5497,23 +5497,23 @@ trait GeneratedGroupImplicits { implicit def group2[A, B](implicit agroup: Group[A], bgroup: Group[B]): Group[(A, B)] = new Tuple2Group[A, B]()(agroup, bgroup) - implicit def group3[A, B, C]( - implicit agroup: Group[A], + implicit def group3[A, B, C](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C] ): Group[(A, B, C)] = new Tuple3Group[A, B, C]()(agroup, bgroup, cgroup) - implicit def group4[A, B, C, D]( - implicit agroup: Group[A], + implicit def group4[A, B, C, D](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D] ): Group[(A, B, C, D)] = new Tuple4Group[A, B, C, D]()(agroup, bgroup, cgroup, dgroup) - implicit def group5[A, B, C, D, E]( - implicit agroup: Group[A], + implicit def group5[A, B, C, D, E](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5521,8 +5521,8 @@ trait GeneratedGroupImplicits { ): Group[(A, B, C, D, E)] = new Tuple5Group[A, B, C, D, E]()(agroup, bgroup, cgroup, dgroup, egroup) - implicit def group6[A, B, C, D, E, F]( - implicit agroup: Group[A], + implicit def group6[A, B, C, D, E, F](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5531,8 +5531,8 @@ trait GeneratedGroupImplicits { ): Group[(A, B, C, D, E, F)] = new Tuple6Group[A, B, C, D, E, F]()(agroup, bgroup, cgroup, dgroup, egroup, fgroup) - implicit def group7[A, B, C, D, E, F, 
G]( - implicit agroup: Group[A], + implicit def group7[A, B, C, D, E, F, G](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5542,8 +5542,8 @@ trait GeneratedGroupImplicits { ): Group[(A, B, C, D, E, F, G)] = new Tuple7Group[A, B, C, D, E, F, G]()(agroup, bgroup, cgroup, dgroup, egroup, fgroup, ggroup) - implicit def group8[A, B, C, D, E, F, G, H]( - implicit agroup: Group[A], + implicit def group8[A, B, C, D, E, F, G, H](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5554,8 +5554,8 @@ trait GeneratedGroupImplicits { ): Group[(A, B, C, D, E, F, G, H)] = new Tuple8Group[A, B, C, D, E, F, G, H]()(agroup, bgroup, cgroup, dgroup, egroup, fgroup, ggroup, hgroup) - implicit def group9[A, B, C, D, E, F, G, H, I]( - implicit agroup: Group[A], + implicit def group9[A, B, C, D, E, F, G, H, I](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5577,8 +5577,8 @@ trait GeneratedGroupImplicits { igroup ) - implicit def group10[A, B, C, D, E, F, G, H, I, J]( - implicit agroup: Group[A], + implicit def group10[A, B, C, D, E, F, G, H, I, J](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5602,8 +5602,8 @@ trait GeneratedGroupImplicits { jgroup ) - implicit def group11[A, B, C, D, E, F, G, H, I, J, K]( - implicit agroup: Group[A], + implicit def group11[A, B, C, D, E, F, G, H, I, J, K](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5629,8 +5629,8 @@ trait GeneratedGroupImplicits { kgroup ) - implicit def group12[A, B, C, D, E, F, G, H, I, J, K, L]( - implicit agroup: Group[A], + implicit def group12[A, B, C, D, E, F, G, H, I, J, K, L](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5658,8 +5658,8 @@ trait GeneratedGroupImplicits { lgroup ) - implicit def group13[A, B, C, D, E, F, G, H, I, J, K, L, M]( - implicit agroup: Group[A], + implicit def group13[A, B, C, D, E, F, G, H, I, J, K, L, M](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5689,8 +5689,8 @@ trait GeneratedGroupImplicits { mgroup ) - implicit def group14[A, B, C, D, E, F, G, H, I, J, K, L, M, N]( - implicit agroup: Group[A], + implicit def group14[A, B, C, D, E, F, G, H, I, J, K, L, M, N](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5722,8 +5722,8 @@ trait GeneratedGroupImplicits { ngroup ) - implicit def group15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( - implicit agroup: Group[A], + implicit def group15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5757,8 +5757,8 @@ trait GeneratedGroupImplicits { ogroup ) - implicit def group16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( - implicit agroup: Group[A], + implicit def group16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5794,8 +5794,8 @@ trait GeneratedGroupImplicits { pgroup ) - implicit def group17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( - implicit agroup: Group[A], + implicit def group17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5833,8 +5833,8 @@ trait GeneratedGroupImplicits { qgroup ) - implicit def group18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( - implicit agroup: Group[A], + 
implicit def group18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5874,8 +5874,8 @@ trait GeneratedGroupImplicits { rgroup ) - implicit def group19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( - implicit agroup: Group[A], + implicit def group19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5917,8 +5917,8 @@ trait GeneratedGroupImplicits { sgroup ) - implicit def group20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( - implicit agroup: Group[A], + implicit def group20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -5962,8 +5962,8 @@ trait GeneratedGroupImplicits { tgroup ) - implicit def group21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( - implicit agroup: Group[A], + implicit def group21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6009,8 +6009,8 @@ trait GeneratedGroupImplicits { ugroup ) - implicit def group22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( - implicit agroup: Group[A], + implicit def group22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6067,16 +6067,16 @@ trait GeneratedRingImplicits { implicit def ring3[A, B, C](implicit aring: Ring[A], bring: Ring[B], cring: Ring[C]): Ring[(A, B, C)] = new Tuple3Ring[A, B, C]()(aring, bring, cring) - implicit def ring4[A, B, C, D]( - implicit aring: Ring[A], + implicit def ring4[A, B, C, D](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D] ): Ring[(A, B, C, D)] = new Tuple4Ring[A, B, C, D]()(aring, bring, cring, dring) - implicit def ring5[A, B, C, D, E]( - implicit aring: Ring[A], + implicit def ring5[A, B, C, D, E](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6084,8 +6084,8 @@ trait GeneratedRingImplicits { ): Ring[(A, B, C, D, E)] = new Tuple5Ring[A, B, C, D, E]()(aring, bring, cring, dring, ering) - implicit def ring6[A, B, C, D, E, F]( - implicit aring: Ring[A], + implicit def ring6[A, B, C, D, E, F](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6094,8 +6094,8 @@ trait GeneratedRingImplicits { ): Ring[(A, B, C, D, E, F)] = new Tuple6Ring[A, B, C, D, E, F]()(aring, bring, cring, dring, ering, fring) - implicit def ring7[A, B, C, D, E, F, G]( - implicit aring: Ring[A], + implicit def ring7[A, B, C, D, E, F, G](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6105,8 +6105,8 @@ trait GeneratedRingImplicits { ): Ring[(A, B, C, D, E, F, G)] = new Tuple7Ring[A, B, C, D, E, F, G]()(aring, bring, cring, dring, ering, fring, gring) - implicit def ring8[A, B, C, D, E, F, G, H]( - implicit aring: Ring[A], + implicit def ring8[A, B, C, D, E, F, G, H](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6117,8 +6117,8 @@ trait GeneratedRingImplicits { ): Ring[(A, B, C, D, E, F, G, H)] = new Tuple8Ring[A, B, C, D, E, F, G, H]()(aring, bring, cring, dring, ering, fring, gring, hring) - implicit def ring9[A, B, C, D, E, F, G, H, I]( - implicit aring: Ring[A], + implicit def ring9[A, B, C, D, E, F, G, H, I](implicit + aring: Ring[A], bring: 
Ring[B], cring: Ring[C], dring: Ring[D], @@ -6130,8 +6130,8 @@ trait GeneratedRingImplicits { ): Ring[(A, B, C, D, E, F, G, H, I)] = new Tuple9Ring[A, B, C, D, E, F, G, H, I]()(aring, bring, cring, dring, ering, fring, gring, hring, iring) - implicit def ring10[A, B, C, D, E, F, G, H, I, J]( - implicit aring: Ring[A], + implicit def ring10[A, B, C, D, E, F, G, H, I, J](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6155,8 +6155,8 @@ trait GeneratedRingImplicits { jring ) - implicit def ring11[A, B, C, D, E, F, G, H, I, J, K]( - implicit aring: Ring[A], + implicit def ring11[A, B, C, D, E, F, G, H, I, J, K](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6182,8 +6182,8 @@ trait GeneratedRingImplicits { kring ) - implicit def ring12[A, B, C, D, E, F, G, H, I, J, K, L]( - implicit aring: Ring[A], + implicit def ring12[A, B, C, D, E, F, G, H, I, J, K, L](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6211,8 +6211,8 @@ trait GeneratedRingImplicits { lring ) - implicit def ring13[A, B, C, D, E, F, G, H, I, J, K, L, M]( - implicit aring: Ring[A], + implicit def ring13[A, B, C, D, E, F, G, H, I, J, K, L, M](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6242,8 +6242,8 @@ trait GeneratedRingImplicits { mring ) - implicit def ring14[A, B, C, D, E, F, G, H, I, J, K, L, M, N]( - implicit aring: Ring[A], + implicit def ring14[A, B, C, D, E, F, G, H, I, J, K, L, M, N](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6275,8 +6275,8 @@ trait GeneratedRingImplicits { nring ) - implicit def ring15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( - implicit aring: Ring[A], + implicit def ring15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6310,8 +6310,8 @@ trait GeneratedRingImplicits { oring ) - implicit def ring16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( - implicit aring: Ring[A], + implicit def ring16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6347,8 +6347,8 @@ trait GeneratedRingImplicits { pring ) - implicit def ring17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( - implicit aring: Ring[A], + implicit def ring17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6386,8 +6386,8 @@ trait GeneratedRingImplicits { qring ) - implicit def ring18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( - implicit aring: Ring[A], + implicit def ring18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6427,8 +6427,8 @@ trait GeneratedRingImplicits { rring ) - implicit def ring19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( - implicit aring: Ring[A], + implicit def ring19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6470,8 +6470,8 @@ trait GeneratedRingImplicits { sring ) - implicit def ring20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( - implicit aring: Ring[A], + implicit def ring20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6515,8 +6515,8 @@ trait GeneratedRingImplicits { tring ) - implicit def ring21[A, B, C, D, E, 
F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( - implicit aring: Ring[A], + implicit def ring21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6562,8 +6562,8 @@ trait GeneratedRingImplicits { uring ) - implicit def ring22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( - implicit aring: Ring[A], + implicit def ring22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], diff --git a/algebird-core/src/main/scala/com/twitter/algebird/GeneratedProductAlgebra.scala b/algebird-core/src/main/scala/com/twitter/algebird/GeneratedProductAlgebra.scala index 3ca72daba..60a820f77 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/GeneratedProductAlgebra.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/GeneratedProductAlgebra.scala @@ -6,8 +6,8 @@ import ArrayBufferedOperation.fromSumOption /** * Combine 2 semigroups into a product semigroup */ -class Product2Semigroup[X, A, B](apply: (A, B) => X, unapply: X => Option[(A, B)])( - implicit asemigroup: Semigroup[A], +class Product2Semigroup[X, A, B](apply: (A, B) => X, unapply: X => Option[(A, B)])(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B] ) extends Semigroup[X] { override def plus(l: X, r: X): X = { @@ -30,8 +30,8 @@ class Product2Semigroup[X, A, B](apply: (A, B) => X, unapply: X => Option[(A, B) /** * Combine 2 monoids into a product monoid */ -class Product2Monoid[X, A, B](apply: (A, B) => X, unapply: X => Option[(A, B)])( - implicit amonoid: Monoid[A], +class Product2Monoid[X, A, B](apply: (A, B) => X, unapply: X => Option[(A, B)])(implicit + amonoid: Monoid[A], bmonoid: Monoid[B] ) extends Product2Semigroup[X, A, B](apply: (A, B) => X, unapply: X => Option[(A, B)]) with Monoid[X] { @@ -41,8 +41,8 @@ class Product2Monoid[X, A, B](apply: (A, B) => X, unapply: X => Option[(A, B)])( /** * Combine 2 groups into a product group */ -class Product2Group[X, A, B](apply: (A, B) => X, unapply: X => Option[(A, B)])( - implicit agroup: Group[A], +class Product2Group[X, A, B](apply: (A, B) => X, unapply: X => Option[(A, B)])(implicit + agroup: Group[A], bgroup: Group[B] ) extends Product2Monoid[X, A, B](apply: (A, B) => X, unapply: X => Option[(A, B)]) with Group[X] { @@ -59,8 +59,8 @@ class Product2Group[X, A, B](apply: (A, B) => X, unapply: X => Option[(A, B)])( /** * Combine 2 rings into a product ring */ -class Product2Ring[X, A, B](apply: (A, B) => X, unapply: X => Option[(A, B)])( - implicit aring: Ring[A], +class Product2Ring[X, A, B](apply: (A, B) => X, unapply: X => Option[(A, B)])(implicit + aring: Ring[A], bring: Ring[B] ) extends Product2Group[X, A, B](apply: (A, B) => X, unapply: X => Option[(A, B)]) with Ring[X] { @@ -74,8 +74,8 @@ class Product2Ring[X, A, B](apply: (A, B) => X, unapply: X => Option[(A, B)])( /** * Combine 3 semigroups into a product semigroup */ -class Product3Semigroup[X, A, B, C](apply: (A, B, C) => X, unapply: X => Option[(A, B, C)])( - implicit asemigroup: Semigroup[A], +class Product3Semigroup[X, A, B, C](apply: (A, B, C) => X, unapply: X => Option[(A, B, C)])(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C] ) extends Semigroup[X] { @@ -104,8 +104,8 @@ class Product3Semigroup[X, A, B, C](apply: (A, B, C) => X, unapply: X => Option[ /** * Combine 3 monoids into a product monoid */ -class Product3Monoid[X, A, B, C](apply: (A, B, C) => X, unapply: 
X => Option[(A, B, C)])( - implicit amonoid: Monoid[A], +class Product3Monoid[X, A, B, C](apply: (A, B, C) => X, unapply: X => Option[(A, B, C)])(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C] ) extends Product3Semigroup[X, A, B, C](apply: (A, B, C) => X, unapply: X => Option[(A, B, C)]) @@ -116,8 +116,8 @@ class Product3Monoid[X, A, B, C](apply: (A, B, C) => X, unapply: X => Option[(A, /** * Combine 3 groups into a product group */ -class Product3Group[X, A, B, C](apply: (A, B, C) => X, unapply: X => Option[(A, B, C)])( - implicit agroup: Group[A], +class Product3Group[X, A, B, C](apply: (A, B, C) => X, unapply: X => Option[(A, B, C)])(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C] ) extends Product3Monoid[X, A, B, C](apply: (A, B, C) => X, unapply: X => Option[(A, B, C)]) @@ -139,8 +139,8 @@ class Product3Group[X, A, B, C](apply: (A, B, C) => X, unapply: X => Option[(A, /** * Combine 3 rings into a product ring */ -class Product3Ring[X, A, B, C](apply: (A, B, C) => X, unapply: X => Option[(A, B, C)])( - implicit aring: Ring[A], +class Product3Ring[X, A, B, C](apply: (A, B, C) => X, unapply: X => Option[(A, B, C)])(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C] ) extends Product3Group[X, A, B, C](apply: (A, B, C) => X, unapply: X => Option[(A, B, C)]) @@ -159,8 +159,8 @@ class Product3Ring[X, A, B, C](apply: (A, B, C) => X, unapply: X => Option[(A, B /** * Combine 4 semigroups into a product semigroup */ -class Product4Semigroup[X, A, B, C, D](apply: (A, B, C, D) => X, unapply: X => Option[(A, B, C, D)])( - implicit asemigroup: Semigroup[A], +class Product4Semigroup[X, A, B, C, D](apply: (A, B, C, D) => X, unapply: X => Option[(A, B, C, D)])(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D] @@ -193,8 +193,8 @@ class Product4Semigroup[X, A, B, C, D](apply: (A, B, C, D) => X, unapply: X => O /** * Combine 4 monoids into a product monoid */ -class Product4Monoid[X, A, B, C, D](apply: (A, B, C, D) => X, unapply: X => Option[(A, B, C, D)])( - implicit amonoid: Monoid[A], +class Product4Monoid[X, A, B, C, D](apply: (A, B, C, D) => X, unapply: X => Option[(A, B, C, D)])(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D] @@ -207,8 +207,8 @@ class Product4Monoid[X, A, B, C, D](apply: (A, B, C, D) => X, unapply: X => Opti /** * Combine 4 groups into a product group */ -class Product4Group[X, A, B, C, D](apply: (A, B, C, D) => X, unapply: X => Option[(A, B, C, D)])( - implicit agroup: Group[A], +class Product4Group[X, A, B, C, D](apply: (A, B, C, D) => X, unapply: X => Option[(A, B, C, D)])(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D] @@ -232,8 +232,8 @@ class Product4Group[X, A, B, C, D](apply: (A, B, C, D) => X, unapply: X => Optio /** * Combine 4 rings into a product ring */ -class Product4Ring[X, A, B, C, D](apply: (A, B, C, D) => X, unapply: X => Option[(A, B, C, D)])( - implicit aring: Ring[A], +class Product4Ring[X, A, B, C, D](apply: (A, B, C, D) => X, unapply: X => Option[(A, B, C, D)])(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D] @@ -255,7 +255,8 @@ class Product4Ring[X, A, B, C, D](apply: (A, B, C, D) => X, unapply: X => Option * Combine 5 semigroups into a product semigroup */ class Product5Semigroup[X, A, B, C, D, E](apply: (A, B, C, D, E) => X, unapply: X => Option[(A, B, C, D, E)])( - implicit asemigroup: Semigroup[A], + implicit + asemigroup: Semigroup[A], bsemigroup: 
Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -292,7 +293,8 @@ class Product5Semigroup[X, A, B, C, D, E](apply: (A, B, C, D, E) => X, unapply: * Combine 5 monoids into a product monoid */ class Product5Monoid[X, A, B, C, D, E](apply: (A, B, C, D, E) => X, unapply: X => Option[(A, B, C, D, E)])( - implicit amonoid: Monoid[A], + implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -310,7 +312,8 @@ class Product5Monoid[X, A, B, C, D, E](apply: (A, B, C, D, E) => X, unapply: X = * Combine 5 groups into a product group */ class Product5Group[X, A, B, C, D, E](apply: (A, B, C, D, E) => X, unapply: X => Option[(A, B, C, D, E)])( - implicit agroup: Group[A], + implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -346,7 +349,8 @@ class Product5Group[X, A, B, C, D, E](apply: (A, B, C, D, E) => X, unapply: X => * Combine 5 rings into a product ring */ class Product5Ring[X, A, B, C, D, E](apply: (A, B, C, D, E) => X, unapply: X => Option[(A, B, C, D, E)])( - implicit aring: Ring[A], + implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -376,8 +380,8 @@ class Product5Ring[X, A, B, C, D, E](apply: (A, B, C, D, E) => X, unapply: X => class Product6Semigroup[X, A, B, C, D, E, F]( apply: (A, B, C, D, E, F) => X, unapply: X => Option[(A, B, C, D, E, F)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -421,8 +425,8 @@ class Product6Semigroup[X, A, B, C, D, E, F]( class Product6Monoid[X, A, B, C, D, E, F]( apply: (A, B, C, D, E, F) => X, unapply: X => Option[(A, B, C, D, E, F)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -443,8 +447,8 @@ class Product6Monoid[X, A, B, C, D, E, F]( class Product6Group[X, A, B, C, D, E, F]( apply: (A, B, C, D, E, F) => X, unapply: X => Option[(A, B, C, D, E, F)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -512,8 +516,8 @@ class Product6Ring[X, A, B, C, D, E, F]( class Product7Semigroup[X, A, B, C, D, E, F, G]( apply: (A, B, C, D, E, F, G) => X, unapply: X => Option[(A, B, C, D, E, F, G)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -569,8 +573,8 @@ class Product7Semigroup[X, A, B, C, D, E, F, G]( class Product7Monoid[X, A, B, C, D, E, F, G]( apply: (A, B, C, D, E, F, G) => X, unapply: X => Option[(A, B, C, D, E, F, G)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -592,8 +596,8 @@ class Product7Monoid[X, A, B, C, D, E, F, G]( class Product7Group[X, A, B, C, D, E, F, G]( apply: (A, B, C, D, E, F, G) => X, unapply: X => Option[(A, B, C, D, E, F, G)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -637,8 +641,8 @@ class Product7Group[X, A, B, C, D, E, F, G]( class Product7Ring[X, A, B, C, D, E, F, G]( apply: (A, B, C, D, E, F, G) => X, unapply: X => Option[(A, B, C, D, E, F, G)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -672,8 +676,8 @@ class Product7Ring[X, A, B, C, D, E, F, G]( class Product8Semigroup[X, A, B, C, D, E, F, G, H]( apply: (A, B, 
C, D, E, F, G, H) => X, unapply: X => Option[(A, B, C, D, E, F, G, H)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -733,8 +737,8 @@ class Product8Semigroup[X, A, B, C, D, E, F, G, H]( class Product8Monoid[X, A, B, C, D, E, F, G, H]( apply: (A, B, C, D, E, F, G, H) => X, unapply: X => Option[(A, B, C, D, E, F, G, H)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -766,8 +770,8 @@ class Product8Monoid[X, A, B, C, D, E, F, G, H]( class Product8Group[X, A, B, C, D, E, F, G, H]( apply: (A, B, C, D, E, F, G, H) => X, unapply: X => Option[(A, B, C, D, E, F, G, H)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -814,8 +818,8 @@ class Product8Group[X, A, B, C, D, E, F, G, H]( class Product8Ring[X, A, B, C, D, E, F, G, H]( apply: (A, B, C, D, E, F, G, H) => X, unapply: X => Option[(A, B, C, D, E, F, G, H)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -851,8 +855,8 @@ class Product8Ring[X, A, B, C, D, E, F, G, H]( class Product9Semigroup[X, A, B, C, D, E, F, G, H, I]( apply: (A, B, C, D, E, F, G, H, I) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -916,8 +920,8 @@ class Product9Semigroup[X, A, B, C, D, E, F, G, H, I]( class Product9Monoid[X, A, B, C, D, E, F, G, H, I]( apply: (A, B, C, D, E, F, G, H, I) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -951,8 +955,8 @@ class Product9Monoid[X, A, B, C, D, E, F, G, H, I]( class Product9Group[X, A, B, C, D, E, F, G, H, I]( apply: (A, B, C, D, E, F, G, H, I) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -1002,8 +1006,8 @@ class Product9Group[X, A, B, C, D, E, F, G, H, I]( class Product9Ring[X, A, B, C, D, E, F, G, H, I]( apply: (A, B, C, D, E, F, G, H, I) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -1041,8 +1045,8 @@ class Product9Ring[X, A, B, C, D, E, F, G, H, I]( class Product10Semigroup[X, A, B, C, D, E, F, G, H, I, J]( apply: (A, B, C, D, E, F, G, H, I, J) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -1111,8 +1115,8 @@ class Product10Semigroup[X, A, B, C, D, E, F, G, H, I, J]( class Product10Monoid[X, A, B, C, D, E, F, G, H, I, J]( apply: (A, B, C, D, E, F, G, H, I, J) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -1148,8 +1152,8 @@ class Product10Monoid[X, A, B, C, D, E, F, G, H, I, J]( class Product10Group[X, A, B, C, D, E, F, G, H, I, J]( apply: (A, B, C, D, E, F, G, H, I, J) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J)] -)( 
- implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -1202,8 +1206,8 @@ class Product10Group[X, A, B, C, D, E, F, G, H, I, J]( class Product10Ring[X, A, B, C, D, E, F, G, H, I, J]( apply: (A, B, C, D, E, F, G, H, I, J) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -1254,8 +1258,8 @@ class Product10Ring[X, A, B, C, D, E, F, G, H, I, J]( class Product11Semigroup[X, A, B, C, D, E, F, G, H, I, J, K]( apply: (A, B, C, D, E, F, G, H, I, J, K) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -1328,8 +1332,8 @@ class Product11Semigroup[X, A, B, C, D, E, F, G, H, I, J, K]( class Product11Monoid[X, A, B, C, D, E, F, G, H, I, J, K]( apply: (A, B, C, D, E, F, G, H, I, J, K) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -1367,8 +1371,8 @@ class Product11Monoid[X, A, B, C, D, E, F, G, H, I, J, K]( class Product11Group[X, A, B, C, D, E, F, G, H, I, J, K]( apply: (A, B, C, D, E, F, G, H, I, J, K) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -1424,8 +1428,8 @@ class Product11Group[X, A, B, C, D, E, F, G, H, I, J, K]( class Product11Ring[X, A, B, C, D, E, F, G, H, I, J, K]( apply: (A, B, C, D, E, F, G, H, I, J, K) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -1479,8 +1483,8 @@ class Product11Ring[X, A, B, C, D, E, F, G, H, I, J, K]( class Product12Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L]( apply: (A, B, C, D, E, F, G, H, I, J, K, L) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -1557,8 +1561,8 @@ class Product12Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L]( class Product12Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L]( apply: (A, B, C, D, E, F, G, H, I, J, K, L) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -1598,8 +1602,8 @@ class Product12Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L]( class Product12Group[X, A, B, C, D, E, F, G, H, I, J, K, L]( apply: (A, B, C, D, E, F, G, H, I, J, K, L) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -1658,8 +1662,8 @@ class Product12Group[X, A, B, C, D, E, F, G, H, I, J, K, L]( class Product12Ring[X, A, B, C, D, E, F, G, H, I, J, K, L]( apply: (A, B, C, D, E, F, G, H, I, J, K, L) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -1716,8 +1720,8 @@ class Product12Ring[X, A, B, C, D, E, F, G, H, I, J, K, L]( 
class Product13Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -1799,8 +1803,8 @@ class Product13Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M]( class Product13Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -1842,8 +1846,8 @@ class Product13Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M]( class Product13Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -1905,8 +1909,8 @@ class Product13Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M]( class Product13Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -1966,8 +1970,8 @@ class Product13Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M]( class Product14Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -2053,8 +2057,8 @@ class Product14Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N]( class Product14Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -2098,8 +2102,8 @@ class Product14Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N]( class Product14Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -2164,8 +2168,8 @@ class Product14Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N]( class Product14Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -2228,8 +2232,8 @@ class Product14Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N]( class Product15Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], 
dsemigroup: Semigroup[D], @@ -2319,8 +2323,8 @@ class Product15Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( class Product15Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -2366,8 +2370,8 @@ class Product15Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( class Product15Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -2435,8 +2439,8 @@ class Product15Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( class Product15Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -2502,8 +2506,8 @@ class Product15Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( class Product16Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -2598,8 +2602,8 @@ class Product16Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( class Product16Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -2647,8 +2651,8 @@ class Product16Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( class Product16Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -2719,8 +2723,8 @@ class Product16Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( class Product16Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -2789,8 +2793,8 @@ class Product16Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( class Product17Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -2889,8 +2893,8 @@ class Product17Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( 
class Product17Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -2940,8 +2944,8 @@ class Product17Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( class Product17Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -3015,8 +3019,8 @@ class Product17Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( class Product17Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -3088,8 +3092,8 @@ class Product17Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( class Product18Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -3192,8 +3196,8 @@ class Product18Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R class Product18Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -3245,8 +3249,8 @@ class Product18Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( class Product18Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -3323,8 +3327,8 @@ class Product18Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( class Product18Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -3399,8 +3403,8 @@ class Product18Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( class Product19Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -3508,8 +3512,8 @@ class 
Product19Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R class Product19Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -3563,8 +3567,8 @@ class Product19Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S class Product19Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -3644,8 +3648,8 @@ class Product19Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] class Product19Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -3723,8 +3727,8 @@ class Product19Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( class Product20Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -3836,8 +3840,8 @@ class Product20Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R class Product20Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -3893,8 +3897,8 @@ class Product20Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S class Product20Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -3977,8 +3981,8 @@ class Product20Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, class Product20Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -4059,8 +4063,8 @@ class Product20Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, class Product21Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, 
I, J, K, L, M, N, O, P, Q, R, S, T, U)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4176,8 +4180,8 @@ class Product21Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R class Product21Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -4235,8 +4239,8 @@ class Product21Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S class Product21Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -4322,8 +4326,8 @@ class Product21Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, class Product21Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -4407,8 +4411,8 @@ class Product21Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, class Product22Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] -)( - implicit asemigroup: Semigroup[A], +)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4529,8 +4533,8 @@ class Product22Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R class Product22Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] -)( - implicit amonoid: Monoid[A], +)(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -4590,8 +4594,8 @@ class Product22Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S class Product22Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] -)( - implicit agroup: Group[A], +)(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -4680,8 +4684,8 @@ class Product22Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, class Product22Ring[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( apply: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) => X, unapply: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] -)( - implicit aring: Ring[A], +)(implicit + aring: Ring[A], bring: 
Ring[B], cring: Ring[C], dring: Ring[D], @@ -4775,16 +4779,16 @@ trait ProductSemigroups { )(implicit asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C]): Semigroup[X] = new Product3Semigroup[X, A, B, C](applyX, unapplyX)(asemigroup, bsemigroup, csemigroup) - def apply[X, A, B, C, D](applyX: (A, B, C, D) => X, unapplyX: X => Option[(A, B, C, D)])( - implicit asemigroup: Semigroup[A], + def apply[X, A, B, C, D](applyX: (A, B, C, D) => X, unapplyX: X => Option[(A, B, C, D)])(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D] ): Semigroup[X] = new Product4Semigroup[X, A, B, C, D](applyX, unapplyX)(asemigroup, bsemigroup, csemigroup, dsemigroup) - def apply[X, A, B, C, D, E](applyX: (A, B, C, D, E) => X, unapplyX: X => Option[(A, B, C, D, E)])( - implicit asemigroup: Semigroup[A], + def apply[X, A, B, C, D, E](applyX: (A, B, C, D, E) => X, unapplyX: X => Option[(A, B, C, D, E)])(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4799,7 +4803,8 @@ trait ProductSemigroups { ) def apply[X, A, B, C, D, E, F](applyX: (A, B, C, D, E, F) => X, unapplyX: X => Option[(A, B, C, D, E, F)])( - implicit asemigroup: Semigroup[A], + implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4818,8 +4823,8 @@ trait ProductSemigroups { def apply[X, A, B, C, D, E, F, G]( applyX: (A, B, C, D, E, F, G) => X, unapplyX: X => Option[(A, B, C, D, E, F, G)] - )( - implicit asemigroup: Semigroup[A], + )(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4840,8 +4845,8 @@ trait ProductSemigroups { def apply[X, A, B, C, D, E, F, G, H]( applyX: (A, B, C, D, E, F, G, H) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H)] - )( - implicit asemigroup: Semigroup[A], + )(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4864,8 +4869,8 @@ trait ProductSemigroups { def apply[X, A, B, C, D, E, F, G, H, I]( applyX: (A, B, C, D, E, F, G, H, I) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I)] - )( - implicit asemigroup: Semigroup[A], + )(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4890,8 +4895,8 @@ trait ProductSemigroups { def apply[X, A, B, C, D, E, F, G, H, I, J]( applyX: (A, B, C, D, E, F, G, H, I, J) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J)] - )( - implicit asemigroup: Semigroup[A], + )(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4918,8 +4923,8 @@ trait ProductSemigroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K]( applyX: (A, B, C, D, E, F, G, H, I, J, K) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K)] - )( - implicit asemigroup: Semigroup[A], + )(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4948,8 +4953,8 @@ trait ProductSemigroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L)] - )( - implicit asemigroup: Semigroup[A], + )(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -4980,8 +4985,8 @@ trait ProductSemigroups { def apply[X, A, 
B, C, D, E, F, G, H, I, J, K, L, M]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M)] - )( - implicit asemigroup: Semigroup[A], + )(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -5014,8 +5019,8 @@ trait ProductSemigroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] - )( - implicit asemigroup: Semigroup[A], + )(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -5050,8 +5055,8 @@ trait ProductSemigroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] - )( - implicit asemigroup: Semigroup[A], + )(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -5088,8 +5093,8 @@ trait ProductSemigroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] - )( - implicit asemigroup: Semigroup[A], + )(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -5128,8 +5133,8 @@ trait ProductSemigroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] - )( - implicit asemigroup: Semigroup[A], + )(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -5170,8 +5175,8 @@ trait ProductSemigroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] - )( - implicit asemigroup: Semigroup[A], + )(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -5214,8 +5219,8 @@ trait ProductSemigroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] - )( - implicit asemigroup: Semigroup[A], + )(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -5260,8 +5265,8 @@ trait ProductSemigroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] - )( - implicit asemigroup: Semigroup[A], + )(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -5308,8 +5313,8 @@ trait ProductSemigroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] - )( - implicit asemigroup: Semigroup[A], + 
)(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -5361,8 +5366,8 @@ trait ProductSemigroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] - )( - implicit asemigroup: Semigroup[A], + )(implicit + asemigroup: Semigroup[A], bsemigroup: Semigroup[B], csemigroup: Semigroup[C], dsemigroup: Semigroup[D], @@ -5434,8 +5439,8 @@ trait ProductMonoids { )(implicit amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D]): Monoid[X] = new Product4Monoid[X, A, B, C, D](applyX, unapplyX)(amonoid, bmonoid, cmonoid, dmonoid) - def apply[X, A, B, C, D, E](applyX: (A, B, C, D, E) => X, unapplyX: X => Option[(A, B, C, D, E)])( - implicit amonoid: Monoid[A], + def apply[X, A, B, C, D, E](applyX: (A, B, C, D, E) => X, unapplyX: X => Option[(A, B, C, D, E)])(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5444,7 +5449,8 @@ trait ProductMonoids { new Product5Monoid[X, A, B, C, D, E](applyX, unapplyX)(amonoid, bmonoid, cmonoid, dmonoid, emonoid) def apply[X, A, B, C, D, E, F](applyX: (A, B, C, D, E, F) => X, unapplyX: X => Option[(A, B, C, D, E, F)])( - implicit amonoid: Monoid[A], + implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5463,8 +5469,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G]( applyX: (A, B, C, D, E, F, G) => X, unapplyX: X => Option[(A, B, C, D, E, F, G)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5485,8 +5491,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G, H]( applyX: (A, B, C, D, E, F, G, H) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5509,8 +5515,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G, H, I]( applyX: (A, B, C, D, E, F, G, H, I) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5535,8 +5541,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G, H, I, J]( applyX: (A, B, C, D, E, F, G, H, I, J) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5563,8 +5569,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G, H, I, J, K]( applyX: (A, B, C, D, E, F, G, H, I, J, K) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5593,8 +5599,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5625,8 +5631,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M) => X, 
unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5659,8 +5665,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5695,8 +5701,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5733,8 +5739,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5773,8 +5779,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5815,8 +5821,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5859,8 +5865,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5905,8 +5911,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -5953,8 +5959,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -6003,8 +6009,8 @@ trait ProductMonoids { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, 
N, O, P, Q, R, S, T, U, V)] - )( - implicit amonoid: Monoid[A], + )(implicit + amonoid: Monoid[A], bmonoid: Monoid[B], cmonoid: Monoid[C], dmonoid: Monoid[D], @@ -6076,8 +6082,8 @@ trait ProductGroups { )(implicit agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D]): Group[X] = new Product4Group[X, A, B, C, D](applyX, unapplyX)(agroup, bgroup, cgroup, dgroup) - def apply[X, A, B, C, D, E](applyX: (A, B, C, D, E) => X, unapplyX: X => Option[(A, B, C, D, E)])( - implicit agroup: Group[A], + def apply[X, A, B, C, D, E](applyX: (A, B, C, D, E) => X, unapplyX: X => Option[(A, B, C, D, E)])(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6086,7 +6092,8 @@ trait ProductGroups { new Product5Group[X, A, B, C, D, E](applyX, unapplyX)(agroup, bgroup, cgroup, dgroup, egroup) def apply[X, A, B, C, D, E, F](applyX: (A, B, C, D, E, F) => X, unapplyX: X => Option[(A, B, C, D, E, F)])( - implicit agroup: Group[A], + implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6098,8 +6105,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G]( applyX: (A, B, C, D, E, F, G) => X, unapplyX: X => Option[(A, B, C, D, E, F, G)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6120,8 +6127,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G, H]( applyX: (A, B, C, D, E, F, G, H) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6144,8 +6151,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G, H, I]( applyX: (A, B, C, D, E, F, G, H, I) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6170,8 +6177,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G, H, I, J]( applyX: (A, B, C, D, E, F, G, H, I, J) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6198,8 +6205,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K]( applyX: (A, B, C, D, E, F, G, H, I, J, K) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6228,8 +6235,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6260,8 +6267,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6294,8 +6301,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], 
@@ -6330,8 +6337,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6368,8 +6375,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6408,8 +6415,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6450,8 +6457,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6494,8 +6501,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6540,8 +6547,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6588,8 +6595,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6638,8 +6645,8 @@ trait ProductGroups { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] - )( - implicit agroup: Group[A], + )(implicit + agroup: Group[A], bgroup: Group[B], cgroup: Group[C], dgroup: Group[D], @@ -6715,7 +6722,8 @@ trait ProductRings { new Product5Ring[X, A, B, C, D, E](applyX, unapplyX)(aring, bring, cring, dring, ering) def apply[X, A, B, C, D, E, F](applyX: (A, B, C, D, E, F) => X, unapplyX: X => Option[(A, B, C, D, E, F)])( - implicit aring: Ring[A], + implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6727,8 +6735,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G]( applyX: (A, B, C, D, E, F, G) => X, unapplyX: X => 
Option[(A, B, C, D, E, F, G)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6749,8 +6757,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G, H]( applyX: (A, B, C, D, E, F, G, H) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6773,8 +6781,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G, H, I]( applyX: (A, B, C, D, E, F, G, H, I) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6799,8 +6807,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G, H, I, J]( applyX: (A, B, C, D, E, F, G, H, I, J) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6827,8 +6835,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G, H, I, J, K]( applyX: (A, B, C, D, E, F, G, H, I, J, K) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6857,8 +6865,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6889,8 +6897,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6923,8 +6931,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6959,8 +6967,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -6997,8 +7005,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -7037,8 +7045,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -7079,8 +7087,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) => X, unapplyX: X => Option[(A, B, C, D, E, 
F, G, H, I, J, K, L, M, N, O, P, Q, R)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -7123,8 +7131,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -7169,8 +7177,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -7217,8 +7225,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], @@ -7267,8 +7275,8 @@ trait ProductRings { def apply[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( applyX: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) => X, unapplyX: X => Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] - )( - implicit aring: Ring[A], + )(implicit + aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], diff --git a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala index d9c574946..376c5ea21 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala @@ -373,7 +373,7 @@ sealed abstract class HLL extends java.io.Serializable { else { val reducedSize = 1 << reducedBits // bit mask to set MSBs to 1. 
Makes rhoW exit fast - val bitMask = 0xFFFFFFFF << bits + val bitMask = 0xffffffff << bits val buf = new Array[Byte](4) downsize(reducedBits, reducedSize, bitMask, buf) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala index 328e00c33..ef500d27b 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala @@ -92,10 +92,13 @@ case class HLLSeries(bits: Int, rows: Vector[Map[Int, Long]]) { else { monoid.sum(rows.iterator.zipWithIndex.map { case (map, i) => - SparseHLL(bits, map.transform { - case _ => - Max((i + 1).toByte) - }) + SparseHLL( + bits, + map.transform { + case _ => + Max((i + 1).toByte) + } + ) }) } } @@ -153,10 +156,13 @@ class HyperLogLogSeriesMonoid(val bits: Int) extends Monoid[HLLSeries] { } else { left.foldLeft(right) { case (m, (k, lv)) => - m.updated(k, m.get(k) match { - case None => lv - case Some(rv) => Math.max(lv, rv) - }) + m.updated( + k, + m.get(k) match { + case None => lv + case Some(rv) => Math.max(lv, rv) + } + ) } } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala b/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala index 33094f881..abbdc80ad 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala @@ -385,8 +385,8 @@ case class Intersection[L[t] <: Lower[t], U[t] <: Upper[t], T](lower: L[T], uppe * it this way, it does not mean it is empty or universe, etc... (there * are other cases). */ - def toLeftClosedRightOpen( - implicit s: Successible[T] + def toLeftClosedRightOpen(implicit + s: Successible[T] ): Option[Intersection[InclusiveLower, ExclusiveUpper, T]] = for { l <- lower.least diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala b/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala index ed2616264..39a7ba437 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala @@ -239,13 +239,16 @@ object MapAlgebra { /** join the keys of two maps (similar to outer-join in a DB) */ def join[K, V, W](map1: Map[K, V], map2: Map[K, W]): Map[K, (Option[V], Option[W])] = Monoid - .plus(map1.transform { - case (_, v) => - (List(v), List[W]()) - }, map2.transform { - case (_, w) => - (List[V](), List(w)) - }) + .plus( + map1.transform { + case (_, v) => + (List(v), List[W]()) + }, + map2.transform { + case (_, w) => + (List[V](), List(w)) + } + ) .transform { case (_, (v, w)) => (v.headOption, w.headOption) } /** diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala index 2c96aeda3..0f75f2b53 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala @@ -97,8 +97,8 @@ object MomentsGroup extends Group[Moments] with CommutativeGroup[Moments] { if (n < k) getCombinedMean(k, ak, n, an) else (n + k) match { - case 0L => 0.0 - case newCount if (newCount == n) => an + case 0L => 0.0 + case newCount if newCount == n => an case newCount => val scaling = k.toDouble / newCount // a_n + (a_k - a_n)*(k/(n+k)) is only stable if n is not approximately k diff --git 
a/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala index 3962a0321..4a2aa6366 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala @@ -42,7 +42,7 @@ trait Monoid[@specialized(Int, Long, Float, Double) T] extends Semigroup[T] with AMonoid[T] with AdditiveMonoid[T] { - def isNonZero(v: T): Boolean = (v != zero) + def isNonZero(v: T): Boolean = v != zero def assertNotZero(v: T): Unit = if (!isNonZero(v)) { throw new java.lang.IllegalArgumentException("argument should not be zero") diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MurmurHash.scala b/algebird-core/src/main/scala/com/twitter/algebird/MurmurHash.scala index 6a5c4653e..23ca3bc23 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MurmurHash.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MurmurHash.scala @@ -39,8 +39,11 @@ case class MurmurHash128(seed: Long) extends AnyVal { apply(8, _.asDoubleBuffer.put(value)) def apply(string: CharSequence): (Long, Long) = - apply(string.length * 2, { buffer => - val charBuffer = buffer.asCharBuffer - 0.to(string.length - 1).foreach(i => charBuffer.put(string.charAt(i))) - }) + apply( + string.length * 2, + { buffer => + val charBuffer = buffer.asCharBuffer + 0.to(string.length - 1).foreach(i => charBuffer.put(string.charAt(i))) + } + ) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala index 3db3a4c0d..24ba33644 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala @@ -122,8 +122,8 @@ object QTree { ) } - private def mergeOptions[A](aNullable: QTree[A], bNullable: QTree[A])( - implicit monoid: Monoid[A] + private def mergeOptions[A](aNullable: QTree[A], bNullable: QTree[A])(implicit + monoid: Monoid[A] ): QTree[A] = if (aNullable != null) { if (bNullable != null) { @@ -485,8 +485,11 @@ object QTreeAggregator { * The items that are iterated over to produce this approximation cannot be negative. * Returns an Intersection which represents the bounded approximation. 
*/ -case class QTreeAggregator[T](override val percentile: Double, override val k: Int = QTreeAggregator.DefaultK)( - implicit override val num: Numeric[T] +case class QTreeAggregator[T]( + override val percentile: Double, + override val k: Int = QTreeAggregator.DefaultK +)(implicit + override val num: Numeric[T] ) extends Aggregator[T, QTree[Unit], Intersection[InclusiveLower, InclusiveUpper, Double]] with QTreeAggregatorLike[T] { @@ -505,8 +508,8 @@ case class QTreeAggregator[T](override val percentile: Double, override val k: I case class QTreeAggregatorLowerBound[T]( override val percentile: Double, override val k: Int = QTreeAggregator.DefaultK -)( - implicit override val num: Numeric[T] +)(implicit + override val num: Numeric[T] ) extends Aggregator[T, QTree[Unit], Double] with QTreeAggregatorLike[T] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/RightFolded2.scala b/algebird-core/src/main/scala/com/twitter/algebird/RightFolded2.scala index 5da8587ab..3cff3ebec 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/RightFolded2.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/RightFolded2.scala @@ -40,8 +40,8 @@ object RightFolded2 { new RightFolded2Monoid[In, Out, Acc](foldfn, trans) } -class RightFolded2Monoid[In, Out, Acc](foldfn: (In, Out) => Out, accfn: (Out) => Acc)( - implicit grpAcc: Group[Acc] +class RightFolded2Monoid[In, Out, Acc](foldfn: (In, Out) => Out, accfn: (Out) => Acc)(implicit + grpAcc: Group[Acc] ) extends Monoid[RightFolded2[In, Out, Acc]] { override val zero: RightFoldedZero2.type = RightFoldedZero2 diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala b/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala index 562442703..30a287749 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala @@ -156,8 +156,8 @@ object Semigroup } @tailrec - private def intTimesRec[T](i: BigInt, v: T, pow: Int, vaccMemo: (T, Vector[T]))( - implicit sg: Semigroup[T] + private def intTimesRec[T](i: BigInt, v: T, pow: Int, vaccMemo: (T, Vector[T]))(implicit + sg: Semigroup[T] ): T = if (i == 0) { vaccMemo._1 diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala b/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala index 738567a5a..9189f27a5 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala @@ -37,8 +37,8 @@ case class SketchMapHash[K](hasher: CMSHash[Long], seed: Int)(implicit serializa /** * Responsible for creating instances of SketchMap. */ -class SketchMapMonoid[K, V](val params: SketchMapParams[K])( - implicit valueOrdering: Ordering[V], +class SketchMapMonoid[K, V](val params: SketchMapParams[K])(implicit + valueOrdering: Ordering[V], monoid: Monoid[V] ) extends Monoid[SketchMap[K, V]] with CommutativeMonoid[SketchMap[K, V]] { @@ -134,8 +134,8 @@ class SketchMapMonoid[K, V](val params: SketchMapParams[K])( /** * Convenience class for holding constant parameters of a Sketch Map. 
*/ -case class SketchMapParams[K](seed: Int, width: Int, depth: Int, heavyHittersCount: Int)( - implicit serialization: K => Array[Byte] +case class SketchMapParams[K](seed: Int, width: Int, depth: Int, heavyHittersCount: Int)(implicit + serialization: K => Array[Byte] ) { assert(0 < width, "width must be greater than 0") assert(0 < depth, "depth must be greater than 0") @@ -177,8 +177,8 @@ object SketchMapParams { /** * Overloaded apply method for convenience. */ - def apply[K](seed: Int, eps: Double, delta: Double, heavyHittersCount: Int)( - implicit serialization: K => Array[Byte] + def apply[K](seed: Int, eps: Double, delta: Double, heavyHittersCount: Int)(implicit + serialization: K => Array[Byte] ): SketchMapParams[K] = SketchMapParams[K](seed, width(eps), depth(delta), heavyHittersCount)(serialization) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala index 18b104f2a..591bfb5df 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala @@ -50,9 +50,9 @@ object SpaceSaver { buffer case SSMany( - capacity, - counters, - _ + capacity, + counters, + _ ) => //We do not care about the buckets are thery are created by SSMany.apply val buffer = scala.collection.mutable.ArrayBuffer.newBuilder[Byte] buffer += (2: Byte) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala index 2c8d540de..b67f8203f 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala @@ -22,8 +22,8 @@ case class TopK[N](size: Int, items: List[N], max: Option[N]) object TopKMonoid extends java.io.Serializable { // Does a merge sort and returns the reversed list @tailrec - private[algebird] def mergeSortR[T](acc: List[T], list1: List[T], list2: List[T], cnt: Int)( - implicit ord: Ordering[T] + private[algebird] def mergeSortR[T](acc: List[T], list1: List[T], list2: List[T], cnt: Int)(implicit + ord: Ordering[T] ): List[T] = (list1, list2, cnt) match { case (_, _, 0) => acc diff --git a/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala b/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala index 929694a66..8e2465eb7 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala @@ -32,8 +32,8 @@ sealed trait StateWithError[S, +F, +T] { ): StateWithError[S, F1, (T, U)] = join(that)(Semigroup.from(mergeErr), Semigroup.from(mergeState)) - def join[F1 >: F, U](that: StateWithError[S, F1, U])( - implicit sgf: Semigroup[F1], + def join[F1 >: F, U](that: StateWithError[S, F1, U])(implicit + sgf: Semigroup[F1], sgs: Semigroup[S] ): // TODO: deep joins could blow the stack, not yet using trampoline here StateWithError[S, F1, (T, U)] = diff --git a/algebird-generic/src/main/scala/com/twitter/algebird/generic/EquivOrdering.scala b/algebird-generic/src/main/scala/com/twitter/algebird/generic/EquivOrdering.scala index e164ddd27..eadbcd899 100644 --- a/algebird-generic/src/main/scala/com/twitter/algebird/generic/EquivOrdering.scala +++ b/algebird-generic/src/main/scala/com/twitter/algebird/generic/EquivOrdering.scala @@ -3,8 +3,7 @@ package com.twitter.algebird.generic import shapeless._ object 
EquivOrdering extends EquivOrdering1 { - implicit def hconsOrdering[A, B <: HList]( - implicit + implicit def hconsOrdering[A, B <: HList](implicit a: Ordering[A], lb: Lazy[Ordering[B]] ): Ordering[A :: B] = diff --git a/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala b/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala index c8b09164b..c64cd62cd 100644 --- a/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala +++ b/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala @@ -15,8 +15,7 @@ abstract class Shapeless3 extends Shapeless2 { /** * Pairwise ring for arbitrary heterogeneous lists (HList). */ - implicit def hconsRing[A, B <: HList]( - implicit + implicit def hconsRing[A, B <: HList](implicit a: Ring[A], lb: Lazy[Ring[B]] ): Ring[A :: B] = @@ -40,8 +39,7 @@ abstract class Shapeless2 extends Shapeless1 { /** * Pairwise group for arbitrary heterogeneous lists (HList). */ - implicit def hconsGroup[A, B <: HList]( - implicit + implicit def hconsGroup[A, B <: HList](implicit a: Group[A], lb: Lazy[Group[B]] ): Group[A :: B] = @@ -65,8 +63,7 @@ abstract class Shapeless1 extends Shapeless0 { /** * Pairwise monoid for arbitrary heterogeneous lists (HList). */ - implicit def hconsMonoid[A, B <: HList]( - implicit + implicit def hconsMonoid[A, B <: HList](implicit a: Monoid[A], lb: Lazy[Monoid[B]] ): Monoid[A :: B] = @@ -90,8 +87,7 @@ abstract class Shapeless0 { /** * Pairwise monoid for arbitrary heterogeneous lists (HList). */ - implicit def hconsSemigroup[A, B <: HList]( - implicit + implicit def hconsSemigroup[A, B <: HList](implicit a: Semigroup[A], lb: Lazy[Semigroup[B]] ): Semigroup[A :: B] = diff --git a/algebird-spark/src/main/scala/com/twitter/algebird/spark/AlgebirdRDD.scala b/algebird-spark/src/main/scala/com/twitter/algebird/spark/AlgebirdRDD.scala index 65bb32b3f..a1e467f47 100644 --- a/algebird-spark/src/main/scala/com/twitter/algebird/spark/AlgebirdRDD.scala +++ b/algebird-spark/src/main/scala/com/twitter/algebird/spark/AlgebirdRDD.scala @@ -92,8 +92,8 @@ class AlgebirdRDD[T](val rdd: RDD[T]) extends AnyVal { * requires a commutative Semigroup. To generalize to non-commutative, we need a sorted partition for * T. 
*/ - def sumByKey[K: ClassTag, V: ClassTag: Semigroup]( - implicit ev: T <:< (K, V), + def sumByKey[K: ClassTag, V: ClassTag: Semigroup](implicit + ev: T <:< (K, V), ord: Priority[Ordering[K], DummyImplicit] ): RDD[(K, V)] = sumByKey(Partitioner.defaultPartitioner(rdd)) diff --git a/algebird-test/src/main/scala/com/twitter/algebird/ApplicativeLaws.scala b/algebird-test/src/main/scala/com/twitter/algebird/ApplicativeLaws.scala index 0bf777615..e6b9b4670 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/ApplicativeLaws.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/ApplicativeLaws.scala @@ -25,16 +25,14 @@ import org.scalacheck.Prop.forAll object ApplicativeLaws { import BaseProperties.{DefaultHigherEq, HigherEq} - def applyLaw[M[_], T, U](eq: HigherEq[M] = new DefaultHigherEq[M])( - implicit + def applyLaw[M[_], T, U](eq: HigherEq[M] = new DefaultHigherEq[M])(implicit app: Applicative[M], arb: Arbitrary[T], arbFn: Arbitrary[T => U] ): Prop = forAll((t: T, fn: T => U) => eq(app.map(app.apply(t))(fn), app.apply(fn(t)))) - def joinLaw[M[_], T, U](eq: HigherEq[M] = new DefaultHigherEq[M])( - implicit + def joinLaw[M[_], T, U](eq: HigherEq[M] = new DefaultHigherEq[M])(implicit app: Applicative[M], arb1: Arbitrary[T], arb2: Arbitrary[U] @@ -43,15 +41,13 @@ object ApplicativeLaws { // These follow from apply and join: - def sequenceLaw[M[_], T](eq: HigherEq[M] = new DefaultHigherEq[M])( - implicit + def sequenceLaw[M[_], T](eq: HigherEq[M] = new DefaultHigherEq[M])(implicit app: Applicative[M], arb: Arbitrary[Seq[T]] ): Prop = forAll((ts: Seq[T]) => eq(app.sequence(ts.map(app.apply(_))), app.apply(ts))) - def joinWithLaw[M[_], T, U, V](eq: HigherEq[M] = new DefaultHigherEq[M])( - implicit + def joinWithLaw[M[_], T, U, V](eq: HigherEq[M] = new DefaultHigherEq[M])(implicit app: Applicative[M], arbT: Arbitrary[T], arbU: Arbitrary[U], @@ -61,8 +57,7 @@ object ApplicativeLaws { eq(app.joinWith(app.apply(t), app.apply(u))(fn), app.apply(fn(t, u))) } - def applicativeLaws[M[_], T, U, V](eq: HigherEq[M] = new DefaultHigherEq[M])( - implicit + def applicativeLaws[M[_], T, U, V](eq: HigherEq[M] = new DefaultHigherEq[M])(implicit app: Applicative[M], arbMt: Arbitrary[T], arbMts: Arbitrary[Seq[T]], diff --git a/algebird-test/src/main/scala/com/twitter/algebird/BaseVectorSpaceProperties.scala b/algebird-test/src/main/scala/com/twitter/algebird/BaseVectorSpaceProperties.scala index ed3edaa54..faf0b9e9e 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/BaseVectorSpaceProperties.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/BaseVectorSpaceProperties.scala @@ -63,7 +63,9 @@ object BaseVectorSpaceProperties { def vectorSpaceLaws[F, C[_]]( eqfn: (C[F], C[F]) => Boolean )(implicit vs: VectorSpace[F, C], arbC: Arbitrary[C[F]], arbF: Arbitrary[F]) = - isEqualIfZero(eqfn) && distributesWithPlus(eqfn) && isAssociative(eqfn) && identityOne(eqfn) && distributesOverScalarPlus( + isEqualIfZero(eqfn) && distributesWithPlus(eqfn) && isAssociative(eqfn) && identityOne( + eqfn + ) && distributesOverScalarPlus( eqfn ) diff --git a/algebird-test/src/main/scala/com/twitter/algebird/FunctorLaws.scala b/algebird-test/src/main/scala/com/twitter/algebird/FunctorLaws.scala index aa1d8121d..c9aadd7c5 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/FunctorLaws.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/FunctorLaws.scala @@ -25,15 +25,13 @@ import org.scalacheck.Prop.forAll object FunctorLaws { import BaseProperties.{DefaultHigherEq, HigherEq} - def 
identityLaw[M[_], V](eq: HigherEq[M] = new DefaultHigherEq[M])( - implicit + def identityLaw[M[_], V](eq: HigherEq[M] = new DefaultHigherEq[M])(implicit functor: Functor[M], arb: Arbitrary[M[V]] ): Prop = forAll((mv: M[V]) => eq(functor.map(mv)(x => x), mv)) - def composeLaw[M[_], T, U, V](eq: HigherEq[M] = new DefaultHigherEq[M])( - implicit + def composeLaw[M[_], T, U, V](eq: HigherEq[M] = new DefaultHigherEq[M])(implicit functor: Functor[M], arb: Arbitrary[M[T]], arbFn1: Arbitrary[T => U], @@ -43,8 +41,7 @@ object FunctorLaws { eq(functor.map(mt)(fn1.andThen(fn2)), functor.map(functor.map(mt)(fn1))(fn2)) } - def functorLaws[M[_], T, U, V](eq: HigherEq[M] = new DefaultHigherEq[M])( - implicit + def functorLaws[M[_], T, U, V](eq: HigherEq[M] = new DefaultHigherEq[M])(implicit functor: Functor[M], arbMt: Arbitrary[M[T]], arbMv: Arbitrary[M[V]], diff --git a/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala b/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala index 43e0e6116..ddfafef96 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala @@ -30,8 +30,7 @@ object MonadLaws { def defaultEq[T] = { (t0: T, t1: T) => (t0 == t1) } @deprecated("use leftIdentity[T]", since = "0.13.0") - def leftIdentityEquiv[M[_], T, U]( - implicit + def leftIdentityEquiv[M[_], T, U](implicit monad: Monad[M], arb: Arbitrary[T], arbfn: Arbitrary[(T) => M[U]], @@ -44,8 +43,8 @@ object MonadLaws { rightIdentity[M, T] @deprecated("use associative[T]", since = "0.13.0") - def associativeEquiv[M[_], T, U, V]( - implicit monad: Monad[M], + def associativeEquiv[M[_], T, U, V](implicit + monad: Monad[M], arb: Arbitrary[M[T]], fn1: Arbitrary[(T) => M[U]], fn2: Arbitrary[U => M[V]], @@ -54,8 +53,7 @@ object MonadLaws { associative[M, T, U, V] // Just generate a map and use that as a function: - implicit def fnArb[M[_], T, U]( - implicit + implicit def fnArb[M[_], T, U](implicit map: Arbitrary[Map[T, M[U]]], arbu: Arbitrary[M[U]] ): Arbitrary[T => M[U]] = @@ -67,8 +65,7 @@ object MonadLaws { } @deprecated("use monadLaws[T]", since = "0.13.0") - def monadLawsEquiv[M[_], T, U, R]( - implicit + def monadLawsEquiv[M[_], T, U, R](implicit monad: Monad[M], arb: Arbitrary[M[T]], equivT: Equiv[M[T]], @@ -82,8 +79,7 @@ object MonadLaws { monadLaws[M, T, U, R] // $COVERAGE-ON$ - def leftIdentity[M[_], T, U]( - implicit + def leftIdentity[M[_], T, U](implicit monad: Monad[M], arb: Arbitrary[T], arbfn: Arbitrary[(T) => M[U]], @@ -94,8 +90,8 @@ object MonadLaws { def rightIdentity[M[_], T](implicit monad: Monad[M], arb: Arbitrary[M[T]], equiv: Equiv[M[T]]) = forAll((mt: M[T]) => Equiv[M[T]].equiv(mt.flatMap(_.pure[M]), mt)) - def associative[M[_], T, U, V]( - implicit monad: Monad[M], + def associative[M[_], T, U, V](implicit + monad: Monad[M], arb: Arbitrary[M[T]], fn1: Arbitrary[(T) => M[U]], fn2: Arbitrary[U => M[V]], @@ -104,8 +100,7 @@ object MonadLaws { Equiv[M[V]].equiv(mt.flatMap(f1).flatMap(f2), mt.flatMap(t => f1(t).flatMap(f2))) } - def monadLaws[M[_], T, U, R]( - implicit + def monadLaws[M[_], T, U, R](implicit monad: Monad[M], arb: Arbitrary[M[T]], equivT: Equiv[M[T]], diff --git a/algebird-test/src/main/scala/com/twitter/algebird/StatefulSummerLaws.scala b/algebird-test/src/main/scala/com/twitter/algebird/StatefulSummerLaws.scala index 422e11ad5..929f24b7d 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/StatefulSummerLaws.scala +++ 
b/algebird-test/src/main/scala/com/twitter/algebird/StatefulSummerLaws.scala @@ -37,9 +37,12 @@ object StatefulSummerLaws { def sumIsPreserved[V: Semigroup: Equiv](summer: StatefulSummer[V], items: Iterable[V]): Boolean = { summer.flush val sg = Semigroup.sumOption(items) - val wsummer = Monoid.plus(Monoid.sum(items.map(summer.put(_)).filter { - _.isDefined - }), summer.flush) + val wsummer = Monoid.plus( + Monoid.sum(items.map(summer.put(_)).filter { + _.isDefined + }), + summer.flush + ) zeroEquiv(sg, wsummer) && summer.isFlushed } // Law 2: diff --git a/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala index a638e3c0f..84636ee52 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala @@ -21,8 +21,8 @@ import org.scalacheck.Prop._ class AggregatorLaws extends CheckProperties { - implicit def aggregator[A, B, C]( - implicit prepare: Arbitrary[A => B], + implicit def aggregator[A, B, C](implicit + prepare: Arbitrary[A => B], sg: Semigroup[B], present: Arbitrary[B => C] ): Arbitrary[Aggregator[A, B, C]] = Arbitrary { @@ -101,8 +101,8 @@ class AggregatorLaws extends CheckProperties { checkNumericSum[Float] } - implicit def monoidAggregator[A, B, C]( - implicit prepare: Arbitrary[A => B], + implicit def monoidAggregator[A, B, C](implicit + prepare: Arbitrary[A => B], m: Monoid[B], present: Arbitrary[B => C] ): Arbitrary[MonoidAggregator[A, B, C]] = @@ -148,7 +148,7 @@ class AggregatorLaws extends CheckProperties { forAll { (in: List[Int], t0: Int) => val t = math.max(t0, 1) val l = in.sorted.take(t) - val a = (Aggregator.sortedTake[Int](t).apply(in)) + val a = Aggregator.sortedTake[Int](t).apply(in) l == a } } @@ -156,7 +156,7 @@ class AggregatorLaws extends CheckProperties { forAll { (in: List[Int], t0: Int, fn: Int => Int) => val t = math.max(t0, 1) val l = in.sortBy(fn).take(t) - val a = (Aggregator.sortByTake(t)(fn).apply(in)) + val a = Aggregator.sortByTake(t)(fn).apply(in) // since we considered two things equivalent under fn, // we have to use that here: val ord = Ordering.Iterable(Ordering.by(fn)) @@ -167,7 +167,7 @@ class AggregatorLaws extends CheckProperties { forAll { (in: List[Int], t0: Int, fn: Int => Int) => val t = math.max(t0, 1) val l = in.sortBy(fn).reverse.take(t) - val a = (Aggregator.sortByReverseTake(t)(fn).apply(in)) + val a = Aggregator.sortByReverseTake(t)(fn).apply(in) // since we considered two things equivalent under fn, // we have to use that here: val ord = Ordering.Iterable(Ordering.by(fn)) @@ -178,7 +178,7 @@ class AggregatorLaws extends CheckProperties { forAll { (in: List[Int], t0: Int) => val t = math.max(t0, 1) val l = in.sorted.take(t) - val a = (Aggregator.immutableSortedTake[Int](t).apply(in)) + val a = Aggregator.immutableSortedTake[Int](t).apply(in) l == a } } @@ -186,7 +186,7 @@ class AggregatorLaws extends CheckProperties { forAll { (in: List[Int], t0: Int) => val t = math.max(t0, 1) val l = in.sorted.reverse.take(t) - val a = (Aggregator.immutableSortedReverseTake[Int](t).apply(in)) + val a = Aggregator.immutableSortedReverseTake[Int](t).apply(in) l == a } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala b/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala index 9da06f938..57cae8346 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala +++ 
b/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala @@ -131,7 +131,10 @@ class CollectionSpecification extends CheckProperties { } property("MMap[Int,Int] Monoid laws") { - isAssociativeDifferentTypes[ScMap[Int, Int], MMap[Int, Int]] && weakZeroDifferentTypes[ScMap[Int, Int], MMap[ + isAssociativeDifferentTypes[ScMap[Int, Int], MMap[Int, Int]] && weakZeroDifferentTypes[ScMap[ + Int, + Int + ], MMap[ Int, Int ]] @@ -158,7 +161,10 @@ class CollectionSpecification extends CheckProperties { } property("MMap[Int,String] Monoid laws") { - isAssociativeDifferentTypes[ScMap[Int, Int], MMap[Int, Int]] && weakZeroDifferentTypes[ScMap[Int, Int], MMap[ + isAssociativeDifferentTypes[ScMap[Int, Int], MMap[Int, Int]] && weakZeroDifferentTypes[ScMap[ + Int, + Int + ], MMap[ Int, Int ]] @@ -287,7 +293,7 @@ class CollectionSpecification extends CheckProperties { .filter(_._2.isDefined) .mapValues(_.get) .toMap - val m1Orm2 = (m1.keySet | m2.keySet) + val m1Orm2 = m1.keySet | m2.keySet ((m1after == m1) && (m2after == m2) && (m3.keySet == m1Orm2)) } } @@ -314,9 +320,13 @@ class CollectionSpecification extends CheckProperties { } yield AdaptiveVector.fromVector(Vector(l: _*), sparse), for { m <- Arbitrary.arbitrary[Map[Int, T]] - } yield AdaptiveVector.fromMap(m.filter { - case (k, _) => (k < 1000) && (k >= 0) - }, sparse, 1000), + } yield AdaptiveVector.fromMap( + m.filter { + case (k, _) => (k < 1000) && (k >= 0) + }, + sparse, + 1000 + ), for { size <- Gen.posNum[Int] } yield AdaptiveVector.fromMap(Map.empty, sparse, size) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala index 374255987..63e1c103c 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala @@ -66,11 +66,14 @@ class CombinatorTest extends CheckProperties { // Make sure the sets start sorted: implicit def topKArb: Arbitrary[(Map[Int, Int], Set[Int])] = Arbitrary { - for (s <- Arbitrary.arbitrary[List[Int]]; - smallvals = s.map(_ % 31); - m = smallvals - .groupBy(s => s) - .mapValues(_.size)) + for ( + s <- Arbitrary.arbitrary[List[Int]]; + smallvals = s.map(_ % 31); + m = + smallvals + .groupBy(s => s) + .mapValues(_.size) + ) yield monTopK.plus(monTopK.zero, (m.toMap, smallvals.toSet)) } property("MonoidCombinator with top-K forms a Monoid") { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala index b00099762..1560b7c74 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala @@ -431,7 +431,8 @@ abstract class CMSTest[K: CMSHasher](toK: Int => K) // and uses it for all its counting/querying functionality (like an adapter) we can test CMS[K] indirectly through // testing TopPctCMS[K]. 
val COUNTING_CMS_MONOID = { - val ANY_HEAVY_HITTERS_PCT = 0.1 // heavy hitters functionality is not relevant for the tests using this monoid + val ANY_HEAVY_HITTERS_PCT = + 0.1 // heavy hitters functionality is not relevant for the tests using this monoid TopPctCMS.monoid[K](EPS, DELTA, SEED, ANY_HEAVY_HITTERS_PCT) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala index 4e4710e29..c9595bd30 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala @@ -156,8 +156,7 @@ class EventuallyTest extends AnyWordSpec with Matchers { } class EventuallyAggregatorLaws extends AnyPropSpec with ScalaCheckPropertyChecks with Matchers { - implicit def aggregator[A, B, C]( - implicit + implicit def aggregator[A, B, C](implicit prepare: Arbitrary[A => B], sg: Semigroup[B], present: Arbitrary[B => C] diff --git a/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala index ee65b33b1..ed0268a7e 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala @@ -102,9 +102,19 @@ class FoldTest extends AnyWordSpec { } "sequence" in { - run[Int, Seq[Long]](Fold.sequence(Seq(Fold.count(_ < 0), Fold.count { - _ >= 0 - })), Zero(Seq(0, 0)), One(1, Seq(0, 1)), Many(Seq(-2, -1, 0, 1, 2), Seq(2, 3))) + run[Int, Seq[Long]]( + Fold.sequence( + Seq( + Fold.count(_ < 0), + Fold.count { + _ >= 0 + } + ) + ), + Zero(Seq(0, 0)), + One(1, Seq(0, 1)), + Many(Seq(-2, -1, 0, 1, 2), Seq(2, 3)) + ) } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/GeneratedAbstractAlgebraLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/GeneratedAbstractAlgebraLaws.scala index e26f6bb19..b07f0833b 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/GeneratedAbstractAlgebraLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/GeneratedAbstractAlgebraLaws.scala @@ -83,54 +83,15 @@ class GeneratedAbstractAlgebraLaws extends CheckProperties { } property("tuple21 is a ring") { type T = ( - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, + Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int ) ringLaws[T] && isCommutative[T] } property("tuple22 is a ring") { type T = ( - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int + Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, + Int, Int ) ringLaws[T] && isCommutative[T] } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/GeneratedProductAlgebraLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/GeneratedProductAlgebraLaws.scala index 3b0c88894..8554383ae 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/GeneratedProductAlgebraLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/GeneratedProductAlgebraLaws.scala @@ -156,26 +156,7 @@ class GeneratedProductAlgebraLaws extends CheckProperties { } property("Product21Ring is a ring") { type T = ( - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - 
Int, - Int, - Int, - Int, + Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int ) implicit val ring = Ring[ @@ -206,28 +187,8 @@ class GeneratedProductAlgebraLaws extends CheckProperties { } property("Product22Ring is a ring") { type T = ( - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int + Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, + Int, Int ) implicit val ring = Ring[ T, diff --git a/algebird-test/src/test/scala/com/twitter/algebird/PreparerLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/PreparerLaws.scala index 28cf1f452..db0b49441 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/PreparerLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/PreparerLaws.scala @@ -21,8 +21,8 @@ import org.scalacheck.Prop._ class PreparerLaws extends CheckProperties { - implicit def aggregator[A, B, C]( - implicit prepare: Arbitrary[A => B], + implicit def aggregator[A, B, C](implicit + prepare: Arbitrary[A => B], sg: Semigroup[B], present: Arbitrary[B => C] ): Arbitrary[Aggregator[A, B, C]] = Arbitrary { @@ -50,8 +50,8 @@ class PreparerLaws extends CheckProperties { } } - implicit def monoidAggregator[A, B, C]( - implicit prepare: Arbitrary[A => B], + implicit def monoidAggregator[A, B, C](implicit + prepare: Arbitrary[A => B], m: Monoid[B], present: Arbitrary[B => C] ): Arbitrary[MonoidAggregator[A, B, C]] = diff --git a/algebird-test/src/test/scala/com/twitter/algebird/RightFolded2Test.scala b/algebird-test/src/test/scala/com/twitter/algebird/RightFolded2Test.scala index 4b9f181e1..e91a09cf3 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/RightFolded2Test.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/RightFolded2Test.scala @@ -14,20 +14,20 @@ class RightFolded2Test extends CheckProperties { implicit val rightFoldedMonoid = RightFolded2.monoid[Int, Long, Long](mapFn)(monFold) - def rightFolded2Value[In, Out, Acc]( - implicit arbout: Arbitrary[Out], + def rightFolded2Value[In, Out, Acc](implicit + arbout: Arbitrary[Out], mon: RightFolded2Monoid[In, Out, Acc] ): Gen[RightFoldedValue2[In, Out, Acc]] = for (v <- arbout.arbitrary) yield mon.init(v) - def rightFolded2ToFold[In, Out, Acc]( - implicit arbin: Arbitrary[In], + def rightFolded2ToFold[In, Out, Acc](implicit + arbin: Arbitrary[In], mon: RightFolded2Monoid[In, Out, Acc] ): Gen[RightFoldedToFold2[In]] = for (v <- arbin.arbitrary) yield mon.toFold(v) - implicit def rightFolded2[In, Out, Acc]( - implicit arbin: Arbitrary[In], + implicit def rightFolded2[In, Out, Acc](implicit + arbin: Arbitrary[In], arbout: Arbitrary[Out], mon: RightFolded2Monoid[In, Out, Acc] ): Arbitrary[RightFolded2[In, Out, Acc]] = diff --git a/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala index b82d32c63..d973c5b70 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala @@ -194,7 +194,22 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create an aggregator from a tuple of 14 aggregators" in { - val agg: Aggregator[Int, Tuple14[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int], Tuple14[ + val agg: Aggregator[Int, Tuple14[ + Int, + Int, + 
Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ], Tuple14[ Int, Int, Int, @@ -844,7 +859,22 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create an aggregator from a tuple of 14 aggregators" in { - val agg: Aggregator[Int, Tuple14[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int], Tuple14[ + val agg: Aggregator[Int, Tuple14[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ], Tuple14[ Int, Int, Int, @@ -1428,7 +1458,18 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create a MonoidAggregator from a tuple of 10 MonoidAggregators" in { - val agg: MonoidAggregator[Long, Tuple10[Long, Long, Long, Long, Long, Long, Long, Long, Long, Long], Tuple10[ + val agg: MonoidAggregator[Long, Tuple10[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ], Tuple10[ Long, Long, Long, @@ -2352,7 +2393,22 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create an aggregator from 14 (key, aggregator) pairs" in { - val agg: Aggregator[Int, Tuple14[Long, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int], Map[ + val agg: Aggregator[Int, Tuple14[ + Long, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ], Map[ String, Long ]] = MapAggregator( diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/NullSummer.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/NullSummer.scala index 144d5e361..294d9369e 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/NullSummer.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/NullSummer.scala @@ -21,8 +21,8 @@ import com.twitter.util.Future /** * @author Ian O Connell */ -class NullSummer[Key, Value](tuplesIn: Incrementor, tuplesOut: Incrementor)( - implicit semigroup: Semigroup[Value] +class NullSummer[Key, Value](tuplesIn: Incrementor, tuplesOut: Incrementor)(implicit + semigroup: Semigroup[Value] ) extends AsyncSummer[(Key, Value), Map[Key, Value]] { def flush: Future[Map[Key, Value]] = Future.value(Map.empty) def tick: Future[Map[Key, Value]] = Future.value(Map.empty) diff --git a/project/GenTupleAggregators.scala b/project/GenTupleAggregators.scala index 3c995a085..bda5634e2 100644 --- a/project/GenTupleAggregators.scala +++ b/project/GenTupleAggregators.scala @@ -55,7 +55,7 @@ object MultiAggregator { .map { i => val methodName = name.getOrElse("from%d".format(i)) val aggType = if (isMonoid) "Monoid" else "" - val nums = (1 to i) + val nums = 1 to i val bs = nums.map("B" + _).mkString(", ") val cs = nums.map("C" + _).mkString(", ") val aggs = nums.map(x => "%sAggregator[A, B%s, C%s]".format(aggType, x, x)).mkString(", ") From 1775faaf37adb720554e2b8c6f9785bd0a8b59e5 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 21 Apr 2020 15:21:15 +0200 Subject: [PATCH 058/306] Update sbt-scalafix to 0.9.15 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 390fe5f04..29fe39f00 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -15,4 +15,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.2") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.14") 
+addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.15") From 9799114bd49bc10fb76ad94b7fd79fde408d0b52 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Wed, 6 May 2020 22:23:24 -0400 Subject: [PATCH 059/306] Fix scalacOptions (#818) --- build.sbt | 28 ++++++++++++---------------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/build.sbt b/build.sbt index 5641dc422..7c8456675 100644 --- a/build.sbt +++ b/build.sbt @@ -52,22 +52,18 @@ val sharedSettings = Seq( "-language:existentials" ), scalacOptions ++= { - if (scalaVersion.value.startsWith("2.11")) - Seq("-Ywarn-unused", "-Ywarn-unused-import") - else - Seq() - }, - scalacOptions ++= { - if (scalaVersion.value.startsWith("2.12")) - Seq("-Ywarn-unused", "-opt:l:inline", "-opt-inline-from:com.twitter.algebird.**") - else - Seq("-optimize") - }, - scalacOptions ++= { - if (isScala213x(scalaVersion.value)) { - Seq("-Ymacro-annotations", "-Ywarn-unused") - } else { - Seq() + VersionNumber(scalaVersion.value) match { + case v if v.matchesSemVer(SemanticSelector("<2.12")) => + Seq("-Ywarn-unused", "-Ywarn-unused-import", "-optimize") + case v if v.matchesSemVer(SemanticSelector(">=2.12")) => + val ops = Seq("-Ywarn-unused", "-opt:l:inline", "-opt-inline-from:com.twitter.algebird.**") + if (v.matchesSemVer(SemanticSelector("2.13.x"))) { + "-Ymacro-annotations" +: ops + } else { + ops + } + case _ => + Nil } }, javacOptions ++= Seq("-target", "1.6", "-source", "1.6"), From 134429978db71ef8634e14b0c066a6a52f768572 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Wed, 6 May 2020 22:23:51 -0400 Subject: [PATCH 060/306] Update scala version 2.12.11 and 2.13.2 (#819) --- .travis.yml | 8 ++++---- README.md | 2 +- build.sbt | 12 ++++++------ 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.travis.yml b/.travis.yml index eaafb5d6c..41e092799 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,7 +26,7 @@ matrix: "++$TRAVIS_SCALA_VERSION clean" \ "++$TRAVIS_SCALA_VERSION test" - - scala: 2.12.10 + - scala: 2.12.11 jdk: openjdk8 before_install: - export PATH=${PATH}:./vendor/bundle @@ -40,19 +40,19 @@ matrix: "++$TRAVIS_SCALA_VERSION docs/makeMicrosite" \ "++$TRAVIS_SCALA_VERSION mimaReportBinaryIssues" - - scala: 2.12.10 + - scala: 2.12.11 jdk: openjdk11 script: sbt \ "++$TRAVIS_SCALA_VERSION clean" \ "++$TRAVIS_SCALA_VERSION test" - - scala: 2.13.1 + - scala: 2.13.2 jdk: openjdk8 script: sbt \ "++$TRAVIS_SCALA_VERSION clean" \ "++$TRAVIS_SCALA_VERSION test" - - scala: 2.13.1 + - scala: 2.13.2 jdk: openjdk11 script: sbt \ "++$TRAVIS_SCALA_VERSION clean" \ diff --git a/README.md b/README.md index 964558d75..f23d26357 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ See the [Algebird website](https://twitter.github.io/algebird) for more informat ```scala > sbt algebird-core/console -Welcome to Scala 2.12.10 (OpenJDK 64-Bit Server VM, Java 11.0.1). +Welcome to Scala 2.12.11 (OpenJDK 64-Bit Server VM, Java 11.0.1). Type in expressions for evaluation. Or try :help. 
scala> import com.twitter.algebird._ diff --git a/build.sbt b/build.sbt index 7c8456675..2a2789bee 100644 --- a/build.sbt +++ b/build.sbt @@ -36,7 +36,7 @@ def isScala213x(scalaVersion: String) = scalaBinaryVersion(scalaVersion) == "2.1 val sharedSettings = Seq( organization := "com.twitter", - scalaVersion := "2.12.10", + scalaVersion := "2.12.11", crossScalaVersions := Seq("2.11.12", scalaVersion.value), resolvers ++= Seq( Opts.resolver.sonatypeSnapshots, @@ -234,7 +234,7 @@ def module(name: String) = { } lazy val algebirdCore = module("core").settings( - crossScalaVersions += "2.13.1", + crossScalaVersions += "2.13.2", initialCommands := """ import com.twitter.algebird._ """.stripMargin('|'), @@ -263,7 +263,7 @@ lazy val algebirdCore = module("core").settings( lazy val algebirdTest = module("test") .settings( testOptions in Test ++= Seq(Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "4")), - crossScalaVersions += "2.13.1", + crossScalaVersions += "2.13.2", libraryDependencies ++= Seq( "org.scalacheck" %% "scalacheck" % scalacheckVersion, @@ -291,14 +291,14 @@ lazy val algebirdBenchmark = module("benchmark") lazy val algebirdUtil = module("util") .settings( - crossScalaVersions += "2.13.1", + crossScalaVersions += "2.13.2", libraryDependencies ++= Seq("com.twitter" %% "util-core" % utilVersion) ) .dependsOn(algebirdCore, algebirdTest % "test->test") lazy val algebirdBijection = module("bijection") .settings( - crossScalaVersions += "2.13.1", + crossScalaVersions += "2.13.2", libraryDependencies += "com.twitter" %% "bijection-core" % bijectionVersion ) .dependsOn(algebirdCore, algebirdTest % "test->test") @@ -315,7 +315,7 @@ lazy val algebirdSpark = module("spark") lazy val algebirdGeneric = module("generic") .settings( - crossScalaVersions += "2.13.1", + crossScalaVersions += "2.13.2", libraryDependencies ++= Seq( "com.chuusai" %% "shapeless" % "2.3.3", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" From 179894d1e77556b9ec047a9b55d62c0e188b1f91 Mon Sep 17 00:00:00 2001 From: "P. Oscar Boykin" Date: Thu, 7 May 2020 11:18:13 -1000 Subject: [PATCH 061/306] Setting version to 0.13.7 --- version.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.sbt b/version.sbt index ee61ea4b2..9e946e8cc 100644 --- a/version.sbt +++ b/version.sbt @@ -1 +1 @@ -version in ThisBuild := "0.13.7-SNAPSHOT" +version in ThisBuild := "0.13.7" From 5c40ed03c4d27a8758f82e7dcedf9c851ca7de8d Mon Sep 17 00:00:00 2001 From: "P. 
Oscar Boykin" Date: Thu, 7 May 2020 11:22:34 -1000 Subject: [PATCH 062/306] Setting version to 0.13.8-SNAPSHOT --- version.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.sbt b/version.sbt index 9e946e8cc..b14908297 100644 --- a/version.sbt +++ b/version.sbt @@ -1 +1 @@ -version in ThisBuild := "0.13.7" +version in ThisBuild := "0.13.8-SNAPSHOT" From 54868729839a53cd5181a1e61f63c4d7bc2cb129 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 12 May 2020 23:17:52 +0200 Subject: [PATCH 063/306] Update sbt-microsites to 1.2.1 (#822) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 29fe39f00..de7e34e1f 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,7 +5,7 @@ resolvers ++= Seq( ) ) -addSbtPlugin("com.47deg" % "sbt-microsites" % "1.2.0") +addSbtPlugin("com.47deg" % "sbt-microsites" % "1.2.1") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") From d495be6f2c627131da9a163ce3c732f0842d2709 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 12 May 2020 23:18:32 +0200 Subject: [PATCH 064/306] Update scalatest to 3.1.2 (#821) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 2a2789bee..67c2ad59c 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val bijectionVersion = "0.9.7" val javaEwahVersion = "1.1.7" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.1.1" +val scalaTestVersion = "3.1.2" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.1.6" From daab5cc69aae3902c5f5247366e862edaef2ea99 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 12 May 2020 23:18:46 +0200 Subject: [PATCH 065/306] Update scalafmt-core to 2.5.2 (#820) --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index e605791d1..bf7f3aa5d 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=2.5.1 +version=2.5.2 maxColumn = 110 docstrings = JavaDoc newlines.alwaysBeforeMultilineDef = false From cc2a3781a7388fadb40decc3d0581314341fead0 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 14 May 2020 18:27:49 +0200 Subject: [PATCH 066/306] Update sbt-scalafmt to 2.4.0 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index de7e34e1f..f1b46297b 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -9,7 +9,7 @@ addSbtPlugin("com.47deg" % "sbt-microsites" % "1.2.1") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.4") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.7.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") From 3ae0deec1cb7956c63f367229795b003f17e9366 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 19 Jun 2020 23:21:57 +0200 Subject: 
[PATCH 067/306] Update sbt-sonatype to 3.9.3 (#833) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index f1b46297b..1078cb2f4 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -13,6 +13,6 @@ addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.7.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.2") +addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.3") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.15") From fe16cd0ee7255c128f639128cd5bb082de01bbfd Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 28 Jun 2020 22:57:22 +0200 Subject: [PATCH 068/306] Update sbt to 1.3.13 (#835) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 797e7ccfd..0837f7a13 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.10 +sbt.version=1.3.13 From 5bcc04067034cd22a843211d06b778dd10c23223 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 29 Jun 2020 15:44:28 +0200 Subject: [PATCH 069/306] Update spark-core to 2.4.6 (#830) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 67c2ad59c..b8c8b2871 100644 --- a/build.sbt +++ b/build.sbt @@ -13,7 +13,7 @@ val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.1.6" val utilVersion = "20.4.1" -val sparkVersion = "2.4.5" +val sparkVersion = "2.4.6" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = CrossVersion.partialVersion(scalaVersion) match { From cf913f69992bf4345c3d38a46698c3c7278c6d3f Mon Sep 17 00:00:00 2001 From: Neville Li Date: Mon, 29 Jun 2020 13:53:05 -0400 Subject: [PATCH 070/306] update dependencies (#836) * update dependencies * fmt --- .scalafmt.conf | 2 +- .../com/twitter/algebird/BloomFilter.scala | 1 - .../scala/com/twitter/algebird/ExpHist.scala | 2 - .../algebird/GeneratedProductAlgebra.scala | 10 ++++- .../com/twitter/algebird/HyperLogLog.scala | 1 - .../scala/com/twitter/algebird/Metric.scala | 1 - .../scala/com/twitter/algebird/QTree.scala | 1 - .../scala/com/twitter/algebird/Ring.scala | 1 - .../scala/com/twitter/algebird/Scan.scala | 6 --- .../com/twitter/algebird/VectorSpace.scala | 1 - .../scala/com/twitter/algebird/Window.scala | 1 - .../algebird/AppendAggregatorTest.scala | 1 - .../com/twitter/algebird/BatchedTest.scala | 2 - .../com/twitter/algebird/EventuallyTest.scala | 1 - .../GeneratedAbstractAlgebraLaws.scala | 45 +++++++++++++++++-- .../GeneratedProductAlgebraLaws.scala | 45 +++++++++++++++++-- .../twitter/algebird/HyperLogLogTest.scala | 2 - .../com/twitter/algebird/MinHasherTest.scala | 1 - .../algebird/MinMaxAggregatorSpec.scala | 1 - .../com/twitter/algebird/MomentsLaws.scala | 1 - .../com/twitter/algebird/MonadFoldMTest.scala | 1 - .../com/twitter/algebird/OperatorTest.scala | 2 - .../com/twitter/algebird/QTreeTest.scala | 2 - .../com/twitter/algebird/SketchMapTest.scala | 1 - .../com/twitter/algebird/SpaceSaverTest.scala | 1 - .../algebird/TupleAggregatorsTest.scala | 1 - 
.../algebird/statistics/StatisticsTests.scala | 1 - build.sbt | 4 +- project/plugins.sbt | 2 +- 29 files changed, 96 insertions(+), 45 deletions(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index bf7f3aa5d..a1885ce41 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=2.5.2 +version=2.6.1 maxColumn = 110 docstrings = JavaDoc newlines.alwaysBeforeMultilineDef = false diff --git a/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala b/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala index 57e2c043d..43d30c52a 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala @@ -190,7 +190,6 @@ object BloomFilter { * 2) query: hash the value k times. If there are k collisions, then return true; otherwise false. * * http://en.wikipedia.org/wiki/Bloom_filter - * */ case class BloomFilterMonoid[A](numHashes: Int, width: Int)(implicit hash: Hash128[A]) extends Monoid[BF[A]] diff --git a/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala b/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala index c0bb6fe32..a2ea9cef6 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala @@ -29,7 +29,6 @@ import scala.collection.mutable.Builder * - Query EH with a shorter window than the configured window * - Discussion of epsilon vs memory tradeoffs * - * * @param conf the config values for this instance. * @param buckets Vector of timestamps of each (powers of 2) * ticks. This is the key to the exponential histogram @@ -340,7 +339,6 @@ object ExpHist { * }}} * 15 = (3 * 2^0) + (2 * 2^1) + (2 * 2^2) * - * * the "l" in l-canonical means that * * - all return vector entries but the last one == `l` or `l + 1` diff --git a/algebird-core/src/main/scala/com/twitter/algebird/GeneratedProductAlgebra.scala b/algebird-core/src/main/scala/com/twitter/algebird/GeneratedProductAlgebra.scala index 60a820f77..f43b4eb5b 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/GeneratedProductAlgebra.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/GeneratedProductAlgebra.scala @@ -489,8 +489,14 @@ class Product6Group[X, A, B, C, D, E, F]( class Product6Ring[X, A, B, C, D, E, F]( apply: (A, B, C, D, E, F) => X, unapply: X => Option[(A, B, C, D, E, F)] -)(implicit aring: Ring[A], bring: Ring[B], cring: Ring[C], dring: Ring[D], ering: Ring[E], fring: Ring[F]) - extends Product6Group[X, A, B, C, D, E, F]( +)(implicit + aring: Ring[A], + bring: Ring[B], + cring: Ring[C], + dring: Ring[D], + ering: Ring[E], + fring: Ring[F] +) extends Product6Group[X, A, B, C, D, E, F]( apply: (A, B, C, D, E, F) => X, unapply: X => Option[(A, B, C, D, E, F)] ) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala index 376c5ea21..5eff4aa40 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala @@ -89,7 +89,6 @@ object HyperLogLog { j(bsl.in, bits) /** - * */ def j(bytes: Array[Byte], bits: Int): Int = { var i = 0 diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala b/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala index 7b7544976..5a66d0f32 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala +++ 
b/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala @@ -36,7 +36,6 @@ import scala.annotation.implicitNotFound * 4. m(v1, v3) <= m(v1, v2) + m(v2, v3) * * If you implement this trait, make sure that you follow these rules. - * */ object Metric { def apply[V: Metric](v1: V, v2: V): Double = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala index 24ba33644..3a9bc6e3c 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala @@ -250,7 +250,6 @@ class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( } /** - * * Find the smallest dyadic interval that contains the dyadic interval * for this tree's root and the other tree's root, and return its * level (that is, the power of 2 for the interval). diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala b/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala index 3333d7450..a3037fc6f 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala @@ -35,7 +35,6 @@ import scala.annotation.implicitNotFound * - addition * - multiplication * - * * Note, if you have distributive property, additive inverses, and multiplicative identity you * can prove you have a commutative group under the ring: * diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala b/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala index de2d59d33..cfaca330e 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala @@ -54,7 +54,6 @@ object Scan { def identity[A]: Aux[A, Unit, A] = fromFunction[A, A](x => x) /** - * * @param initStateCreator A call-by-name method that allocates new mutable state * @param presentAndUpdateStateFn A function that both presents the output value, and has the side-effect of updating the mutable state * @tparam I @@ -77,7 +76,6 @@ object Scan { def const[T](t: T): Aux[Any, Unit, T] = fromFunction(_ => t) /** - * * @param aggregator * @param initState * @tparam A @@ -96,7 +94,6 @@ object Scan { } /** - * * @param monoidAggregator * @tparam A * @tparam B @@ -122,7 +119,6 @@ object Scan { * * The canonical method to use a scan is `apply`. * - * * @tparam I The type of elements that the computation is scanning over. * @tparam O The output type of the scan (typically distinct from the hidden `State` of the scan). 
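For context on the Scan scaladoc touched above: a scan consumes inputs one at a time, emitting one output per input while carrying hidden state from step to step. The sketch below is not Algebird's Scan type, only a minimal stand-in for that recurrence over a plain Iterator, with a running-sum example; all names in it are invented for illustration.

```scala
// Minimal sketch of the scan recurrence: start from an initial state, and for each
// input produce an output plus the next state. Not the Algebird Scan implementation.
object ScanRecurrenceSketch {
  def scanWith[I, S, O](initialState: S)(presentAndNextState: (I, S) => (O, S))(in: Iterator[I]): Iterator[O] = {
    var state = initialState
    in.map { i =>
      val (out, next) = presentAndNextState(i, state)
      state = next
      out
    }
  }

  def main(args: Array[String]): Unit = {
    // running sum: the hidden state is the total so far, the output is the updated total
    val runningSums = scanWith(0)((i: Int, s: Int) => { val n = s + i; (n, n) })(Iterator(1, 2, 3, 4))
    println(runningSums.toList) // List(1, 3, 6, 10)
  }
}
```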
*/ @@ -142,7 +138,6 @@ sealed abstract class Scan[-I, +O] extends Serializable { def initialState: State /** - * * @param i An element in the stream to process * @param stateBeforeProcessingI The state of the scan before processing i * @return The output of the scan corresponding to processing i with state stateBeforeProcessing, @@ -156,7 +151,6 @@ sealed abstract class Scan[-I, +O] extends Serializable { * `Iterator(o_1, ..., o_n)` where * `(o_(i+1), state_(i+1)) = presentAndNextState(a_i, state_i)` * and `state_0 = initialState` - * */ def scanIterator(iter: Iterator[I]): Iterator[O] = new AbstractIterator[O] { override def hasNext: Boolean = iter.hasNext diff --git a/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala b/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala index d429aa283..d53ff963a 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala @@ -22,7 +22,6 @@ import scala.annotation.implicitNotFound * This class represents a vector space. For the required properties see: * * http://en.wikipedia.org/wiki/Vector_space#Definition - * */ object VectorSpace extends VectorSpaceOps with Implicits diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Window.scala b/algebird-core/src/main/scala/com/twitter/algebird/Window.scala index 5168ca529..16704052c 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Window.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Window.scala @@ -21,7 +21,6 @@ import java.io.Serializable import Operators._ /** - * * Convenience case class defined with a monoid for aggregating elements over * a finite window. * diff --git a/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala index 343c28e0e..8b7bdab42 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala @@ -1,6 +1,5 @@ package com.twitter.algebird -import org.scalatest._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec diff --git a/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala index 7e83d5ca0..7e72938c4 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala @@ -1,7 +1,5 @@ package com.twitter.algebird -import org.scalatest._ - import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks import org.scalacheck.{Arbitrary, Gen} import Arbitrary.arbitrary diff --git a/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala index c9595bd30..f77cc3a88 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala @@ -1,6 +1,5 @@ package com.twitter.algebird -import org.scalatest._ import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks import org.scalacheck.{Arbitrary, Gen, Prop} import org.scalatest.matchers.should.Matchers diff --git a/algebird-test/src/test/scala/com/twitter/algebird/GeneratedAbstractAlgebraLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/GeneratedAbstractAlgebraLaws.scala index 
b07f0833b..e26f6bb19 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/GeneratedAbstractAlgebraLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/GeneratedAbstractAlgebraLaws.scala @@ -83,15 +83,54 @@ class GeneratedAbstractAlgebraLaws extends CheckProperties { } property("tuple21 is a ring") { type T = ( - Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, Int ) ringLaws[T] && isCommutative[T] } property("tuple22 is a ring") { type T = ( - Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, - Int, Int + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int ) ringLaws[T] && isCommutative[T] } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/GeneratedProductAlgebraLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/GeneratedProductAlgebraLaws.scala index 8554383ae..3b0c88894 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/GeneratedProductAlgebraLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/GeneratedProductAlgebraLaws.scala @@ -156,7 +156,26 @@ class GeneratedProductAlgebraLaws extends CheckProperties { } property("Product21Ring is a ring") { type T = ( - Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, Int ) implicit val ring = Ring[ @@ -187,8 +206,28 @@ class GeneratedProductAlgebraLaws extends CheckProperties { } property("Product22Ring is a ring") { type T = ( - Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, - Int, Int + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int ) implicit val ring = Ring[ T, diff --git a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala index 6b9136dfe..ae50c1f07 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala @@ -1,7 +1,5 @@ package com.twitter.algebird -import org.scalatest._ - import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks import org.scalacheck.{Arbitrary, Gen, Prop} diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala index 1dd77b2aa..998b92c38 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala @@ -2,7 +2,6 @@ package com.twitter.algebird import com.twitter.algebird.BaseProperties._ import org.scalacheck.{Arbitrary, Gen} -import org.scalatest._ import scala.math.Equiv import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala b/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala index e4d99ef03..54afae509 100644 --- 
a/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala @@ -1,6 +1,5 @@ package com.twitter.algebird -import org.scalatest._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala index 625f67d17..115ca7600 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala @@ -1,6 +1,5 @@ package com.twitter.algebird -import org.scalatest._ import com.twitter.algebird.BaseProperties._ import com.twitter.algebird.scalacheck.arbitrary._ import org.scalatest.matchers.should.Matchers diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MonadFoldMTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/MonadFoldMTest.scala index 169c5b140..fba85fda1 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MonadFoldMTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MonadFoldMTest.scala @@ -1,6 +1,5 @@ package com.twitter.algebird -import org.scalatest._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec diff --git a/algebird-test/src/test/scala/com/twitter/algebird/OperatorTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/OperatorTest.scala index 050d957ae..04d46f9d8 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/OperatorTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/OperatorTest.scala @@ -1,7 +1,5 @@ package com.twitter.algebird -import org.scalatest._ - import Operators._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec diff --git a/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala index bf916cc1b..9815f71b0 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala @@ -16,8 +16,6 @@ limitations under the License. 
package com.twitter.algebird -import org.scalatest._ - import org.scalacheck.Arbitrary import org.scalacheck.Gen.choose import org.scalatest.matchers.should.Matchers diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala index 855f95cb7..501c4022f 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala @@ -1,6 +1,5 @@ package com.twitter.algebird -import org.scalatest._ import org.scalacheck.{Arbitrary, Gen} import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala index 0c4fc72e4..49c1dcce4 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala @@ -2,7 +2,6 @@ package com.twitter.algebird import org.scalacheck.Prop._ import org.scalacheck.{Arbitrary, Gen} -import org.scalatest._ import scala.util.Try import org.scalatest.matchers.should.Matchers diff --git a/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala index d973c5b70..b2d9f58b1 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala @@ -1,6 +1,5 @@ package com.twitter.algebird -import org.scalatest._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec diff --git a/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala b/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala index 825f8cccf..31af74502 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala @@ -4,7 +4,6 @@ import com.twitter.algebird.BaseProperties._ import com.twitter.algebird.CheckProperties import org.scalacheck.Arbitrary import org.scalacheck.Gen._ -import org.scalatest._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec diff --git a/build.sbt b/build.sbt index b8c8b2871..c0401ca94 100644 --- a/build.sbt +++ b/build.sbt @@ -8,11 +8,11 @@ val bijectionVersion = "0.9.7" val javaEwahVersion = "1.1.7" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.1.2" +val scalaTestVersion = "3.2.0" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.1.6" -val utilVersion = "20.4.1" +val utilVersion = "20.6.0" val sparkVersion = "2.4.6" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = diff --git a/project/plugins.sbt b/project/plugins.sbt index 1078cb2f4..0401107a2 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -15,4 +15,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.3") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.15") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.17") From 
b6c6269daed5d9d5b73b6b771c1330f50e38d8fb Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 4 Jul 2020 22:29:53 +0200 Subject: [PATCH 071/306] Update sbt-sonatype to 3.9.4 (#838) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 0401107a2..4a66a74ec 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -13,6 +13,6 @@ addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.7.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.3") +addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.4") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.17") From 1bafbfca65cb0eafb8697816f2212489fd95c547 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 4 Jul 2020 22:32:04 +0200 Subject: [PATCH 072/306] Update sbt-scalafix to 0.9.18 (#837) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 4a66a74ec..c8ca60e5d 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -15,4 +15,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.4") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.17") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.18") From 128153a12de127ddbbcd343500130b1bf51ba298 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sun, 5 Jul 2020 19:46:52 +0200 Subject: [PATCH 073/306] Update scalafmt-core to 2.6.2 --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index a1885ce41..b60581c8b 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=2.6.1 +version=2.6.2 maxColumn = 110 docstrings = JavaDoc newlines.alwaysBeforeMultilineDef = false From 4628adade2db651058f843dad7a7f0b5d0f85de5 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 11 Jul 2020 02:37:20 +0200 Subject: [PATCH 074/306] Update scalafmt-core to 2.6.3 --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index b60581c8b..8aab84ea9 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=2.6.2 +version=2.6.3 maxColumn = 110 docstrings = JavaDoc newlines.alwaysBeforeMultilineDef = false From 7a0006ca4aa8ea98dab9a3890131adb6c708601e Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sun, 12 Jul 2020 19:18:15 +0100 Subject: [PATCH 075/306] Update scala to 2.13.3 (#842) --- .travis.yml | 4 ++-- build.sbt | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index 41e092799..be56c0358 100644 --- a/.travis.yml +++ b/.travis.yml @@ -46,13 +46,13 @@ matrix: "++$TRAVIS_SCALA_VERSION clean" \ "++$TRAVIS_SCALA_VERSION test" - - scala: 2.13.2 + - scala: 2.13.3 jdk: openjdk8 script: sbt \ "++$TRAVIS_SCALA_VERSION clean" \ "++$TRAVIS_SCALA_VERSION test" - - scala: 2.13.2 + - scala: 2.13.3 jdk: openjdk11 script: sbt \ "++$TRAVIS_SCALA_VERSION clean" \ diff --git a/build.sbt b/build.sbt index c0401ca94..046768359 100644 --- 
a/build.sbt +++ b/build.sbt @@ -234,7 +234,7 @@ def module(name: String) = { } lazy val algebirdCore = module("core").settings( - crossScalaVersions += "2.13.2", + crossScalaVersions += "2.13.3", initialCommands := """ import com.twitter.algebird._ """.stripMargin('|'), @@ -263,7 +263,7 @@ lazy val algebirdCore = module("core").settings( lazy val algebirdTest = module("test") .settings( testOptions in Test ++= Seq(Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "4")), - crossScalaVersions += "2.13.2", + crossScalaVersions += "2.13.3", libraryDependencies ++= Seq( "org.scalacheck" %% "scalacheck" % scalacheckVersion, @@ -291,14 +291,14 @@ lazy val algebirdBenchmark = module("benchmark") lazy val algebirdUtil = module("util") .settings( - crossScalaVersions += "2.13.2", + crossScalaVersions += "2.13.3", libraryDependencies ++= Seq("com.twitter" %% "util-core" % utilVersion) ) .dependsOn(algebirdCore, algebirdTest % "test->test") lazy val algebirdBijection = module("bijection") .settings( - crossScalaVersions += "2.13.2", + crossScalaVersions += "2.13.3", libraryDependencies += "com.twitter" %% "bijection-core" % bijectionVersion ) .dependsOn(algebirdCore, algebirdTest % "test->test") @@ -315,7 +315,7 @@ lazy val algebirdSpark = module("spark") lazy val algebirdGeneric = module("generic") .settings( - crossScalaVersions += "2.13.2", + crossScalaVersions += "2.13.3", libraryDependencies ++= Seq( "com.chuusai" %% "shapeless" % "2.3.3", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" From 4543b76084f103c88d6d6eb4613bd503e9097dc6 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 15 Jul 2020 01:27:17 +0200 Subject: [PATCH 076/306] Update sbt-scalafix to 0.9.19 (#843) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index c8ca60e5d..a6932083b 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -15,4 +15,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.4") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.18") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.19") From e2ecb270ad3261a9e7627288db73a8c44e948806 Mon Sep 17 00:00:00 2001 From: Jeff Sarnat Date: Wed, 15 Jul 2020 22:15:25 -0700 Subject: [PATCH 077/306] group for calculating Pearson's Correlation Coefficient (#829) * defined Correlation case class * semigroup * first tests, but they fail * tests pass * ok, it's a group * some more test stuff * tests around mean, stddev + a bug fix that the test found * correlation tests * scalafmt * fixed up comments, deleted dead code * deleted commented out line leftover from debugging * most of the review feedback * new generator for Correlation * rename left/right to x/y * scale * no longer have a conditional around 0 counts, generator now generates arbitrary values including those with higher moments; we now expect for negating a Correlation to negate the sign of its .correlation method * it's not a group after all * put zero check back in * scalfmt * scalafmtAll, rebased --- .../twitter/algebird/CorrelationMonoid.scala | 157 ++++++++++++++++++ .../com/twitter/algebird/BaseProperties.scala | 3 + .../algebird/scalacheck/Arbitrary.scala | 2 + .../com/twitter/algebird/scalacheck/Gen.scala | 42 +++++ 
.../twitter/algebird/CorrelationLaws.scala | 134 +++++++++++++++ 5 files changed, 338 insertions(+) create mode 100644 algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala create mode 100644 algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala diff --git a/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala new file mode 100644 index 000000000..fb2eae563 --- /dev/null +++ b/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala @@ -0,0 +1,157 @@ +package com.twitter.algebird + +object Correlation { + def apply(x: (Double, Double), weight: Double): Correlation = + Correlation(c2 = 0, m2x = 0, m2y = 0, m1x = x._1, m1y = x._2, weight) + + def apply(x: (Double, Double)): Correlation = + apply(x, 1.0) + + implicit val monoid: Monoid[Correlation] = CorrelationMonoid + + /** + * When combining averages, if the counts sizes are too close we + * should use a different algorithm. This constant defines how + * close the ratio of the smaller to the total count can be: + */ + private val STABILITY_CONSTANT = 0.1 + + /** + * Given two streams of doubles (weightN, an) and (weightK, ak) of form (weighted count, + * mean), calculates the mean of the combined stream. + * + * Uses a more stable online algorithm which should be suitable for + * large numbers of records similar to: + * http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm + * + * This differs from the implementation in MomentsGroup.scala only in that here, the counts are weighted, and are + * thus doubles instead of longs + */ + def getCombinedMean(weightN: Double, an: Double, weightK: Double, ak: Double): Double = + if (weightN < weightK) getCombinedMean(weightK, ak, weightN, an) + else + (weightN + weightK) match { + case 0.0 => 0.0 + case newCount if newCount == weightN => an + case newCount => + val scaling = weightK / newCount + // a_n + (a_k - a_n)*(k/(n+k)) is only stable if n is not approximately k + if (scaling < STABILITY_CONSTANT) (an + (ak - an) * scaling) + else (weightN * an + weightK * ak) / newCount + } + +} + +/** + * A class to calculate covariance and the first two central moments of a sequence of pairs of Doubles, from which the + * pearson correlation coeifficient can be calculated. + * + * m{i}x denotes the ith central moment of the first projection of the pair. + * m{i}y denotes the ith central moment of the second projection of the pair. + * c2 the covariance equivalent of the second central moment, i.e. c2 = Sum_(x,y) (x - m1x)*(y - m1y). 
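Correlation.getCombinedMean above merges two weighted means, switching to an incremental update when one weight is small relative to the total (the STABILITY_CONSTANT branch). The standalone copy below keeps the same logic so both branches can be checked numerically in a REPL; only the sample weights and means in main are made up.

```scala
// Standalone copy of Correlation.getCombinedMean from the new CorrelationMonoid.scala,
// kept identical so the stability behaviour can be exercised outside the patch.
object CombinedMeanSketch {
  private val STABILITY_CONSTANT = 0.1

  def getCombinedMean(weightN: Double, an: Double, weightK: Double, ak: Double): Double =
    if (weightN < weightK) getCombinedMean(weightK, ak, weightN, an)
    else
      (weightN + weightK) match {
        case 0.0                             => 0.0
        case newCount if newCount == weightN => an
        case newCount =>
          val scaling = weightK / newCount
          // a_n + (a_k - a_n)*(k/(n+k)) is only stable when k is small relative to n+k
          if (scaling < STABILITY_CONSTANT) an + (ak - an) * scaling
          else (weightN * an + weightK * ak) / newCount
      }

  def main(args: Array[String]): Unit = {
    // 3 observations averaging 10.0 merged with 1 observation of 2.0: (3*10 + 1*2)/4 = 8.0
    println(getCombinedMean(3.0, 10.0, 1.0, 2.0)) // 8.0
    // heavily unbalanced weights take the incremental branch and give the same answer
    println(getCombinedMean(1e6, 5.0, 1.0, 6.0)) // ~5.000001
  }
}
```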
+ */ +case class Correlation(c2: Double, m2x: Double, m2y: Double, m1x: Double, m1y: Double, m0: Double) { + def totalWeight: Double = m0 + + def meanX: Double = m1x + + def meanY: Double = m1y + + // variance, stddev, and covariance are for the population, not a sample + + def varianceX: Double = m2x / m0 + + def varianceY: Double = m2y / m0 + + def stddevX: Double = Math.sqrt(varianceX) + + def stddevY: Double = Math.sqrt(varianceY) + + def covariance: Double = c2 / totalWeight + + /** + * @return Pearson's correlation coefficient + */ + def correlation: Double = + // correlation is defined as: covariance / (varianceLeft * varianceRight) + // however, dividing by "count" cancels out, and leaves us with the following formula, which relies on fewer + // divisions + c2 / (Math.sqrt(m2x * m2y)) + + /** + * Assume this instance of Correlation came from summing together Correlation.apply((x_i, y_i)) for i in 1...n. + * + * @return (m, b) where y = mx + b is the line with the least squares fit of the points (x_i, y_i). + * See, e.g. https://mathworld.wolfram.com/LeastSquaresFitting.html. + */ + def linearLeastSquares: (Double, Double) = { + val m = c2 / m2x + val b = meanY - m * meanX + (m, b) + } + + def swap: Correlation = + Correlation(c2 = c2, m2x = m2y, m2y = m2x, m1x = m1y, m1y = m1x, m0 = m0) + + def distanceMetric: Double = math.sqrt(1.0 - correlation) + + def scale(z: Double): Correlation = + if (z < 0.0) // the "extraneous" if here is to avoid allocating the error message unless necessary + throw new IllegalArgumentException(s"cannot scale by negative value: $z") + else if (z == 0) + CorrelationMonoid.zero + else + Correlation(c2 = z * c2, m2x = z * m2x, m2y = z * m2y, m1x = m1x, m1y = m1y, m0 = z * m0) +} + +object CorrelationMonoid extends Monoid[Correlation] { + + /** + * The algorithm for combining the correlation calculations from two partitions of pairs of numbers. Comes from + * Pébay, Philippe (2008), "Formulas for Robust, One-Pass Parallel Computation of Covariances and Arbitrary-Order Statistical Moments", + * Technical Report SAND2008-6212, Sandia National Laboratories + * https://prod-ng.sandia.gov/techlib-noauth/access-control.cgi/2008/086212.pdf + * + * Extending this to weights can be found in + * Schubert, Erich; Gertz, Michael (9 July 2018). Numerically stable parallel computation of (co-)variance. + * ACM. p. 10. doi:10.1145/3221269.3223036. ISBN 9781450365055. 
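Putting the pieces above together: per-point Correlation values are merged with the monoid (using the covariance and moment updates cited from Pébay and Schubert & Gertz), and correlation and linearLeastSquares read the results back out. A small usage sketch, assuming the Correlation and CorrelationMonoid definitions introduced by this patch are on the classpath; the sample points (y = 3x + 1) are invented for illustration.

```scala
import com.twitter.algebird.{Correlation, Monoid}

object CorrelationUsageSketch {
  def main(args: Array[String]): Unit = {
    // an exactly linear relation, y = 3x + 1
    val points = (1 to 10).map(i => (i.toDouble, 3.0 * i + 1.0))

    // sum per-point Correlations with the implicit monoid from the companion object
    val summed = Monoid.sum(points.map(Correlation(_)))

    println(summed.correlation)        // 1.0 (up to floating point) for a perfectly linear relation
    println(summed.linearLeastSquares) // approximately (3.0, 1.0): recovers slope and intercept
  }
}
```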
+ * http://dl.acm.org/citation.cfm?id=3221269.3223036 + * https://dl.acm.org/doi/10.1145/3221269.3223036 + */ + override def plus(a: Correlation, b: Correlation): Correlation = { + val count = a.totalWeight + b.totalWeight + if (count == 0) + CorrelationMonoid.zero + else { + val prodSumRatio = a.totalWeight * b.totalWeight / count + + val m1x = Correlation.getCombinedMean(a.totalWeight, a.m1x, b.totalWeight, b.m1x) + val m1y = Correlation.getCombinedMean(a.totalWeight, a.m1y, b.totalWeight, b.m1y) + val deltaX = b.m1x - a.m1x + val deltaY = b.m1y - a.m1y + + val m2x = a.m2x + b.m2x + math.pow(deltaX, 2) * prodSumRatio + val m2y = + a.m2y + b.m2y + math.pow(deltaY, 2) * prodSumRatio + + val c2 = a.c2 + b.c2 + deltaX * deltaY * prodSumRatio + + Correlation(c2 = c2, m2x = m2x, m2y = m2y, m1x = m1x, m1y = m1y, m0 = count) + } + } + + override val zero = Correlation(0, 0, 0, 0, 0, 0) +} + +object CorrelationAggregator extends MonoidAggregator[(Double, Double), Correlation, Correlation] { + override def prepare(a: (Double, Double)): Correlation = Correlation(a) + override val monoid = CorrelationMonoid + override def present(c: Correlation): Correlation = c + + def correlation: MonoidAggregator[(Double, Double), Correlation, Double] = + this.andThenPresent(_.correlation) + + def covariance: MonoidAggregator[(Double, Double), Correlation, Double] = + this.andThenPresent(_.covariance) + +} diff --git a/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala b/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala index fdb83948b..ba62cb889 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala @@ -160,6 +160,9 @@ object BaseProperties extends MetricProperties { scala.math.abs(f1) < eps else (scala.math.abs(f1 - f2) / scala.math.abs(f2)) < eps + def approxEqOrBothNaN(eps: Double)(f1: Double, f2: Double): Boolean = + (f1.isNaN && f2.isNaN) || f1 == f2 || approxEq(eps)(f1, f2) + trait HigherEq[M[_]] { def apply[T](m: M[T], n: M[T]): Boolean } diff --git a/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Arbitrary.scala b/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Arbitrary.scala index 646998755..5b8dce4b3 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Arbitrary.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Arbitrary.scala @@ -56,4 +56,6 @@ object arbitrary extends ExpHistArb with IntervalArb { implicit val momentsArb: Arbitrary[Moments] = Arbitrary(genMoments) implicit val stringSpaceSaverArb: Arbitrary[SpaceSaver[String]] = Arbitrary(genStringSpaceSaver) + + implicit val correlationArb: Arbitrary[Correlation] = Arbitrary(genCorrelation) } diff --git a/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Gen.scala b/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Gen.scala index 4d04ffb04..c3043f8a9 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Gen.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Gen.scala @@ -69,4 +69,46 @@ object gen extends ExpHistGen with IntervalGen { def genSSManySpaceSaver: Gen[SpaceSaver[String]] = Gen.nonEmptyListOf(genFixedSSOneSpaceSaver).flatMap(l => l.reduce(_ ++ _)) + lazy val genCorrelation: Gen[Correlation] = { + val recur = Gen.lzy(genCorrelation) + + // we can start with any pair of numbers: + val genClose: Gen[Correlation] = for { + x <- choose(-1000, 1000) + delta <- 
choose(-100.0, 100.0) + } yield Correlation((x, x + delta)) + + val genUncorr: Gen[Correlation] = for { + x <- choose(-1e10, 1e10) + y <- choose(-1e10, 1e10) + } yield Correlation((x, y)) + + val genRandom: Gen[Correlation] = + for { + c2 <- choose(-1e10, 1e10) + m2x <- choose(0, 1e10) + m2y <- choose(0, 1e10) + m1x <- choose(-1e10, 1e10) + m1y <- choose(-1e10, 1e10) + m0 <- choose(-1e10, 1e10) + } yield Correlation( + c2 = c2, + m2x = m2x, + m2y = m2y, + m1x = m1x, + m1y = m1y, + m0 = m0 + ) + + val genSum = Gen.zip(recur, recur).map { case (a, b) => CorrelationMonoid.plus(a, b) } + // now return with a low probability of choosing the branching cases: + Gen + .frequency( + (5, genClose), + (5, genUncorr), + (1, genRandom), + (1, CorrelationMonoid.zero), + (1, genSum) //, + ) + } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala new file mode 100644 index 000000000..c92c83204 --- /dev/null +++ b/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala @@ -0,0 +1,134 @@ +package com.twitter.algebird + +import org.scalacheck.Prop.forAll +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec +import com.twitter.algebird.BaseProperties._ +import com.twitter.algebird.scalacheck.arbitrary._ + +object CorrelationLaws { + val EPS = 1e-10 + + def aggregateFunction(f: Double => Double): Aggregator[Double, Correlation, Double] = + CorrelationAggregator.correlation + .composePrepare[Double](x => (x, f(x))) + + val testList = Range.inclusive(-10, 10).map(_.toDouble).toList + + def corrApproxEq(corr1: Correlation, corr2: Correlation): Boolean = + approxEqOrBothNaN(EPS)(corr1.c2, corr2.c2) && + approxEqOrBothNaN(EPS)(corr1.m2x, corr2.m2x) && + approxEqOrBothNaN(EPS)(corr1.m2y, corr2.m2y) && + approxEqOrBothNaN(EPS)(corr1.m1x, corr2.m1x) && + approxEqOrBothNaN(EPS)(corr1.m1y, corr2.m1y) && + approxEqOrBothNaN(EPS)(corr1.m0, corr2.m0) + +} + +class CorrelationLaws extends CheckProperties { + + import CorrelationLaws._ + + property("Correlation monoid laws") { + implicit val equiv: Equiv[Correlation] = + Equiv.fromFunction(corrApproxEq) + monoidLaws[Correlation] + } + + property("Central moments 0 through 2 match implementation from Moments") { + forAll { (l: List[(Double, Double)]) => + val corr = Monoid.sum(l.map(Correlation.apply)) + val momentX = Monoid.sum(l.map { case (x, _) => Moments.apply(x) }) + val momentY = Monoid.sum(l.map { case (_, y) => Moments.apply(y) }) + approxEq(EPS)(corr.totalWeight, momentX.count) && + approxEq(EPS)(corr.totalWeight, momentY.count) && + approxEq(EPS)(corr.meanX, momentX.mean) && + approxEq(EPS)(corr.meanY, momentY.mean) && + (l.length < 2 || + (approxEqOrBothNaN(EPS)(corr.stddevX, momentX.stddev) && + approxEqOrBothNaN(EPS)(corr.stddevY, momentY.stddev))) + } + } + + property("central moments of a line return the slope of the line") { + // using arbitrary floating point values for this test yields far less numerical precision + forAll { (m: Int, b: Int) => + val calculatedCorrelation = aggregateFunction(x => m * x + b)(testList) + (m == 0.0 + || (m > 0.0 && approxEq(EPS)(calculatedCorrelation, 1.0)) + || (m < 0.0 && approxEq(EPS)(calculatedCorrelation, -1.0))) + } + } + + property("given points exactly on a straight line, least squares approximation finds slope and intercept") { + // using arbitrary floating point values for this test yields far less numerical precision + forAll { (m: Int, b: Int) => + val 
(mOut, bOut) = + CorrelationAggregator + .composePrepare[Double](x => (x, m * x + b)) + .andThenPresent(_.linearLeastSquares)(testList) + + approxEq(EPS)(m.toDouble, mOut) && approxEq(EPS)(b.toDouble, bOut) + } + } + + property("the swap method on moments works as you'd think") { + forAll { l: List[(Double, Double)] => + val swapped = CorrelationAggregator(l).swap + val reversedInput = CorrelationAggregator.composePrepare[(Double, Double)] { case (x, y) => (y, x) }(l) + corrApproxEq(swapped, reversedInput) + } + } + + property("scaling by 0 and 1 works as you'd expect") { + forAll { (corr: Correlation) => + corrApproxEq(corr.scale(0.0), CorrelationMonoid.zero) && + corr.scale(1.0) == corr + } + } + + property("scaling by a and b is the same as scaling by a*b; similarly for addition") { + // use Int here instead of doubles so that we don't have to worry about overlfowing to Infinity and having to + // fine-tune numerical precision thresholds. + forAll { (corr: Correlation, a0: Int, b0: Int) => + val a = a0 & Int.MaxValue + val b = b0 & Int.MaxValue + (corrApproxEq(corr.scale(a).scale(b), corr.scale(a.toDouble * b)) && + corrApproxEq(corr.scale(a.toDouble + b), CorrelationMonoid.plus(corr.scale(a), corr.scale(b)))) + } + } + + property("adding together scaled correlations is the same as scaling then adding") { + forAll { (corr1: Correlation, corr2: Correlation, z0: Int) => + val z = z0 & Int.MaxValue + val addThenScale = CorrelationMonoid.plus(corr1, corr2).scale(z) + val scaleThenAdd = CorrelationMonoid.plus(corr1.scale(z), corr2.scale(z)) + corrApproxEq(addThenScale, scaleThenAdd) + } + } + + property("scaling does affect total weight, doesn't affect mean, variance, or correlation") { + // def sign(x: Int): Int = if (x < 0) -1 else 1 + forAll { (corr: Correlation, a0: Int) => + val a = a0 & Int.MaxValue + val scaled = corr.scale(a.toDouble) + (a == 0.0) || + approxEqOrBothNaN(EPS)(scaled.totalWeight, corr.totalWeight * a) && + approxEqOrBothNaN(EPS)(scaled.meanX, corr.meanX) && + approxEqOrBothNaN(EPS)(scaled.meanY, corr.meanY) && + approxEqOrBothNaN(EPS)(scaled.varianceX, corr.varianceX) && + approxEqOrBothNaN(EPS)(scaled.varianceY, corr.varianceY) && + approxEqOrBothNaN(EPS)(scaled.correlation, corr.correlation) + } + + } +} + +class CorrelationTest extends AnyWordSpec with Matchers { + import CorrelationLaws._ + + "correlation with y = x*x should be 0" in { + aggregateFunction(x => x * x)(testList) should equal(0.0) + } + +} From d9688ac1e3105b7c7ae471e4b59eb3f29354e307 Mon Sep 17 00:00:00 2001 From: "P. 
Oscar Boykin" Date: Fri, 17 Jul 2020 18:08:46 -1000 Subject: [PATCH 078/306] Remove implicit from MomentsGroup and other cleanups (#844) * Remove implicit from MomentsGroup and other cleanups * optimize a few sumOption implementations * fix binary incompatibility, remove more warnings * improve deprecation message --- .../algebird/benchmark/HLLBenchmark.scala | 4 +- .../benchmark/HLLPresentBenchmark.scala | 5 +- .../benchmark/HllBatchCreateBenchmark.scala | 5 +- .../com/twitter/algebird/AveragedValue.scala | 12 +- .../com/twitter/algebird/BloomFilter.scala | 4 +- .../com/twitter/algebird/CMSHasher.scala | 6 +- .../twitter/algebird/CorrelationMonoid.scala | 60 ++++++++- .../com/twitter/algebird/CountMinSketch.scala | 8 +- .../scala/com/twitter/algebird/Group.scala | 14 +- .../com/twitter/algebird/HyperLogLog.scala | 3 +- .../scala/com/twitter/algebird/Interval.scala | 2 +- .../com/twitter/algebird/MomentsGroup.scala | 127 ++++++++++++++---- .../scala/com/twitter/algebird/Monoid.scala | 15 ++- .../com/twitter/algebird/Operators.scala | 19 ++- .../scala/com/twitter/algebird/Ring.scala | 41 +++++- .../com/twitter/algebird/MonadLaws.scala | 8 +- .../twitter/algebird/CorrelationLaws.scala | 4 +- .../algebird/DecayedVectorProperties.scala | 15 ++- .../twitter/algebird/HyperLogLogTest.scala | 15 ++- .../com/twitter/algebird/MomentsLaws.scala | 44 ++++-- build.sbt | 4 +- 21 files changed, 318 insertions(+), 97 deletions(-) diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLBenchmark.scala index 3e76e2eaf..3c3ea4e9d 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLBenchmark.scala @@ -2,7 +2,6 @@ package com.twitter.algebird package benchmark import scala.util.Random -import com.twitter.bijection._ import org.openjdk.jmh.annotations._ import org.openjdk.jmh.infra.Blackhole @@ -47,10 +46,9 @@ object HllBenchmark { val rng = new Random(3) - val byteEncoder = implicitly[Injection[Long, Array[Byte]]] def setSize = rng.nextInt(10) + 1 // 1 -> 10 def hll(elements: Set[Long]): HLL = - hllMonoid.batchCreate(elements)(byteEncoder) + hllMonoid.sum(elements.map(hllMonoid.toHLL[Long](_))) val inputIntermediate = (0L until numElements).map { _ => val setElements = (0 until setSize).map(_ => rng.nextInt(1000).toLong).toSet diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala index 98898e76e..15c446020 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala @@ -1,13 +1,10 @@ package com.twitter.algebird.benchmark import com.twitter.algebird.{DenseHLL, HLL, HyperLogLogMonoid, SparseHLL} -import com.twitter.bijection._ import org.openjdk.jmh.annotations._ import org.openjdk.jmh.infra.Blackhole object HLLPresentBenchmark { - implicit val byteEncoder = implicitly[Injection[Long, Array[Byte]]] - @State(Scope.Benchmark) class HLLPresentState { @@ -27,7 +24,7 @@ object HLLPresentBenchmark { val r = new scala.util.Random(12345L) data = (0 until numHLL).map { _ => val input = (0 until max).map(_ => r.nextLong).toSet - hllMonoid.batchCreate(input)(byteEncoder.toFunction) + 
hllMonoid.sum(input.map(hllMonoid.toHLL(_))) }.toIndexedSeq } } diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HllBatchCreateBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HllBatchCreateBenchmark.scala index 654da7b72..f625175e9 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HllBatchCreateBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HllBatchCreateBenchmark.scala @@ -2,11 +2,8 @@ package com.twitter.algebird.benchmark import org.openjdk.jmh.annotations._ import com.twitter.algebird.HyperLogLogMonoid -import com.twitter.bijection._ object HllBatchCreateBenchmark { - val byteEncoder = implicitly[Injection[Long, Array[Byte]]] - val byteEncoderFn = byteEncoder.toFunction @State(Scope.Benchmark) class HLLState { @@ -35,5 +32,5 @@ class HllBatchCreateBenchmark { @Benchmark def timeBatchCreate(state: HLLState) = - state.hllMonoid.batchCreate(state.set)(byteEncoderFn) + state.hllMonoid.sum(state.set.iterator.map(state.hllMonoid.toHLL(_))) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala b/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala index beb263c9e..81630d5a3 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala @@ -150,17 +150,17 @@ object AveragedGroup extends Group[AveragedValue] with CommutativeGroup[Averaged else { var count = 0L var average = 0.0 - iter.iterator.foreach { - case AveragedValue(c, v) => - average = getCombinedMean(count, average, c, v) - count += c + val it = iter.toIterator + while (it.hasNext) { + val av = it.next() + average = getCombinedMean(count, average, av.count, av.value) + count += av.count } Some(AveragedValue(count, average)) } /** - * @inheritdoc - * @see [[AveragedValue.+]] for the implementation + * combine two AveragedValue instances */ override def plus(l: AveragedValue, r: AveragedValue): AveragedValue = { val n = l.count diff --git a/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala b/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala index 43d30c52a..d0436c5bd 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala @@ -147,7 +147,7 @@ object BloomFilter { ): Approximate[Long] = { assert(0 <= approximationWidth && approximationWidth < 1, "approximationWidth must lie in [0, 1)") - /** + /* * s(n) is the expected number of bits that have been set to true after * n elements have been inserted into the Bloom filter. * This is \hat{S}(n) in the cardinality estimation paper used above. @@ -155,7 +155,7 @@ object BloomFilter { def s(n: Int): Double = width * (1 - scala.math.pow(1 - 1.0 / width, numHashes * n)) - /** + /* * sInverse(t) is the maximum likelihood value for the number of elements * that have been inserted into the Bloom filter when it has t bits set to true. * This is \hat{S}^{-1}(t) in the cardinality estimation paper used above. diff --git a/algebird-core/src/main/scala/com/twitter/algebird/CMSHasher.scala b/algebird-core/src/main/scala/com/twitter/algebird/CMSHasher.scala index c497f6f29..8d3c1069d 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/CMSHasher.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/CMSHasher.scala @@ -10,7 +10,7 @@ package com.twitter.algebird * count. 
Algebird ships with several such implicits for commonly used types `K` such as `Long` and `BigInt`. * * If your type `K` is not supported out of the box, you have two options: 1) You provide a "translation" function to - * convert items of your (unsupported) type `K` to a supported type such as [[Double]], and then use the `contramap` + * convert items of your (unsupported) type `K` to a supported type such as Double, and then use the `contramap` * function of [[CMSHasher]] to create the required `CMSHasher[K]` for your type (see the documentation of `contramap` * for an example); 2) You implement a `CMSHasher[K]` from scratch, using the existing CMSHasher implementations as a * starting point. @@ -31,7 +31,7 @@ trait CMSHasher[K] extends java.io.Serializable { * def hash(a: Int, b: Int, width: Int)(x: L): CMSHasher[L] = CMSHasher[K].hash(a, b, width)(f(x)) * }}} */ - def on[L](f: L => K) = new CMSHasher[L] { + def on[L](f: L => K): CMSHasher[L] = new CMSHasher[L] { override def hash(a: Int, b: Int, width: Int)(x: L): Int = self.hash(a, b, width)(f(x)) } @@ -59,7 +59,7 @@ trait CMSHasher[K] extends java.io.Serializable { * implicit val cmsHasherDouble: CMSHasher[Double] = CMSHasherArrayByte.contramap((d: Double) => f(d)) * }}} */ - def contramap[L](f: L => K) = on(f) + def contramap[L](f: L => K): CMSHasher[L] = on(f) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala index fb2eae563..63a09077d 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala @@ -120,7 +120,7 @@ object CorrelationMonoid extends Monoid[Correlation] { */ override def plus(a: Correlation, b: Correlation): Correlation = { val count = a.totalWeight + b.totalWeight - if (count == 0) + if (count == 0.0) CorrelationMonoid.zero else { val prodSumRatio = a.totalWeight * b.totalWeight / count @@ -141,11 +141,67 @@ object CorrelationMonoid extends Monoid[Correlation] { } override val zero = Correlation(0, 0, 0, 0, 0, 0) + + override def sumOption(cs: TraversableOnce[Correlation]): Option[Correlation] = + if (cs.isEmpty) None + else { + val iter = cs.toIterator + val item = iter.next() + + var m0 = item.m0 + var m1y = item.m1y + var m1x = item.m1x + var m2y = item.m2y + var m2x = item.m2x + var c2 = item.c2 + + while (iter.hasNext) { + + /* + * This is tested by monoidLaws to match plus + * we do this loop here to avoid allocating + * between each pair of Correlations + */ + val b = iter.next() + val m0New = m0 + b.m0 + + if (m0New == 0.0) { + m1y = 0.0 + m1x = 0.0 + m2y = 0.0 + m2x = 0.0 + c2 = 0.0 + } else { + val prodSumRatio = m0 * b.m0 / m0New + + val m1xNew = Correlation.getCombinedMean(m0, m1x, b.m0, b.m1x) + val m1yNew = Correlation.getCombinedMean(m0, m1y, b.m0, b.m1y) + val deltaX = b.m1x - m1x + val deltaY = b.m1y - m1y + + val m2xNew = m2x + b.m2x + math.pow(deltaX, 2) * prodSumRatio + val m2yNew = + m2y + b.m2y + math.pow(deltaY, 2) * prodSumRatio + + val c2New = c2 + b.c2 + deltaX * deltaY * prodSumRatio + + m1y = m1yNew + m1x = m1xNew + m2y = m2yNew + m2x = m2xNew + c2 = c2New + } + m0 = m0New + } + + if (m0 == 0.0) Some(zero) + else Some(Correlation(c2 = c2, m2x = m2x, m2y = m2y, m1x = m1x, m1y = m1y, m0 = m0)) + } } object CorrelationAggregator extends MonoidAggregator[(Double, Double), Correlation, Correlation] { override def prepare(a: (Double, Double)): Correlation = Correlation(a) - 
override val monoid = CorrelationMonoid + override def monoid: Monoid[Correlation] = CorrelationMonoid override def present(c: Correlation): Correlation = c def correlation: MonoidAggregator[(Double, Double), Correlation, Double] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala b/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala index 294b9e392..40aa88efc 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala @@ -60,7 +60,7 @@ import scala.collection.compat._ * @author Edwin Chen */ /** - * Monoid for adding [[CMS]] sketches. + * Monoid for adding CMS sketches. * * =Usage= * @@ -73,7 +73,7 @@ import scala.collection.compat._ * Algebird ships with several such implicits for commonly used types such as `Long` and `BigInt`. * * If your type `K` is not supported out of the box, you have two options: 1) You provide a "translation" function to - * convert items of your (unsupported) type `K` to a supported type such as [[Double]], and then use the `contramap` + * convert items of your (unsupported) type `K` to a supported type such as Double, and then use the `contramap` * function of [[CMSHasher]] to create the required `CMSHasher[K]` for your type (see the documentation of [[CMSHasher]] * for an example); 2) You implement a `CMSHasher[K]` from scratch, using the existing CMSHasher implementations as a * starting point. @@ -225,7 +225,7 @@ class CMSSummation[K](params: CMSParams[K]) { } /** - * An Aggregator for [[CMS]]. Can be created using [[CMS.aggregator]]. + * An Aggregator for [[CMS]]. Can be created using CMS.aggregator. */ case class CMSAggregator[K](cmsMonoid: CMSMonoid[K]) extends MonoidAggregator[K, CMS[K], CMS[K]] { override val monoid: CMSMonoid[K] = cmsMonoid @@ -1149,7 +1149,7 @@ case class HeavyHitter[K](item: K, count: Long) extends java.io.Serializable * Algebird ships with several such implicits for commonly used types such as `Long` and `BigInt`. * * If your type `K` is not supported out of the box, you have two options: 1) You provide a "translation" function to - * convert items of your (unsupported) type `K` to a supported type such as [[Double]], and then use the `contramap` + * convert items of your (unsupported) type `K` to a supported type such as Double, and then use the `contramap` * function of [[CMSHasher]] to create the required `CMSHasher[K]` for your type (see the documentation of [[CMSHasher]] * for an example); 2) You implement a `CMSHasher[K]` from scratch, using the existing CMSHasher implementations as a * starting point. diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Group.scala b/algebird-core/src/main/scala/com/twitter/algebird/Group.scala index eb74c2755..b82b77cdc 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Group.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Group.scala @@ -87,10 +87,16 @@ class OptionGroup[T](implicit group: Group[T]) extends OptionMonoid[T] with Grou * negate is defined as the negation of each element of the array. 
*/ class ArrayGroup[T: ClassTag](implicit grp: Group[T]) extends ArrayMonoid[T]() with Group[Array[T]] { - override def negate(g: Array[T]): Array[T] = - g.map { - grp.negate(_) - }.toArray + override def negate(g: Array[T]): Array[T] = { + val res = new Array[T](g.length) + var idx = 0 + while (idx < res.length) { + res(idx) = grp.negate(g(idx)) + idx = idx + 1 + } + + res + } } class FromAlgebraGroup[T](m: AGroup[T]) extends FromAlgebraMonoid(m) with Group[T] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala index 5eff4aa40..7cefd6afe 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala @@ -737,7 +737,8 @@ abstract class SetSizeAggregatorBase[A](hllBits: Int, maxSetSize: Int) case class SetSizeAggregator[A](hllBits: Int, maxSetSize: Int = 10)(implicit toBytes: A => Array[Byte]) extends SetSizeAggregatorBase[A](hllBits, maxSetSize) { - override def convert(set: Set[A]): HLL = leftSemigroup.batchCreate(set.map(toBytes)) + override def convert(set: Set[A]): HLL = + leftSemigroup.sum(set.iterator.map(a => leftSemigroup.toHLL(toBytes(a)))) } /** diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala b/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala index abbdc80ad..05b3c1489 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala @@ -58,7 +58,7 @@ object Interval extends java.io.Serializable { * [[leftOpenRightClosed]] can retain the type information of the * returned interval. The compiler doesn't know anything about * ordering, so without [[MaybeEmpty]] the only valid return type - * is [[Interval[T]]]. + * is Interval[T]. */ sealed abstract class MaybeEmpty[T, NonEmpty[t] <: Interval[t]] { def isEmpty: Boolean diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala index 0f75f2b53..d893757d2 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala @@ -16,7 +16,7 @@ limitations under the License. package com.twitter.algebird -import algebra.CommutativeGroup +import algebra.{CommutativeGroup, CommutativeMonoid} /** * A class to calculate the first five central moments over a sequence of Doubles. @@ -57,8 +57,13 @@ case class Moments(m0: Long, m1: Double, m2: Double, m3: Double, m4: Double) { } object Moments { - implicit val group: Group[Moments] with CommutativeGroup[Moments] = + @deprecated("use monoid[Moments], this isn't lawful for negate", "0.13.8") + def group: Group[Moments] with CommutativeGroup[Moments] = MomentsGroup + + implicit val momentsMonoid: Monoid[Moments] with CommutativeMonoid[Moments] = + new MomentsMonoid + val aggregator: MomentsAggregator.type = MomentsAggregator def numericAggregator[N](implicit num: Numeric[N]): MonoidAggregator[N, Moments, Moments] = @@ -73,10 +78,7 @@ object Moments { new Moments(m0, num.toDouble(m1), num.toDouble(m2), num.toDouble(m3), num.toDouble(m4)) } -/** - * A monoid to perform moment calculations. 
- */ -object MomentsGroup extends Group[Moments] with CommutativeGroup[Moments] { +class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { /** * When combining averages, if the counts sizes are too close we @@ -108,35 +110,108 @@ object MomentsGroup extends Group[Moments] with CommutativeGroup[Moments] { override val zero: Moments = Moments(0L, 0.0, 0.0, 0.0, 0.0) - override def negate(a: Moments): Moments = - Moments(-a.count, a.m1, -a.m2, -a.m3, -a.m4) - // Combines the moment calculations from two streams. // See http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Higher-order_statistics // for more information on the formulas used to update the moments. override def plus(a: Moments, b: Moments): Moments = { - val delta = b.mean - a.mean val countCombined = a.count + b.count - if (countCombined == 0) - return zero - val meanCombined = getCombinedMean(a.count, a.mean, b.count, b.mean) + if (countCombined == 0L) zero + else { + val delta = b.mean - a.mean + val meanCombined = getCombinedMean(a.count, a.mean, b.count, b.mean) + + val m2 = a.m2 + b.m2 + + math.pow(delta, 2) * a.count * b.count / countCombined + + val m3 = a.m3 + b.m3 + + math.pow(delta, 3) * a.count * b.count * (a.count - b.count) / math.pow(countCombined, 2) + + 3 * delta * (a.count * b.m2 - b.count * a.m2) / countCombined + + val m4 = a.m4 + b.m4 + + math.pow(delta, 4) * a.count * b.count * (math.pow(a.count, 2) - + a.count * b.count + math.pow(b.count, 2)) / math.pow(countCombined, 3) + + 6 * math.pow(delta, 2) * (math.pow(a.count, 2) * b.m2 + + math.pow(b.count, 2) * a.m2) / math.pow(countCombined, 2) + + 4 * delta * (a.count * b.m3 - b.count * a.m3) / countCombined + + Moments(countCombined, meanCombined, m2, m3, m4) + } + } - val m2 = a.m2 + b.m2 + - math.pow(delta, 2) * a.count * b.count / countCombined + override def sumOption(items: TraversableOnce[Moments]): Option[Moments] = + if (items.isEmpty) None + else { + val iter = items.toIterator + + val init = iter.next() + + var count: Long = init.count + var mean: Double = init.mean + var m2: Double = init.m2 + var m3: Double = init.m3 + var m4: Double = init.m4 + + while (iter.hasNext) { + + /* + * Unfortunately we copy the code in plus, but we do + * it to avoid allocating a new Moments on every item + * in the loop. 
the Monoid laws test that sum + * matches looping on plus + */ + val b = iter.next() + + val countCombined = count + b.count + + if (countCombined == 0L) { + mean = 0.0 + m2 = 0.0 + m3 = 0.0 + m4 = 0.0 + } else { + val delta = b.mean - mean + val meanCombined = getCombinedMean(count, mean, b.count, b.mean) + + val m2Combined = m2 + b.m2 + + math.pow(delta, 2) * count * b.count / countCombined + + val m3Combined = m3 + b.m3 + + math.pow(delta, 3) * count * b.count * (count - b.count) / math.pow(countCombined, 2) + + 3 * delta * (count * b.m2 - b.count * m2) / countCombined + + val m4Combined = m4 + b.m4 + + math.pow(delta, 4) * count * b.count * (math.pow(count, 2) - + count * b.count + math.pow(b.count, 2)) / math.pow(countCombined, 3) + + 6 * math.pow(delta, 2) * (math.pow(count, 2) * b.m2 + + math.pow(b.count, 2) * m2) / math.pow(countCombined, 2) + + 4 * delta * (count * b.m3 - b.count * m3) / countCombined + + mean = meanCombined + m2 = m2Combined + m3 = m3Combined + m4 = m4Combined + } + + count = countCombined + } - val m3 = a.m3 + b.m3 + - math.pow(delta, 3) * a.count * b.count * (a.count - b.count) / math.pow(countCombined, 2) + - 3 * delta * (a.count * b.m2 - b.count * a.m2) / countCombined + Some(Moments(count, mean, m2, m3, m4)) + } +} - val m4 = a.m4 + b.m4 + - math.pow(delta, 4) * a.count * b.count * (math.pow(a.count, 2) - - a.count * b.count + math.pow(b.count, 2)) / math.pow(countCombined, 3) + - 6 * math.pow(delta, 2) * (math.pow(a.count, 2) * b.m2 + - math.pow(b.count, 2) * a.m2) / math.pow(countCombined, 2) + - 4 * delta * (a.count * b.m3 - b.count * a.m3) / countCombined +/** + * This should not be used as a group (avoid negate and minus). It was wrongly + * believed that this was a group for several years in this code, however + * it was only being tested with positive counts (which is to say the generators + * were too weak). It isn't the case that minus and negate are totally wrong + * but (a - a) + b in general isn't associative: it won't equal a - (a - b) + * which it should. 
+ */ +@deprecated("use Moments.momentsMonoid, this isn't lawful for negative counts", "0.13.8") +object MomentsGroup extends MomentsMonoid with Group[Moments] with CommutativeGroup[Moments] { - Moments(countCombined, meanCombined, m2, m3, m4) - } + override def negate(a: Moments): Moments = + Moments(-a.count, a.m1, -a.m2, -a.m3, -a.m4) } object MomentsAggregator extends MonoidAggregator[Double, Moments, Moments] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala index 4a2aa6366..9dd04a5e5 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala @@ -54,7 +54,11 @@ trait Monoid[@specialized(Int, Long, Float, Double) T] } else { None } - override def sum(vs: TraversableOnce[T]): T = sumOption(vs).getOrElse(zero) + override def sum(vs: TraversableOnce[T]): T = { + val optT = sumOption(vs) + if (optT.isDefined) optT.get + else zero + } /** * These are from algebra.Monoid @@ -83,7 +87,7 @@ class OptionMonoid[T](implicit semi: Semigroup[T]) extends Monoid[Option[T]] { } override def sumOption(items: TraversableOnce[Option[T]]): Option[Option[T]] = if (items.isEmpty) None - else Some(semi.sumOption(items.filter(_.isDefined).map(_.get))) + else Some(semi.sumOption(items.toIterator.filter(_.isDefined).map(_.get))) } class EitherMonoid[L, R](implicit semigroupl: Semigroup[L], monoidr: Monoid[R]) @@ -148,8 +152,11 @@ class ArrayMonoid[T: ClassTag](implicit semi: Semigroup[T]) extends Monoid[Array val (longer, shorter) = if (left.length > right.length) (left, right) else (right, left) val sum = longer.clone - for (i <- shorter.indices) - sum.update(i, semi.plus(sum(i), shorter(i))) + var idx = 0 + while (idx < shorter.length) { + sum(idx) = semi.plus(longer(idx), shorter(idx)) + idx = idx + 1 + } sum } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Operators.scala b/algebird-core/src/main/scala/com/twitter/algebird/Operators.scala index 449c59f4f..5a619246b 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Operators.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Operators.scala @@ -16,23 +16,36 @@ limitations under the License. 
package com.twitter.algebird object Operators { - implicit def toPlus[T: Semigroup](t: T): PlusOp[T] = new PlusOp(t) - implicit def toMinus[T: Group](t: T): MinusOp[T] = new MinusOp(t) - implicit def toTimes[T: Ring](t: T): TimesOp[T] = new TimesOp(t) + @deprecated("use Operators.Ops", "0.13.8") + def toPlus[T: Semigroup](t: T): PlusOp[T] = new PlusOp(t) + @deprecated("use Operators.Ops", "0.13.8") + def toMinus[T: Group](t: T): MinusOp[T] = new MinusOp(t) + @deprecated("use Operators.Ops", "0.13.8") + def toTimes[T: Ring](t: T): TimesOp[T] = new TimesOp(t) + implicit def toRichTraversableFromIterator[T](t: Iterator[T]): RichTraversable[T] = new RichTraversable(t) implicit def toRichTraversable[T](t: Traversable[T]): RichTraversable[T] = new RichTraversable(t) + + implicit class Ops[A](private val a: A) extends AnyVal { + def +(other: A)(implicit sg: Semigroup[A]): A = sg.plus(a, other) + def -(other: A)(implicit g: Group[A]): A = g.minus(a, other) + def *(other: A)(implicit r: Ring[A]): A = r.times(a, other) + } } +@deprecated("use Operators.Ops", "0.13.8") class PlusOp[T: Semigroup](t: T) { def +(other: T): T = implicitly[Semigroup[T]].plus(t, other) } +@deprecated("use Operators.Ops", "0.13.8") class MinusOp[T: Group](t: T) { def -(other: T): T = implicitly[Group[T]].minus(t, other) } +@deprecated("use Operators.Ops", "0.13.8") class TimesOp[T: Ring](t: T) { def *(other: T): T = implicitly[Ring[T]].times(t, other) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala b/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala index a3037fc6f..99110c854 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala @@ -85,7 +85,10 @@ object IntRing extends Ring[Int] { override def times(l: Int, r: Int): Int = l * r override def sum(t: TraversableOnce[Int]): Int = { var sum = 0 - t.foreach(sum += _) + val iter = t.toIterator + while (iter.hasNext) { + sum += iter.next() + } sum } override def sumOption(t: TraversableOnce[Int]): Option[Int] = @@ -102,7 +105,10 @@ object ShortRing extends Ring[Short] { override def times(l: Short, r: Short): Short = (l * r).toShort override def sum(t: TraversableOnce[Short]): Short = { var sum = 0 - t.foreach(sum += _) + val iter = t.toIterator + while (iter.hasNext) { + sum += iter.next() + } sum.toShort } override def sumOption(t: TraversableOnce[Short]): Option[Short] = @@ -119,7 +125,10 @@ object LongRing extends Ring[Long] { override def times(l: Long, r: Long): Long = l * r override def sum(t: TraversableOnce[Long]): Long = { var sum = 0L - t.foreach(sum += _) + val iter = t.toIterator + while (iter.hasNext) { + sum += iter.next() + } sum } override def sumOption(t: TraversableOnce[Long]): Option[Long] = @@ -134,6 +143,19 @@ object FloatRing extends Ring[Float] { override def plus(l: Float, r: Float): Float = l + r override def minus(l: Float, r: Float): Float = l - r override def times(l: Float, r: Float): Float = l * r + + override def sumOption(t: TraversableOnce[Float]): Option[Float] = + if (t.isEmpty) None + else + Some { + var sum = 0.0 + val iter = t.toIterator + while (iter.hasNext) { + sum += iter.next().toDouble + } + + sum.toFloat + } } object DoubleRing extends Ring[Double] { @@ -143,6 +165,19 @@ object DoubleRing extends Ring[Double] { override def plus(l: Double, r: Double): Double = l + r override def minus(l: Double, r: Double): Double = l - r override def times(l: Double, r: Double): Double = l * r + + override def sumOption(t: 
TraversableOnce[Double]): Option[Double] = + if (t.isEmpty) None + else + Some { + var sum = 0.0 + val iter = t.toIterator + while (iter.hasNext) { + sum += iter.next() + } + + sum + } } object BooleanRing extends Ring[Boolean] { diff --git a/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala b/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala index ddfafef96..5f9e74f0b 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala @@ -88,7 +88,7 @@ object MonadLaws { forAll((t: T, fn: T => M[U]) => Equiv[M[U]].equiv(t.pure[M].flatMap(fn), fn(t))) def rightIdentity[M[_], T](implicit monad: Monad[M], arb: Arbitrary[M[T]], equiv: Equiv[M[T]]) = - forAll((mt: M[T]) => Equiv[M[T]].equiv(mt.flatMap(_.pure[M]), mt)) + forAll((mt: M[T]) => equiv.equiv(mt.flatMap(_.pure[M]), mt)) def associative[M[_], T, U, V](implicit monad: Monad[M], @@ -97,7 +97,7 @@ object MonadLaws { fn2: Arbitrary[U => M[V]], equiv: Equiv[M[V]] ) = forAll { (mt: M[T], f1: T => M[U], f2: U => M[V]) => - Equiv[M[V]].equiv(mt.flatMap(f1).flatMap(f2), mt.flatMap(t => f1(t).flatMap(f2))) + equiv.equiv(mt.flatMap(f1).flatMap(f2), mt.flatMap(t => f1(t).flatMap(f2))) } def monadLaws[M[_], T, U, R](implicit @@ -106,11 +106,13 @@ object MonadLaws { equivT: Equiv[M[T]], equivU: Equiv[M[U]], equivR: Equiv[M[R]], - fn1: Arbitrary[(T) => M[U]], + fn1: Arbitrary[T => M[U]], arbr: Arbitrary[M[R]], fn2: Arbitrary[U => M[R]], arbu: Arbitrary[U] ) = + // TODO: equivT and equivU are unused, only equivR is used + // but it would break binary compatibility to remove them associative[M, T, U, R] && rightIdentity[M, R] && leftIdentity[M, U, R] implicit def indexedSeqA[T](implicit arbl: Arbitrary[List[T]]): Arbitrary[IndexedSeq[T]] = diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala index c92c83204..cce19e113 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala @@ -75,7 +75,9 @@ class CorrelationLaws extends CheckProperties { property("the swap method on moments works as you'd think") { forAll { l: List[(Double, Double)] => val swapped = CorrelationAggregator(l).swap - val reversedInput = CorrelationAggregator.composePrepare[(Double, Double)] { case (x, y) => (y, x) }(l) + val fn: ((Double, Double)) => (Double, Double) = { tup => tup.swap } + + val reversedInput = CorrelationAggregator.composePrepare[(Double, Double)](fn)(l) corrApproxEq(swapped, reversedInput) } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/DecayedVectorProperties.scala b/algebird-test/src/test/scala/com/twitter/algebird/DecayedVectorProperties.scala index 143ccabda..a39fad94c 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/DecayedVectorProperties.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/DecayedVectorProperties.scala @@ -23,8 +23,8 @@ class DecayedVectorProperties extends CheckProperties { implicit val mpint: Arbitrary[DecayedVector[({ type x[a] = Map[Int, a] })#x]] = Arbitrary { for { - t <- Gen.choose(1e-5, 200.0) // Not too high so as to avoid numerical issues - m <- Arbitrary.arbitrary[Map[Int, Double]] + t <- Gen.choose(1e-4, 200.0) // Not too high so as to avoid numerical issues + m <- Gen.mapOf(Gen.zip(Gen.choose(0, 100), Gen.choose(-1e5, 1e5))) } yield DecayedVector.forMap(m, t) } @@ 
-33,8 +33,13 @@ class DecayedVectorProperties extends CheckProperties { a: DecayedVector[({ type x[a] = Map[Int, a] })#x], b: DecayedVector[({ type x[a] = Map[Int, a] })#x] ) = { - def beCloseTo(a: Double, b: Double, eps: Double = 1e-6) = - a == b || (math.abs(a - b) / math.abs(a)) < eps || (a.isInfinite && b.isInfinite) || a.isNaN || b.isNaN + + def beCloseTo(a: Double, b: Double, eps: Double = 1e-5) = + a == b || + ((2.0 * math.abs(a - b)) / (math.abs(a) + math.abs(b))) < eps || + (a.isInfinite && b.isInfinite) || + (a.isNaN && b.isNaN) + val mapsAreClose = (a.vector.keySet ++ b.vector.keySet).forall { key => (a.vector.get(key), b.vector.get(key)) match { case (Some(aVal), Some(bVal)) => beCloseTo(aVal, bVal) @@ -47,7 +52,7 @@ class DecayedVectorProperties extends CheckProperties { mapsAreClose && timesAreClose } - property("DecayedVector[Map[Int, _]] is a monoid") { + property("DecayedVector[Map[Int, *]] is a monoid") { implicit val equiv = Equiv.fromFunction(decayedMapEqFn) monoidLaws[DecayedVector[({ type x[a] = Map[Int, a] })#x]] } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala index ae50c1f07..0d137ca72 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala @@ -164,7 +164,7 @@ class HLLIntersectionProperty[T: Hash128: Gen](bits: Int, numHlls: Int) extends * SetSizeAggregator should work as an aggregator and return * approximate size when > maxSetSize */ -abstract class SetSizeAggregatorProperty[T](bits: Int) extends ApproximateProperty { +abstract class SetSizeAggregatorProperty[T] extends ApproximateProperty { type Exact = Set[T] type Approx = Long @@ -178,7 +178,7 @@ abstract class SetSizeAggregatorProperty[T](bits: Int) extends ApproximateProper def exactResult(set: Set[T], i: Unit) = set.size } -abstract class SmallSetSizeAggregatorProperty[T: Gen](bits: Int) extends SetSizeAggregatorProperty[T](bits) { +abstract class SmallSetSizeAggregatorProperty[T: Gen] extends SetSizeAggregatorProperty[T] { def exactGenerator: Gen[Set[T]] = for { size <- Gen.choose(maxSetSize + 1, maxSetSize * 2) @@ -189,7 +189,7 @@ abstract class SmallSetSizeAggregatorProperty[T: Gen](bits: Int) extends SetSize Approximate.exact(aggResult.toDouble) } -abstract class LargeSetSizeAggregatorProperty[T: Gen](bits: Int) extends SetSizeAggregatorProperty[T](bits) { +abstract class LargeSetSizeAggregatorProperty[T: Gen](bits: Int) extends SetSizeAggregatorProperty[T] { def exactGenerator: Gen[Set[T]] = for { size <- Gen.choose(1, maxSetSize) @@ -203,7 +203,7 @@ abstract class LargeSetSizeAggregatorProperty[T: Gen](bits: Int) extends SetSize } class SmallBytesSetSizeAggregatorProperty[T <% Array[Byte]: Gen](bits: Int) - extends SmallSetSizeAggregatorProperty[T](bits) { + extends SmallSetSizeAggregatorProperty[T] { def makeApproximate(s: Set[T]): Long = SetSizeAggregator[T](bits, maxSetSize).apply(s) } @@ -215,7 +215,7 @@ class LargeBytesSetSizeAggregatorProperty[T <% Array[Byte]: Gen](bits: Int) } class SmallSetSizeHashAggregatorProperty[T: Hash128: Gen](bits: Int) - extends SmallSetSizeAggregatorProperty[T](bits) { + extends SmallSetSizeAggregatorProperty[T] { def makeApproximate(s: Set[T]): Long = SetSizeHashAggregator[T](bits, maxSetSize).apply(s) } @@ -362,7 +362,10 @@ class HyperLogLogTest extends AnyWordSpec with Matchers { val partialSums = data.foldLeft(IndexedSeq(mon.zero)) { (seq, value) 
=> seq :+ (seq.last + mon.create(value)) } - (1 to 200).map(n => assert(partialSums(n) == mon.batchCreate(data.slice(0, n)))) + (1 to 200).map { n => + val bc = mon.sum(data.slice(0, n).map(mon.toHLL(_))) + assert(partialSums(n) == bc) + } } "work as an Aggregator and return a HLL" in { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala index 115ca7600..839e5f99b 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala @@ -1,24 +1,48 @@ package com.twitter.algebird import com.twitter.algebird.BaseProperties._ -import com.twitter.algebird.scalacheck.arbitrary._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec +import org.scalacheck.{Arbitrary, Gen} class MomentsLaws extends CheckProperties { val EPS = 1e-10 + implicit val equiv: Equiv[Moments] = + Equiv.fromFunction { (ml, mr) => + (ml.m0 == mr.m0) && + approxEq(EPS)(ml.m1, mr.m1) && + approxEq(EPS)(ml.m2, mr.m2) && + approxEq(EPS)(ml.m3, mr.m3) && + approxEq(EPS)(ml.m4, mr.m4) + } + + def opBasedGen[A: Numeric](genA: Gen[A]): Gen[Moments] = { + val init: Gen[Moments] = genA.map(Moments(_)) + + val recur = Gen.lzy(opBasedGen[A](genA)) + val pair = Gen.zip(recur, recur) + + import Operators.Ops + + Gen.frequency( + (10, init), + (1, pair.map { case (a, b) => a + b }) + ) + } + property("Moments Group laws") { - implicit val equiv: Equiv[Moments] = - Equiv.fromFunction { (ml, mr) => - (ml.m0 == mr.m0) && - approxEq(EPS)(ml.m1, mr.m1) && - approxEq(EPS)(ml.m2, mr.m2) && - approxEq(EPS)(ml.m3, mr.m3) && - approxEq(EPS)(ml.m4, mr.m4) - } + import com.twitter.algebird.scalacheck.arbitrary.momentsArb + implicit val group: Group[Moments] = MomentsGroup groupLaws[Moments] } + + property("Moments laws tested with operational generation") { + implicit val arbMom: Arbitrary[Moments] = + Arbitrary(opBasedGen[Double](Gen.choose(-1e10, 1e10))) + + monoidLaws[Moments] + } } class MomentsTest extends AnyWordSpec with Matchers { @@ -30,7 +54,7 @@ class MomentsTest extends AnyWordSpec with Matchers { * the list's central moments. */ def getMoments(xs: List[Double]): Moments = - xs.foldLeft(MomentsGroup.zero)((m, x) => MomentsGroup.plus(m, Moments(x))) + MomentsAggregator(xs) "Moments should count" in { val m1 = getMoments(List(1, 2, 3, 4, 5)) diff --git a/build.sbt b/build.sbt index 046768359..fd01bf251 100644 --- a/build.sbt +++ b/build.sbt @@ -203,12 +203,12 @@ lazy val mimaSettings = Def.settings( * This returns the previous jar we released that is compatible with * the current. 
*/ -val noBinaryCompatCheck = Set[String]("benchmark", "caliper", "generic", "spark") +val noBinaryCompatCheck = Set[String]("benchmark", "caliper", "spark") def previousVersion(subProj: String) = Some(subProj) .filterNot(noBinaryCompatCheck.contains) - .map(s => "com.twitter" %% ("algebird-" + s) % "0.13.5") + .map(s => "com.twitter" %% ("algebird-" + s) % "0.13.7") lazy val algebird = Project(id = "algebird", base = file(".")) .settings(sharedSettings) From ced20e017c56437bdd9b9fbe6c98cbc7c3ccfd70 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sat, 18 Jul 2020 08:13:14 +0100 Subject: [PATCH 079/306] Update scala to 2.12.12 --- .travis.yml | 4 ++-- README.md | 2 +- build.sbt | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index be56c0358..1230f0266 100644 --- a/.travis.yml +++ b/.travis.yml @@ -26,7 +26,7 @@ matrix: "++$TRAVIS_SCALA_VERSION clean" \ "++$TRAVIS_SCALA_VERSION test" - - scala: 2.12.11 + - scala: 2.12.12 jdk: openjdk8 before_install: - export PATH=${PATH}:./vendor/bundle @@ -40,7 +40,7 @@ matrix: "++$TRAVIS_SCALA_VERSION docs/makeMicrosite" \ "++$TRAVIS_SCALA_VERSION mimaReportBinaryIssues" - - scala: 2.12.11 + - scala: 2.12.12 jdk: openjdk11 script: sbt \ "++$TRAVIS_SCALA_VERSION clean" \ diff --git a/README.md b/README.md index f23d26357..2b509b1be 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ See the [Algebird website](https://twitter.github.io/algebird) for more informat ```scala > sbt algebird-core/console -Welcome to Scala 2.12.11 (OpenJDK 64-Bit Server VM, Java 11.0.1). +Welcome to Scala 2.12.12 (OpenJDK 64-Bit Server VM, Java 11.0.1). Type in expressions for evaluation. Or try :help. scala> import com.twitter.algebird._ diff --git a/build.sbt b/build.sbt index fd01bf251..5b1314633 100644 --- a/build.sbt +++ b/build.sbt @@ -36,7 +36,7 @@ def isScala213x(scalaVersion: String) = scalaBinaryVersion(scalaVersion) == "2.1 val sharedSettings = Seq( organization := "com.twitter", - scalaVersion := "2.12.11", + scalaVersion := "2.12.12", crossScalaVersions := Seq("2.11.12", scalaVersion.value), resolvers ++= Seq( Opts.resolver.sonatypeSnapshots, From 2459faec426dc69ad59de7277b511f4c943e4156 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 21 Jul 2020 03:33:52 +0200 Subject: [PATCH 080/306] Update scalafmt-core to 2.6.4 (#847) --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 8aab84ea9..3ec49cbeb 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=2.6.3 +version=2.6.4 maxColumn = 110 docstrings = JavaDoc newlines.alwaysBeforeMultilineDef = false From 0a0064750301db48b3a2999679fb7ca20482a789 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Tue, 21 Jul 2020 19:37:33 +0100 Subject: [PATCH 081/306] Add explicit return type rule (#846) * Add explicit return type rule * Tweak scalafix * fix scalafmt --- .scalafix.conf | 5 ++ .../benchmark/AsyncSummerBenchmark.scala | 26 +++---- .../benchmark/AveragedValueBenchmark.scala | 4 +- .../BloomFilterDistanceBenchmark.scala | 4 +- .../benchmark/CMSHashingBenchmark.scala | 4 +- .../algebird/benchmark/HLLBenchmark.scala | 6 +- .../benchmark/HLLPresentBenchmark.scala | 2 +- .../benchmark/HllBatchCreateBenchmark.scala | 3 +- .../algebird/benchmark/QTreeBenchmark.scala | 8 +-- .../benchmark/QTreeMicroBenchmark.scala | 4 +- .../algebird/benchmark/TopCMSBenchmark.scala | 12 ++-- .../algebird/benchmark/Tuple4Benchmark.scala | 8 +-- 
.../com/twitter/algebird/compat.scala | 6 +- .../twitter/algebird/macros/MacroCompat.scala | 6 +- .../com/twitter/algebird/AdaptiveVector.scala | 2 +- .../com/twitter/algebird/BloomFilter.scala | 2 +- .../twitter/algebird/CorrelationMonoid.scala | 2 +- .../com/twitter/algebird/CountMinSketch.scala | 2 +- .../com/twitter/algebird/DecayedVector.scala | 11 ++- .../com/twitter/algebird/Eventually.scala | 10 +-- .../scala/com/twitter/algebird/Hash128.scala | 2 +- .../com/twitter/algebird/HyperLogLog.scala | 4 +- .../scala/com/twitter/algebird/Interval.scala | 4 +- .../com/twitter/algebird/JavaMonoids.scala | 4 +- .../scala/com/twitter/algebird/Metric.scala | 30 ++++---- .../com/twitter/algebird/MinHasher.scala | 4 +- .../scala/com/twitter/algebird/Monoid.scala | 4 +- .../scala/com/twitter/algebird/Preparer.scala | 10 +-- .../scala/com/twitter/algebird/QTree.scala | 4 +- .../com/twitter/algebird/RightFolded.scala | 2 +- .../com/twitter/algebird/RightFolded2.scala | 6 +- .../scala/com/twitter/algebird/Ring.scala | 20 +++--- .../com/twitter/algebird/SGDMonoid.scala | 4 +- .../com/twitter/algebird/SummingCache.scala | 2 +- .../com/twitter/algebird/VectorSpace.scala | 6 +- .../twitter/algebird/monad/EitherMonad.scala | 2 +- .../algebird/monad/StateWithError.scala | 4 +- .../mutable/PriorityQueueAggregator.scala | 2 +- .../mutable/PriorityQueueMonoid.scala | 2 +- .../scala/com/twitter/algebird/package.scala | 2 +- .../GaussianDistributionMonoid.scala | 2 +- .../statistics/IterCallStatistics.scala | 2 +- .../twitter/algebird/generic/Instances.scala | 2 +- .../com/twitter/algebird/spark/package.scala | 2 +- .../algebird/spark/AlgebirdRDDTests.scala | 2 +- .../com/twitter/algebird/BaseProperties.scala | 2 +- .../algebird/BaseVectorSpaceProperties.scala | 15 ++-- .../com/twitter/algebird/MonadLaws.scala | 19 ++--- .../com/twitter/algebird/AggregatorLaws.scala | 3 +- .../algebird/AppendAggregatorTest.scala | 4 +- .../twitter/algebird/ApproximateTest.scala | 8 +-- .../twitter/algebird/AveragedValueLaws.scala | 3 +- .../com/twitter/algebird/BatchedTest.scala | 2 +- .../twitter/algebird/BloomFilterTest.scala | 41 +++++------ .../algebird/CollectionSpecification.scala | 16 +++-- .../twitter/algebird/CorrelationLaws.scala | 4 +- .../twitter/algebird/CountMinSketchTest.scala | 70 +++++++++---------- .../twitter/algebird/DecayedValueLaws.scala | 7 +- .../algebird/DecayedVectorProperties.scala | 2 +- .../twitter/algebird/DecayingCMSTest.scala | 2 +- .../com/twitter/algebird/EventuallyTest.scala | 24 +++---- .../scala/com/twitter/algebird/FoldTest.scala | 6 +- .../algebird/HyperLogLogSeriesTest.scala | 12 ++-- .../twitter/algebird/HyperLogLogTest.scala | 55 ++++++++------- .../com/twitter/algebird/IntervalLaws.scala | 2 +- .../com/twitter/algebird/JavaBoxedTests.scala | 13 ++-- .../scala/com/twitter/algebird/MaxLaws.scala | 13 ++-- .../twitter/algebird/MetricProperties.scala | 8 +-- .../com/twitter/algebird/MinHasherTest.scala | 16 +++-- .../scala/com/twitter/algebird/MinLaws.scala | 7 +- .../algebird/MinMaxAggregatorSpec.scala | 4 +- .../com/twitter/algebird/MomentsLaws.scala | 2 +- .../twitter/algebird/MonadInstanceLaws.scala | 2 +- .../algebird/NumericAlgebraTests.scala | 3 +- .../algebird/NumericSpecification.scala | 13 ++-- .../com/twitter/algebird/QTreeTest.scala | 11 +-- .../twitter/algebird/RightFolded2Test.scala | 6 +- .../twitter/algebird/RightFoldedTest.scala | 3 +- .../scala/com/twitter/algebird/SGDTest.scala | 20 +++--- .../com/twitter/algebird/SketchMapTest.scala | 21 +++--- 
.../algebird/SummingIteratorTest.scala | 2 +- .../twitter/algebird/SummingQueueTest.scala | 15 ++-- .../com/twitter/algebird/TopKTests.scala | 12 ++-- .../algebird/TupleAggregatorsTest.scala | 8 +-- .../algebird/VectorSpaceProperties.scala | 4 +- .../com/twitter/algebird/WindowLawsTest.scala | 2 +- .../GaussianDistributionMonoidTests.scala | 2 +- .../algebird/statistics/StatisticsTests.scala | 12 ++-- .../algebird/util/PromiseLinkMonoid.scala | 6 +- .../twitter/algebird/util/TunnelMonoid.scala | 6 +- .../algebird/util/summer/AsyncListSum.scala | 4 +- .../algebird/util/summer/AsyncSummer.scala | 12 ++-- .../summer/HeavyHittersCachingSummer.scala | 14 ++-- .../util/TunnelMonoidProperties.scala | 4 +- .../util/summer/AsyncSummerLaws.scala | 16 +++-- .../algebird/util/summer/Counter.scala | 2 +- 96 files changed, 431 insertions(+), 384 deletions(-) diff --git a/.scalafix.conf b/.scalafix.conf index 999c57287..4e3b87d29 100644 --- a/.scalafix.conf +++ b/.scalafix.conf @@ -1,4 +1,9 @@ rules = [ RemoveUnused ProcedureSyntax + ExplicitResultTypes ] + +ExplicitResultTypes.memberKind = [Def, Val, Var] +ExplicitResultTypes.memberVisibility = [Public] +ExplicitResultTypes.skipSimpleDefinitions = ['Lit', 'Term.New'] diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/AsyncSummerBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/AsyncSummerBenchmark.scala index 049bf6fe3..a7c80896c 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/AsyncSummerBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/AsyncSummerBenchmark.scala @@ -11,13 +11,15 @@ import com.twitter.bijection._ import com.twitter.util.{Await, Duration, FuturePool} import scala.util.Random +import com.twitter.util.ExecutorServiceFuturePool +import java.util.concurrent.ExecutorService object AsyncSummerBenchmark { - val flushFrequency = FlushFrequency(Duration.fromMilliseconds(100)) - val memoryFlushPercent = MemoryFlushPercent(80.0f) - val executor = Executors.newFixedThreadPool(4) - val workPool = FuturePool(executor) - implicit val hllMonoid = new HyperLogLogMonoid(24) + val flushFrequency: FlushFrequency = FlushFrequency(Duration.fromMilliseconds(100)) + val memoryFlushPercent: MemoryFlushPercent = MemoryFlushPercent(80.0f) + val executor: ExecutorService = Executors.newFixedThreadPool(4) + val workPool: ExecutorServiceFuturePool = FuturePool(executor) + implicit val hllMonoid: HyperLogLogMonoid = new HyperLogLogMonoid(24) def hll[T](t: T)(implicit monoid: HyperLogLogMonoid, inj: Injection[T, Array[Byte]]): HLL = monoid.create(inj(t)) @@ -137,19 +139,19 @@ class AsyncSummerBenchmark { import AsyncSummerBenchmark._ @inline - def fn(state: SummerState, summer: AsyncSummer[(Long, HLL), Map[Long, HLL]]) = { + def fn(state: SummerState, summer: AsyncSummer[(Long, HLL), Map[Long, HLL]]): Map[Long, HLL] = { val batch = Random.nextInt(state.batchCount) Await.result(summer.addAll(state.inputItems(batch))) } - def timeAsyncNonCompactListSum(state: SummerState) = + def timeAsyncNonCompactListSum(state: SummerState): Map[Long, HLL] = fn(state, state.asyncNonCompactListSum) - def timeAsyncCompactListSum(state: SummerState) = + def timeAsyncCompactListSum(state: SummerState): Map[Long, HLL] = fn(state, state.asyncCompactListSum) - def timeAsyncMapSum(state: SummerState) = fn(state, state.asyncMapSum) - def timeSyncSummingQueue(state: SummerState) = + def timeAsyncMapSum(state: SummerState): Map[Long, HLL] = fn(state, state.asyncMapSum) + 
def timeSyncSummingQueue(state: SummerState): Map[Long, HLL] = fn(state, state.syncSummingQueue) - def timeNullSummer(state: SummerState) = fn(state, state.nullSummer) + def timeNullSummer(state: SummerState): Map[Long, HLL] = fn(state, state.nullSummer) } @@ -160,7 +162,7 @@ case class Counter(name: String) extends Incrementor { override def incrBy(amount: Long): Unit = counter.addAndGet(amount) - def size = counter.get() + def size: Long = counter.get() override def toString: String = s"$name: size:$size" } diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/AveragedValueBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/AveragedValueBenchmark.scala index fa059e370..362ae9b45 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/AveragedValueBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/AveragedValueBenchmark.scala @@ -24,10 +24,10 @@ class AveragedValueBenchmark { import AveragedGroup.{plus, sumOption} @Benchmark - def timePlus(state: AVState, bh: Blackhole) = + def timePlus(state: AVState, bh: Blackhole): Unit = bh.consume(state.inputData.reduce(plus(_, _))) @Benchmark - def timeSumOption(state: AVState, bh: Blackhole) = + def timeSumOption(state: AVState, bh: Blackhole): Unit = bh.consume(sumOption(state.inputData)) } diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/BloomFilterDistanceBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/BloomFilterDistanceBenchmark.scala index aaf9ba945..b53d3a091 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/BloomFilterDistanceBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/BloomFilterDistanceBenchmark.scala @@ -28,9 +28,9 @@ object BloomFilterDistanceBenchmark { class BloomFilterState { val nbrOfElements: Int = 1000 - val falsePositiveRate = 0.01 + val falsePositiveRate: Double = 0.01 - def randomElements = + def randomElements: Seq[String] = BloomFilterCreateBenchmark.createRandomString(nbrOfElements, 10) val emptyBF1: BF[String] = diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSHashingBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSHashingBenchmark.scala index a4e7cca8e..b5e810bc7 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSHashingBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSHashingBenchmark.scala @@ -89,10 +89,10 @@ class CMSHashingBenchmark { h } - def timeBrokenCurrentHashWithRandomMaxBitsNumbers(state: CMSState) = + def timeBrokenCurrentHashWithRandomMaxBitsNumbers(state: CMSState): Unit = state.inputs.foreach(input => brokenCurrentHash(state.a, state.b, state.width)(input)) - def timeMurmurHashScalaWithRandomMaxBitsNumbers(state: CMSState) = + def timeMurmurHashScalaWithRandomMaxBitsNumbers(state: CMSState): Unit = state.inputs.foreach(input => murmurHashScala(state.a, state.b, state.width)(input)) } diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLBenchmark.scala index 3c3ea4e9d..013ea881d 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLBenchmark.scala @@ -62,13 +62,13 @@ object HllBenchmark { class 
HllBenchmark { import HllBenchmark._ @Benchmark - def timeSumOption(state: HLLState, bh: Blackhole) = + def timeSumOption(state: HLLState, bh: Blackhole): Unit = state.inputData.foreach(vals => bh.consume(state.hllMonoid.sumOption(vals))) @Benchmark - def timeOldSumOption(state: HLLState, bh: Blackhole) = + def timeOldSumOption(state: HLLState, bh: Blackhole): Unit = state.inputData.foreach(d => bh.consume(state.oldHllMonoid.sumOption(d))) @Benchmark - def timePlus(state: HLLState, bh: Blackhole) = + def timePlus(state: HLLState, bh: Blackhole): Unit = state.inputData.foreach(vals => bh.consume(vals.reduce(state.hllMonoid.plus(_, _)))) } diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala index 15c446020..b0eee084f 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala @@ -41,6 +41,6 @@ class HLLPresentBenchmark { } @Benchmark - def timeBatchCreate(state: HLLPresentState, bh: Blackhole) = + def timeBatchCreate(state: HLLPresentState, bh: Blackhole): Unit = state.data.foreach(hll => bh.consume(clone(hll).approximateSize)) } diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HllBatchCreateBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HllBatchCreateBenchmark.scala index f625175e9..e201ed538 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HllBatchCreateBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HllBatchCreateBenchmark.scala @@ -2,6 +2,7 @@ package com.twitter.algebird.benchmark import org.openjdk.jmh.annotations._ import com.twitter.algebird.HyperLogLogMonoid +import com.twitter.algebird.HLL object HllBatchCreateBenchmark { @@ -31,6 +32,6 @@ class HllBatchCreateBenchmark { import HllBatchCreateBenchmark._ @Benchmark - def timeBatchCreate(state: HLLState) = + def timeBatchCreate(state: HLLState): HLL = state.hllMonoid.sum(state.set.iterator.map(state.hllMonoid.toHLL(_))) } diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeBenchmark.scala index 0a7bfce37..afbe4011f 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeBenchmark.scala @@ -57,15 +57,15 @@ object QTreeBenchmark { class QTreeBenchmark { import QTreeBenchmark._ @Benchmark - def timeSumOptionUnit(state: QTreeState) = + def timeSumOptionUnit(state: QTreeState): Option[QTree[Unit]] = state.qtreeUnit.sumOption(state.inputDataUnit) @Benchmark - def timeSumOptionLong(state: QTreeState) = + def timeSumOptionLong(state: QTreeState): Option[QTree[Long]] = state.qtreeLong.sumOption(state.inputDataLong) @Benchmark - def timeSumOptionDouble(state: QTreeState) = + def timeSumOptionDouble(state: QTreeState): Option[QTree[Double]] = state.qtreeDouble.sumOption(state.inputDataDouble) @Benchmark @@ -81,7 +81,7 @@ class QTreeBenchmark { state.inputDataDouble.tail.reduce(state.qtreeDouble.plus) @Benchmark - def timeQuantileBounds(state: QTreeState) = { + def timeQuantileBounds(state: QTreeState): Seq[(Double, Double)] = { state.inputDataUnit.map(_.quantileBounds(0)) 
state.inputDataUnit.map(_.quantileBounds(0.25)) state.inputDataUnit.map(_.quantileBounds(0.5)) diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeMicroBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeMicroBenchmark.scala index ba2edd112..ac9111df4 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeMicroBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/QTreeMicroBenchmark.scala @@ -67,7 +67,7 @@ class QTreeMicroBenchmark { import QTreeMicroBenchmark._ @Benchmark - def doubleBranch(state: QTreeMicroState) = { + def doubleBranch(state: QTreeMicroState): Unit = { val iter = state.inputDataLong.toIterator while (iter.hasNext) { extendToLevelDoubleBranch(iter.next, state.extendToLevel) @@ -75,7 +75,7 @@ class QTreeMicroBenchmark { } @Benchmark - def singleBranch(state: QTreeMicroState) = { + def singleBranch(state: QTreeMicroState): Unit = { val iter = state.inputDataLong.toIterator while (iter.hasNext) { extendToLevelSingleBranch(iter.next, state.extendToLevel) diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/TopCMSBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/TopCMSBenchmark.scala index 2ed20543d..72628c2ef 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/TopCMSBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/TopCMSBenchmark.scala @@ -72,26 +72,26 @@ class TopCMSBenchmark { import TopCMSBenchmark._ @Benchmark - def sumSmallLongTopCms(st: CMSState) = + def sumSmallLongTopCms(st: CMSState): TopCMS[Long] = sumTopCmsVector(st.smallLongs, st.cmsLongMonoid) @Benchmark - def sumSmallBigIntTopCms(st: CMSState) = + def sumSmallBigIntTopCms(st: CMSState): TopCMS[BigInt] = sumTopCmsVector(st.smallBigInts, st.cmsBigIntMonoid) @Benchmark - def sumLargeBigIntTopCms(st: CMSState) = + def sumLargeBigIntTopCms(st: CMSState): TopCMS[BigInt] = sumTopCmsVector(st.largeBigInts, st.cmsBigIntMonoid) @Benchmark - def sumSmallBigDecimalTopCms(st: CMSState) = + def sumSmallBigDecimalTopCms(st: CMSState): TopCMS[BigDecimal] = sumTopCmsVector(st.smallBigDecimals, st.cmsBigDecimalMonoid) @Benchmark - def sumLargeBigDecimalTopCms(st: CMSState) = + def sumLargeBigDecimalTopCms(st: CMSState): TopCMS[BigDecimal] = sumTopCmsVector(st.largeBigDecimals, st.cmsBigDecimalMonoid) @Benchmark - def sumLargeStringTopCms(st: CMSState) = + def sumLargeStringTopCms(st: CMSState): TopCMS[String] = sumTopCmsVector(st.largeStrings, st.cmsStringMonoid) } diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/Tuple4Benchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/Tuple4Benchmark.scala index ba166583a..30ec81910 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/Tuple4Benchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/Tuple4Benchmark.scala @@ -38,18 +38,18 @@ class Tuple4Benchmark { import Tuple4Benchmark._ @Benchmark - def timeTuplePlus(state: Tuple4State, bh: Blackhole) = + def timeTuplePlus(state: Tuple4State, bh: Blackhole): Unit = bh.consume(state.inputData.reduce(state.tupleMonoid.plus(_, _))) @Benchmark - def timeTupleSumOption(state: Tuple4State, bh: Blackhole) = + def timeTupleSumOption(state: Tuple4State, bh: Blackhole): Unit = bh.consume(state.tupleMonoid.sumOption(state.inputData)) @Benchmark - def timeProductPlus(state: Tuple4State, bh: 
Blackhole) = + def timeProductPlus(state: Tuple4State, bh: Blackhole): Unit = bh.consume(state.inputData.reduce(state.productMonoid.plus(_, _))) @Benchmark - def timeProductSumOption(state: Tuple4State, bh: Blackhole) = + def timeProductSumOption(state: Tuple4State, bh: Blackhole): Unit = bh.consume(state.productMonoid.sumOption(state.inputData)) } diff --git a/algebird-core/src/main/scala-2.12-/com/twitter/algebird/compat.scala b/algebird-core/src/main/scala-2.12-/com/twitter/algebird/compat.scala index 0d6844d83..0b2ccb988 100644 --- a/algebird-core/src/main/scala-2.12-/com/twitter/algebird/compat.scala +++ b/algebird-core/src/main/scala-2.12-/com/twitter/algebird/compat.scala @@ -28,6 +28,10 @@ private[algebird] trait CompatFold { private[algebird] trait CompatDecayedVector { // This is the default monoid that never thresholds. // If you want to set a specific accuracy you need to implicitly override this - implicit def monoid[F, C[_]](implicit vs: VectorSpace[F, C], metric: Metric[C[F]], ord: Ordering[F]) = + implicit def monoid[F, C[_]](implicit + vs: VectorSpace[F, C], + metric: Metric[C[F]], + ord: Ordering[F] + ): Monoid[DecayedVector[IndexedSeq]] = DecayedVector.monoidWithEpsilon(-1.0) } diff --git a/algebird-core/src/main/scala-2.12-/com/twitter/algebird/macros/MacroCompat.scala b/algebird-core/src/main/scala-2.12-/com/twitter/algebird/macros/MacroCompat.scala index a478977dc..b44e4ba39 100644 --- a/algebird-core/src/main/scala-2.12-/com/twitter/algebird/macros/MacroCompat.scala +++ b/algebird-core/src/main/scala-2.12-/com/twitter/algebird/macros/MacroCompat.scala @@ -6,11 +6,11 @@ private[algebird] object MacroCompat { type Context = whitebox.Context - def normalize(c: Context)(tpe: c.universe.Type) = tpe.etaExpand + def normalize(c: Context)(tpe: c.universe.Type): c.universe.Type = tpe.etaExpand - def declarations(c: Context)(tpe: c.universe.Type) = tpe.decls + def declarations(c: Context)(tpe: c.universe.Type): c.universe.MemberScope = tpe.decls - def companionSymbol[T](c: Context)(typeSymbol: c.universe.Symbol) = typeSymbol.companion + def companionSymbol[T](c: Context)(typeSymbol: c.universe.Symbol): c.universe.Symbol = typeSymbol.companion def typeName(c: Context)(s: String): c.universe.TypeName = c.universe.TypeName(s) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala index f0a310c9d..fbc06bc65 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala @@ -27,7 +27,7 @@ object AdaptiveVector { /** * When density >= this value * size, we switch to dense vectors */ - val THRESHOLD = 0.25 + val THRESHOLD: Double = 0.25 def fill[V](size: Int)(sparse: V): AdaptiveVector[V] = SparseVector(Map.empty[Int, V], sparse, size) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala b/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala index d0436c5bd..f03686e14 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala @@ -422,7 +422,7 @@ case class BFZero[A](hashes: BFHash[A], override val width: Int) extends BF[A] { override def numHashes: Int = hashes.size - override def numBits = 0 + override def numBits: Int = 0 override def ++(other: BF[A]): BF[A] = other diff --git 
a/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala index 63a09077d..128ee0ecc 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala @@ -140,7 +140,7 @@ object CorrelationMonoid extends Monoid[Correlation] { } } - override val zero = Correlation(0, 0, 0, 0, 0, 0) + override val zero: Correlation = Correlation(0, 0, 0, 0, 0, 0) override def sumOption(cs: TraversableOnce[Correlation]): Option[Correlation] = if (cs.isEmpty) None diff --git a/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala b/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala index 40aa88efc..91ca05cc6 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala @@ -1344,7 +1344,7 @@ class ScopedTopNCMSMonoid[K1, K2](cms: CMS[(K1, K2)], heavyHittersN: Int = 100) object ScopedTopNCMS { - def scopedHasher[K1: CMSHasher, K2: CMSHasher] = new CMSHasher[(K1, K2)] { + def scopedHasher[K1: CMSHasher, K2: CMSHasher]: CMSHasher[(K1, K2)] = new CMSHasher[(K1, K2)] { private val k1Hasher = implicitly[CMSHasher[K1]] private val k2Hasher = implicitly[CMSHasher[K2]] diff --git a/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala b/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala index d37f3c3e2..fc7747752 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala @@ -25,7 +25,9 @@ object DecayedVector extends CompatDecayedVector { def buildWithHalflife[C[_]](vector: C[Double], time: Double, halfLife: Double): DecayedVector[C] = DecayedVector(vector, time * scala.math.log(2.0) / halfLife) - def monoidWithEpsilon[C[_]](eps: Double)(implicit vs: VectorSpace[Double, C], metric: Metric[C[Double]]) = + def monoidWithEpsilon[C[_]]( + eps: Double + )(implicit vs: VectorSpace[Double, C], metric: Metric[C[Double]]): Monoid[DecayedVector[C]] = new Monoid[DecayedVector[C]] { override val zero = DecayedVector(vs.group.zero, Double.NegativeInfinity) override def plus(left: DecayedVector[C], right: DecayedVector[C]) = @@ -51,13 +53,16 @@ object DecayedVector extends CompatDecayedVector { def mapMonoidWithEpsilon[K]( eps: Double - )(implicit vs: VectorSpace[Double, ({ type x[a] = Map[K, a] })#x], metric: Metric[Map[K, Double]]) = + )(implicit + vs: VectorSpace[Double, ({ type x[a] = Map[K, a] })#x], + metric: Metric[Map[K, Double]] + ): Monoid[DecayedVector[({ type x[a] = Map[K, a] })#x]] = monoidWithEpsilon[({ type x[a] = Map[K, a] })#x](eps) implicit def mapMonoid[K](implicit vs: VectorSpace[Double, ({ type x[a] = Map[K, a] })#x], metric: Metric[Map[K, Double]] - ) = + ): Monoid[DecayedVector[({ type x[a] = Map[K, a] })#x]] = mapMonoidWithEpsilon(-1.0) def scaledPlus[C[_]](newVal: DecayedVector[C], oldVal: DecayedVector[C], eps: Double)(implicit diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala b/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala index e9088f58e..d9392a208 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala @@ -130,7 +130,7 @@ class EventuallyMonoid[E, O](convert: O => E)(mustConvert: O => Boolean)(implici ) 
extends EventuallySemigroup[E, O](convert)(mustConvert) with Monoid[Either[E, O]] { - override def zero = Right(Monoid.zero[O]) + override def zero: Right[E, O] = Right(Monoid.zero[O]) } @@ -162,7 +162,7 @@ class EventuallyRing[E, O](convert: O => E)(mustConvert: O => Boolean)(implicit ) extends EventuallyGroup[E, O](convert)(mustConvert) with Ring[Either[E, O]] { - override def one = Right(Ring.one[O]) + override def one: Right[E, O] = Right(Ring.one[O]) override def times(x: Either[E, O], y: Either[E, O]): Either[E, O] = x match { @@ -181,7 +181,7 @@ class EventuallyRing[E, O](convert: O => E)(mustConvert: O => Boolean)(implicit } trait AbstractEventuallyAggregator[A, E, O, C] extends Aggregator[A, Either[E, O], C] { - override def prepare(a: A) = Right(rightAggregator.prepare(a)) + override def prepare(a: A): Right[E, O] = Right(rightAggregator.prepare(a)) override def present(b: Either[E, O]): C = b match { case Right(o) => rightAggregator.present(o) case Left(e) => presentLeft(e) @@ -199,7 +199,7 @@ trait AbstractEventuallyAggregator[A, E, O, C] extends Aggregator[A, Either[E, O trait EventuallyAggregator[A, E, O, C] extends AbstractEventuallyAggregator[A, E, O, C] { //avoid init order issues and cyclical references - @transient override lazy val semigroup = + @transient override lazy val semigroup: EventuallySemigroup[E, O] = new EventuallySemigroup[E, O](convert)(mustConvert)(leftSemigroup, rightAggregator.semigroup) } @@ -209,6 +209,6 @@ trait EventuallyMonoidAggregator[A, E, O, C] override def rightAggregator: MonoidAggregator[A, O, C] - @transient override lazy val monoid = + @transient override lazy val monoid: EventuallyMonoid[E, O] = new EventuallyMonoid[E, O](convert)(mustConvert)(leftSemigroup, rightAggregator.monoid) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Hash128.scala b/algebird-core/src/main/scala/com/twitter/algebird/Hash128.scala index c1f85f71d..64a97fd69 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Hash128.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Hash128.scala @@ -45,7 +45,7 @@ trait Hash128[-K] extends java.io.Serializable { object Hash128 extends java.io.Serializable { def hash[K](k: K)(implicit h: Hash128[K]): (Long, Long) = h.hash(k) - val DefaultSeed = 12345678L + val DefaultSeed: Long = 12345678L def murmur128ArrayByte(seed: Long): Hash128[Array[Byte]] = new Hash128[Array[Byte]] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala index 7cefd6afe..6a8ad103e 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala @@ -46,7 +46,7 @@ case class BitSetLite(in: Array[Byte]) { object HyperLogLog { /* Size of the hash in bits */ - val hashSize = 128 + val hashSize: Int = 128 private[algebird] val negativePowersOfTwo: Array[Double] = 0.to(hashSize).map(i => math.pow(2.0, -i)).toArray @@ -730,7 +730,7 @@ abstract class SetSizeAggregatorBase[A](hllBits: Int, maxSetSize: Int) override def mustConvert(set: Set[A]): Boolean = set.size > maxSetSize - override val leftSemigroup = new HyperLogLogMonoid(hllBits) + override val leftSemigroup: HyperLogLogMonoid = new HyperLogLogMonoid(hllBits) override val rightAggregator: MonoidAggregator[A, Set[A], Long] = Aggregator.uniqueCount[A].andThenPresent(_.toLong) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala 
b/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala index 05b3c1489..5f00f6732 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala @@ -69,14 +69,14 @@ object Interval extends java.io.Serializable { * Represents an empty interval. */ case class SoEmpty[T, NonEmpty[t] <: Interval[t]]() extends MaybeEmpty[T, NonEmpty] { - override def isEmpty = true + override def isEmpty: Boolean = true } /** * Represents a non-empty interval. */ case class NotSoEmpty[T, NonEmpty[t] <: Interval[t]](get: NonEmpty[T]) extends MaybeEmpty[T, NonEmpty] { - override def isEmpty = false + override def isEmpty: Boolean = false } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala b/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala index e26e1f236..f4f43f88b 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala @@ -89,7 +89,7 @@ object JBoolRing extends Ring[JBool] { */ class JListMonoid[T] extends Monoid[JList[T]] { override def isNonZero(x: JList[T]): Boolean = !x.isEmpty - override lazy val zero = new JArrayList[T](0) + override lazy val zero: JArrayList[T] = new JArrayList[T](0) override def plus(x: JList[T], y: JList[T]): JArrayList[T] = { val res = new JArrayList[T](x.size + y.size) res.addAll(x) @@ -104,7 +104,7 @@ class JListMonoid[T] extends Monoid[JList[T]] { * TODO extend this to Group, Ring */ class JMapMonoid[K, V: Semigroup] extends Monoid[JMap[K, V]] { - override lazy val zero = new JHashMap[K, V](0) + override lazy val zero: JHashMap[K, V] = new JHashMap[K, V](0) val nonZero: (V => Boolean) = implicitly[Semigroup[V]] match { case mon: Monoid[_] => mon.isNonZero(_) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala b/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala index 5a66d0f32..56acb8718 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala @@ -41,7 +41,7 @@ object Metric { def apply[V: Metric](v1: V, v2: V): Double = implicitly[Metric[V]].apply(v1, v2) def norm[V: Metric: Monoid](v: V): Double = apply(v, Monoid.zero[V]) - def from[V](f: (V, V) => Double) = new Metric[V] { + def from[V](f: (V, V) => Double): Metric[V] = new Metric[V] { override def apply(v1: V, v2: V): Double = f(v1, v2) } @@ -88,34 +88,34 @@ object Metric { def L2Map[K, V: Monoid: Metric]: Metric[Map[K, V]] = minkowskiMap[K, V](2.0) // Implicit values - implicit val doubleMetric = + implicit val doubleMetric: Metric[Double] = Metric.from((a: Double, b: Double) => math.abs(a - b)) - implicit val intMetric = + implicit val intMetric: Metric[Int] = Metric.from((a: Int, b: Int) => math.abs((a - b).toDouble)) - implicit val longMetric = + implicit val longMetric: Metric[Long] = Metric.from((a: Long, b: Long) => math.abs((a - b).toDouble)) - implicit val floatMetric = + implicit val floatMetric: Metric[Float] = Metric.from((a: Float, b: Float) => math.abs((a.toDouble - b.toDouble))) - implicit val shortMetric = + implicit val shortMetric: Metric[Short] = Metric.from((a: Short, b: Short) => math.abs((a - b).toDouble)) - implicit val boolMetric = + implicit val boolMetric: Metric[Boolean] = Metric.from((x: Boolean, y: Boolean) => if (x ^ y) 1.0 else 0.0) - implicit val jDoubleMetric = + implicit val jDoubleMetric: Metric[JDouble] = Metric.from((a: 
JDouble, b: JDouble) => math.abs(a - b)) - implicit val jIntMetric = + implicit val jIntMetric: Metric[Integer] = Metric.from((a: JInt, b: JInt) => math.abs((a - b).toDouble)) - implicit val jLongMetric = + implicit val jLongMetric: Metric[JLong] = Metric.from((a: JLong, b: JLong) => math.abs((a - b).toDouble)) - implicit val jFloatMetric = + implicit val jFloatMetric: Metric[JFloat] = Metric.from((a: JFloat, b: JFloat) => math.abs((a.toDouble - b.toDouble))) - implicit val jShortMetric = + implicit val jShortMetric: Metric[JShort] = Metric.from((a: JShort, b: JShort) => math.abs((a - b).toDouble)) - implicit val jBoolMetric = + implicit val jBoolMetric: Metric[JBool] = Metric.from((x: JBool, y: JBool) => if (x ^ y) 1.0 else 0.0) // If you don't want to use L2 as your default metrics, you need to override these - implicit def iterableMetric[V: Monoid: Metric] = L2Iterable[V] - implicit def mapMetric[K, V: Monoid: Metric] = L2Map[K, V] + implicit def iterableMetric[V: Monoid: Metric]: Metric[Iterable[V]] = L2Iterable[V] + implicit def mapMetric[K, V: Monoid: Metric]: Metric[Map[K, V]] = L2Map[K, V] } @implicitNotFound(msg = "Cannot find Metric type class for ${V}") diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala b/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala index 5efa54a94..3fc0b7cde 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala @@ -142,7 +142,7 @@ class MinHasher32(numHashes: Int, numBands: Int) extends MinHasher[Int](numHashe def this(targetThreshold: Double, maxBytes: Int) = this(MinHasher.pickHashesAndBands(targetThreshold, maxBytes / 4)) - override def hashSize = 4 + override def hashSize: Int = 4 override def maxHash: Int = Int.MaxValue @@ -179,7 +179,7 @@ class MinHasher16(numHashes: Int, numBands: Int) extends MinHasher[Char](numHash def this(targetThreshold: Double, maxBytes: Int) = this(MinHasher.pickHashesAndBands(targetThreshold, maxBytes / 2)) - override def hashSize = 2 + override def hashSize: Int = 2 override def maxHash: Char = Char.MaxValue diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala index 9dd04a5e5..844549401 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala @@ -93,11 +93,11 @@ class OptionMonoid[T](implicit semi: Semigroup[T]) extends Monoid[Option[T]] { class EitherMonoid[L, R](implicit semigroupl: Semigroup[L], monoidr: Monoid[R]) extends EitherSemigroup[L, R]()(semigroupl, monoidr) with Monoid[Either[L, R]] { - override lazy val zero = Right(monoidr.zero) + override lazy val zero: Right[L, R] = Right(monoidr.zero) } object StringMonoid extends Monoid[String] { - override val zero = "" + override val zero: String = "" override def plus(left: String, right: String): String = left + right override def sumOption(items: TraversableOnce[String]): Option[String] = if (items.isEmpty) None diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Preparer.scala b/algebird-core/src/main/scala/com/twitter/algebird/Preparer.scala index f576498d3..83ef4dfa4 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Preparer.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Preparer.scala @@ -98,7 +98,7 @@ object Preparer { /** * This is the expected entry point for creating a new Preparer. 
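// Editor's note: an illustrative sketch, not part of this patch. The scaladoc above
// names Preparer[A] as the expected entry point; this uses only the members visible
// in this patch (Preparer.apply, MapPreparer.map, prepareFn). The object and value
// names are hypothetical, introduced purely for illustration.
import com.twitter.algebird.Preparer

object PreparerSketch {
  // map on the identity preparer fuses the function into a single prepare step.
  val doubled = Preparer[Int].map(_ * 2)
  val prepared: Int = doubled.prepareFn(21) // == 42
}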
*/ - def apply[A] = MapPreparer.identity[A] + def apply[A]: MapPreparer[A, A] = MapPreparer.identity[A] } /** @@ -166,13 +166,13 @@ object MapPreparer { /** * Create a concrete MapPreparer. */ - def apply[A, T](fn: A => T) = new MapPreparer[A, T] { val prepareFn: A => T = fn } + def apply[A, T](fn: A => T): MapPreparer[A, T] = new MapPreparer[A, T] { val prepareFn: A => T = fn } /** * This is purely an optimization for the case of mapping by identity. * It overrides the key methods to not actually use the identity function. */ - def identity[A] = new MapPreparer[A, A] { + def identity[A]: MapPreparer[A, A] = new MapPreparer[A, A] { override val prepareFn: A => A = (a: A) => a override def map[U](fn: A => U): MapPreparer[A, U] = MapPreparer(fn) override def flatMap[U](fn: A => TraversableOnce[U]): FlatMapPreparer[A, U] = FlatMapPreparer(fn) @@ -236,7 +236,7 @@ object FlatMapPreparer { /** * Create a concrete FlatMapPreparer. */ - def apply[A, T](fn: A => TraversableOnce[T]) = new FlatMapPreparer[A, T] { + def apply[A, T](fn: A => TraversableOnce[T]): FlatMapPreparer[A, T] = new FlatMapPreparer[A, T] { override val prepareFn: A => TraversableOnce[T] = fn } @@ -244,7 +244,7 @@ object FlatMapPreparer { * This is purely an optimization for the case of flatMapping by identity. * It overrides the key methods to not actually use the identity function. */ - def identity[A] = new FlatMapPreparer[TraversableOnce[A], A] { + def identity[A]: FlatMapPreparer[TraversableOnce[A], A] = new FlatMapPreparer[TraversableOnce[A], A] { override val prepareFn: TraversableOnce[A] => TraversableOnce[A] = (a: TraversableOnce[A]) => a override def map[U](fn: A => U): FlatMapPreparer[TraversableOnce[A], U] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala index 3a9bc6e3c..3441e7cf7 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala @@ -472,11 +472,11 @@ trait QTreeAggregatorLike[T] { def level: Int = QTree.DefaultLevel implicit def num: Numeric[T] def prepare(input: T): QTree[Unit] = QTree.value(num.toDouble(input), level) - def semigroup = new QTreeSemigroup[Unit](k) + def semigroup: QTreeSemigroup[Unit] = new QTreeSemigroup[Unit](k) } object QTreeAggregator { - val DefaultK = 9 + val DefaultK: Int = 9 } /** diff --git a/algebird-core/src/main/scala/com/twitter/algebird/RightFolded.scala b/algebird-core/src/main/scala/com/twitter/algebird/RightFolded.scala index 50210d35b..bc15bf44c 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/RightFolded.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/RightFolded.scala @@ -25,7 +25,7 @@ package com.twitter.algebird * so this forms a kind of reset of the fold. */ object RightFolded { - def monoid[In, Out](foldfn: (In, Out) => Out) = + def monoid[In, Out](foldfn: (In, Out) => Out): Monoid[RightFolded[In, Out]] = new Monoid[RightFolded[In, Out]] { override val zero: RightFoldedZero.type = RightFoldedZero diff --git a/algebird-core/src/main/scala/com/twitter/algebird/RightFolded2.scala b/algebird-core/src/main/scala/com/twitter/algebird/RightFolded2.scala index 3cff3ebec..8f906379f 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/RightFolded2.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/RightFolded2.scala @@ -33,10 +33,12 @@ package com.twitter.algebird * sent to a single reducer and all the Accs are added up. 
*/ object RightFolded2 { - def monoid[In, Out: Group](foldfn: (In, Out) => Out) = + def monoid[In, Out: Group](foldfn: (In, Out) => Out): RightFolded2Monoid[In, Out, Out] = new RightFolded2Monoid[In, Out, Out](foldfn, identity _) - def monoid[In, Out, Acc: Group](trans: (Out) => Acc)(foldfn: (In, Out) => Out) = + def monoid[In, Out, Acc: Group]( + trans: (Out) => Acc + )(foldfn: (In, Out) => Out): RightFolded2Monoid[In, Out, Acc] = new RightFolded2Monoid[In, Out, Acc](foldfn, trans) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala b/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala index 99110c854..27deb1fe9 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala @@ -77,8 +77,8 @@ class NumericRing[T](implicit num: Numeric[T]) extends Ring[T] { } object IntRing extends Ring[Int] { - override def zero = 0 - override def one = 1 + override def zero: Int = 0 + override def one: Int = 1 override def negate(v: Int): Int = -v override def plus(l: Int, r: Int): Int = l + r override def minus(l: Int, r: Int): Int = l - r @@ -117,8 +117,8 @@ object ShortRing extends Ring[Short] { } object LongRing extends Ring[Long] { - override def zero = 0L - override def one = 1L + override def zero: Long = 0L + override def one: Long = 1L override def negate(v: Long): Long = -v override def plus(l: Long, r: Long): Long = l + r override def minus(l: Long, r: Long): Long = l - r @@ -137,8 +137,8 @@ object LongRing extends Ring[Long] { } object FloatRing extends Ring[Float] { - override def one = 1.0f - override def zero = 0.0f + override def one: Float = 1.0f + override def zero: Float = 0.0f override def negate(v: Float): Float = -v override def plus(l: Float, r: Float): Float = l + r override def minus(l: Float, r: Float): Float = l - r @@ -159,8 +159,8 @@ object FloatRing extends Ring[Float] { } object DoubleRing extends Ring[Double] { - override def one = 1.0 - override def zero = 0.0 + override def one: Double = 1.0 + override def zero: Double = 0.0 override def negate(v: Double): Double = -v override def plus(l: Double, r: Double): Double = l + r override def minus(l: Double, r: Double): Double = l - r @@ -181,8 +181,8 @@ object DoubleRing extends Ring[Double] { } object BooleanRing extends Ring[Boolean] { - override def one = true - override def zero = false + override def one: Boolean = true + override def zero: Boolean = false override def negate(v: Boolean): Boolean = v override def plus(l: Boolean, r: Boolean): Boolean = l ^ r override def minus(l: Boolean, r: Boolean): Boolean = l ^ r diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala index ec94c9cfa..d7e781c2f 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala @@ -52,7 +52,7 @@ object SGD { sealed abstract class SGD[+Pos] case object SGDZero extends SGD[Nothing] object SGDWeights { - def apply(w: IndexedSeq[Double]) = new SGDWeights(1L, w) + def apply(w: IndexedSeq[Double]): SGDWeights = new SGDWeights(1L, w) def average(left: SGDWeights, right: SGDWeights): SGDWeights = { val lc = left.count val rc = right.count @@ -73,7 +73,7 @@ object SGDWeights { case class SGDWeights(val count: Long, val weights: IndexedSeq[Double]) extends SGD[Nothing] object SGDPos { - def apply[Pos](p: Pos) = new SGDPos(List(p)) + def apply[Pos](p: Pos): 
SGDPos[Pos] = new SGDPos(List(p)) } case class SGDPos[+Pos](val pos: List[Pos]) extends SGD[Pos] diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala b/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala index dbb27c0e9..ef6cd8b9c 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala @@ -35,7 +35,7 @@ class SummingCache[K, V](capacity: Int)(implicit sgv: Semigroup[V]) extends Stat require(capacity >= 0, "Cannot have negative capacity in SummingIterator") - override val semigroup = new MapMonoid[K, V] + override val semigroup: MapMonoid[K, V] = new MapMonoid[K, V] protected def optNonEmpty(m: Map[K, V]): Option[Map[K, V]] = if (m.isEmpty) None else Some(m) override def put(m: Map[K, V]): Option[Map[K, V]] = { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala b/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala index d53ff963a..53e357ef7 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala @@ -28,7 +28,7 @@ object VectorSpace extends VectorSpaceOps with Implicits sealed trait VectorSpaceOps { def scale[F, C[_]](v: F, c: C[F])(implicit vs: VectorSpace[F, C]): C[F] = vs.scale(v, c) - def from[F, C[_]](scaleFn: (F, C[F]) => C[F])(implicit r: Ring[F], cGroup: Group[C[F]]) = + def from[F, C[_]](scaleFn: (F, C[F]) => C[F])(implicit r: Ring[F], cGroup: Group[C[F]]): VectorSpace[F, C] = new VectorSpace[F, C] { override def ring: Ring[F] = r override def group: Group[C[F]] = cGroup @@ -39,12 +39,12 @@ sealed trait VectorSpaceOps { private object VectorSpaceOps extends VectorSpaceOps sealed trait Implicits extends LowPrioImpicits { - implicit def indexedSeqSpace[T: Ring] = + implicit def indexedSeqSpace[T: Ring]: VectorSpace[T, IndexedSeq] = VectorSpaceOps.from[T, IndexedSeq]((s, seq) => seq.map(Ring.times(s, _))) } sealed trait LowPrioImpicits { - implicit def mapSpace[K, T: Ring] = + implicit def mapSpace[K, T: Ring]: VectorSpace[T, ({ type x[a] = Map[K, a] })#x] = VectorSpaceOps.from[T, ({ type x[a] = Map[K, a] })#x] { (s, m) => m.transform { case (_, v) => Ring.times(s, v) } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/monad/EitherMonad.scala b/algebird-core/src/main/scala/com/twitter/algebird/monad/EitherMonad.scala index 0185858a4..4ffa9ed25 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/monad/EitherMonad.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/monad/EitherMonad.scala @@ -21,7 +21,7 @@ import com.twitter.algebird.Monad // Monad for either, used for modeling Error where L is the type of the error object EitherMonad { class Error[L] extends Monad[({ type RightType[R] = Either[L, R] })#RightType] { - override def apply[R](r: R) = Right(r) + override def apply[R](r: R): Right[L, R] = Right(r) override def flatMap[T, U](self: Either[L, T])(next: T => Either[L, U]): Either[L, U] = self.right.flatMap(next) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala b/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala index 8e2465eb7..62d8fdbab 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala @@ -107,7 +107,7 @@ object StateWithError { * Use like fromEither[Int](Right("good")) 
* to get a constant Either in the monad */ - def fromEither[S] = new ConstantStateMaker[S] + def fromEither[S]: ConstantStateMaker[S] = new ConstantStateMaker[S] class ConstantStateMaker[S] { def apply[F, T](either: Either[F, T]): StateWithError[S, F, T] = { (s: S) => either.right.map((s, _)) } } @@ -118,7 +118,7 @@ object StateWithError { } } // TODO this should move to Monad and work for any Monad - def toKleisli[S] = new FunctionLifter[S] + def toKleisli[S]: FunctionLifter[S] = new FunctionLifter[S] implicit def apply[S, F, T](fn: S => Either[F, (S, T)]): StateWithError[S, F, T] = StateFn(fn) implicit def monad[S, F]: Monad[({ type Result[T] = StateWithError[S, F, T] })#Result] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueAggregator.scala b/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueAggregator.scala index 5dc2da513..7326b3c24 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueAggregator.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueAggregator.scala @@ -32,7 +32,7 @@ abstract class PriorityQueueAggregator[A, +C](max: Int)(implicit ord: Ordering[A def present(q: PriorityQueue[A]): B */ - override val monoid = new PriorityQueueMonoid[A](max) + override val monoid: PriorityQueueMonoid[A] = new PriorityQueueMonoid[A](max) override final def prepare(a: A): PriorityQueue[A] = monoid.build(a) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueMonoid.scala index 5928d1d07..f49daf957 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueMonoid.scala @@ -49,7 +49,7 @@ class PriorityQueueMonoid[K](max: Int)(implicit ord: Ordering[K]) extends Monoid protected def limit(q: PriorityQueue[K]): Unit = while (q.size > max) { q.poll() } - override def zero = new PriorityQueue[K](MINQUEUESIZE, ord.reverse) + override def zero: PriorityQueue[K] = new PriorityQueue[K](MINQUEUESIZE, ord.reverse) override def isNonZero(q: PriorityQueue[K]): Boolean = q.size > 0 override def plus(left: PriorityQueue[K], right: PriorityQueue[K]): PriorityQueue[K] = { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/package.scala b/algebird-core/src/main/scala/com/twitter/algebird/package.scala index 3b141fe88..0591ff3ce 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/package.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/package.scala @@ -30,5 +30,5 @@ package object algebird { * To keep code using algebird.Field compiling, we export algebra Field */ type Field[V] = algebra.ring.Field[V] - val Field = algebra.ring.Field + val Field = algebra.ring.Field // scalafix:ok } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/statistics/GaussianDistributionMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/statistics/GaussianDistributionMonoid.scala index 41cf11e24..6f754b861 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/statistics/GaussianDistributionMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/statistics/GaussianDistributionMonoid.scala @@ -26,7 +26,7 @@ object GaussianDistribution { * [[http://en.wikipedia.org/wiki/Sum_of_normally_distributed_random_variables]] */ object GaussianDistributionMonoid extends Monoid[GaussianDistribution] { - override def 
zero = new GaussianDistribution(0, 0) + override def zero: GaussianDistribution = new GaussianDistribution(0, 0) override def plus(left: GaussianDistribution, right: GaussianDistribution): GaussianDistribution = new GaussianDistribution(left.mean + right.mean, left.sigma2 + right.sigma2) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala b/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala index ebc34289a..bd1e756cd 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala @@ -27,7 +27,7 @@ private class IterCallStatistics(threadSafe: Boolean) { private class Statistics(threadSafe: Boolean) { import scala.math.min import java.lang.Long.numberOfLeadingZeros - val maxBucket = 10 + val maxBucket: Int = 10 val distribution: IndexedSeq[Counter] = IndexedSeq.fill(maxBucket + 1)(Counter(threadSafe)) val total: Counter = Counter(threadSafe) diff --git a/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala b/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala index c64cd62cd..beaa7a4eb 100644 --- a/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala +++ b/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala @@ -122,7 +122,7 @@ class HConsSemigroup[A, B <: HList](protected val a: Semigroup[A], protected val Some(bufA.flush.get :: bufB.flush.get) } - override val hashCode = (a, b).hashCode + override val hashCode: Int = (a, b).hashCode override def equals(that: Any): Boolean = that match { diff --git a/algebird-spark/src/main/scala/com/twitter/algebird/spark/package.scala b/algebird-spark/src/main/scala/com/twitter/algebird/spark/package.scala index 05bb1fb3c..f8fba3ee0 100644 --- a/algebird-spark/src/main/scala/com/twitter/algebird/spark/package.scala +++ b/algebird-spark/src/main/scala/com/twitter/algebird/spark/package.scala @@ -14,7 +14,7 @@ package object spark { * spark exposes an Aggregator type, so this is here to avoid shadowing */ type AlgebirdAggregator[A, B, C] = Aggregator[A, B, C] - val AlgebirdAggregator = Aggregator + val AlgebirdAggregator: Aggregator.type = Aggregator implicit class ToAlgebird[T](val rdd: RDD[T]) extends AnyVal { def algebird: AlgebirdRDD[T] = new AlgebirdRDD[T](rdd) diff --git a/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala b/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala index b6de4bdf6..13634d238 100644 --- a/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala +++ b/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala @@ -11,7 +11,7 @@ package test { // not needed in the algebird package, just testing the API import com.twitter.algebird.spark.ToAlgebird object Test { - def sum[T: Monoid: ClassTag](r: RDD[T]) = r.algebird.sum + def sum[T: Monoid: ClassTag](r: RDD[T]): T = r.algebird.sum } } diff --git a/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala b/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala index ba62cb889..fab1f4185 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala @@ -168,7 +168,7 @@ object BaseProperties extends MetricProperties { } class DefaultHigherEq[M[_]] extends 
HigherEq[M] { - override def apply[T](m: M[T], n: M[T]) = m == n + override def apply[T](m: M[T], n: M[T]): Boolean = m == n } def isNonZero[V: Semigroup](v: V): Boolean = diff --git a/algebird-test/src/main/scala/com/twitter/algebird/BaseVectorSpaceProperties.scala b/algebird-test/src/main/scala/com/twitter/algebird/BaseVectorSpaceProperties.scala index faf0b9e9e..533903a81 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/BaseVectorSpaceProperties.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/BaseVectorSpaceProperties.scala @@ -18,6 +18,7 @@ package com.twitter.algebird import org.scalacheck.Arbitrary import org.scalacheck.Prop.forAll +import org.scalacheck.Prop /** * Base properties for VectorSpace tests. @@ -25,12 +26,12 @@ import org.scalacheck.Prop.forAll object BaseVectorSpaceProperties { def isEqualIfZero[F, C[_]]( eqfn: (C[F], C[F]) => Boolean - )(implicit vs: VectorSpace[F, C], arb: Arbitrary[C[F]]) = + )(implicit vs: VectorSpace[F, C], arb: Arbitrary[C[F]]): Prop = forAll((a: C[F]) => eqfn(VectorSpace.scale(vs.field.zero, a), vs.group.zero)) def distributesWithPlus[F, C[_]]( eqfn: (C[F], C[F]) => Boolean - )(implicit vs: VectorSpace[F, C], arbC: Arbitrary[C[F]], arbF: Arbitrary[F]) = + )(implicit vs: VectorSpace[F, C], arbC: Arbitrary[C[F]], arbF: Arbitrary[F]): Prop = forAll { (a: C[F], b: C[F], c: F) => val v1 = VectorSpace.scale(c, vs.group.plus(a, b)) val v2 = vs.group.plus(VectorSpace.scale(c, a), VectorSpace.scale(c, b)) @@ -39,7 +40,7 @@ object BaseVectorSpaceProperties { def isAssociative[F, C[_]]( eqfn: (C[F], C[F]) => Boolean - )(implicit vs: VectorSpace[F, C], arbC: Arbitrary[C[F]], arbF: Arbitrary[F]) = + )(implicit vs: VectorSpace[F, C], arbC: Arbitrary[C[F]], arbF: Arbitrary[F]): Prop = forAll { (a: C[F], b: F, c: F) => val v1 = VectorSpace.scale(c, VectorSpace.scale(b, a)) val v2 = VectorSpace.scale(vs.field.times(c, b), a) @@ -48,12 +49,12 @@ object BaseVectorSpaceProperties { def identityOne[F, C[_]]( eqfn: (C[F], C[F]) => Boolean - )(implicit vs: VectorSpace[F, C], arb: Arbitrary[C[F]]) = + )(implicit vs: VectorSpace[F, C], arb: Arbitrary[C[F]]): Prop = forAll((a: C[F]) => eqfn(VectorSpace.scale(vs.field.one, a), a)) def distributesOverScalarPlus[F, C[_]]( eqfn: (C[F], C[F]) => Boolean - )(implicit vs: VectorSpace[F, C], arbC: Arbitrary[C[F]], arbF: Arbitrary[F]) = + )(implicit vs: VectorSpace[F, C], arbC: Arbitrary[C[F]], arbF: Arbitrary[F]): Prop = forAll { (a: C[F], b: F, c: F) => val v1 = VectorSpace.scale(vs.field.plus(b, c), a) val v2 = vs.group.plus(VectorSpace.scale(b, a), VectorSpace.scale(c, a)) @@ -62,13 +63,13 @@ object BaseVectorSpaceProperties { def vectorSpaceLaws[F, C[_]]( eqfn: (C[F], C[F]) => Boolean - )(implicit vs: VectorSpace[F, C], arbC: Arbitrary[C[F]], arbF: Arbitrary[F]) = + )(implicit vs: VectorSpace[F, C], arbC: Arbitrary[C[F]], arbF: Arbitrary[F]): Prop = isEqualIfZero(eqfn) && distributesWithPlus(eqfn) && isAssociative(eqfn) && identityOne( eqfn ) && distributesOverScalarPlus( eqfn ) - def beCloseTo(a: Double, b: Double) = + def beCloseTo(a: Double, b: Double): Boolean = a == b || (math.abs(a - b) / math.abs(a)) < 1e-10 || (a.isInfinite && b.isInfinite) || a.isNaN || b.isNaN } diff --git a/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala b/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala index 5f9e74f0b..bc46aac3d 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala @@ -20,6 
+20,7 @@ import org.scalacheck.Arbitrary import org.scalacheck.Prop.forAll import scala.math.Equiv import Monad.{operators, pureOp} +import org.scalacheck.Prop /** * Basic Monad laws, useful for testing any monad. @@ -27,7 +28,7 @@ import Monad.{operators, pureOp} object MonadLaws { // $COVERAGE-OFF$Turn off coverage for deprecated laws. @deprecated("No longer used. Use Equiv[T] instance", since = "0.13.0") - def defaultEq[T] = { (t0: T, t1: T) => (t0 == t1) } + def defaultEq[T]: (T, T) => Boolean = { (t0: T, t1: T) => (t0 == t1) } @deprecated("use leftIdentity[T]", since = "0.13.0") def leftIdentityEquiv[M[_], T, U](implicit @@ -35,11 +36,11 @@ object MonadLaws { arb: Arbitrary[T], arbfn: Arbitrary[(T) => M[U]], equiv: Equiv[M[U]] - ) = + ): Prop = leftIdentity[M, T, U] @deprecated("use rightIdentity[T]", since = "0.13.0") - def rightIdentityEquiv[M[_], T](implicit monad: Monad[M], arb: Arbitrary[M[T]], equiv: Equiv[M[T]]) = + def rightIdentityEquiv[M[_], T](implicit monad: Monad[M], arb: Arbitrary[M[T]], equiv: Equiv[M[T]]): Prop = rightIdentity[M, T] @deprecated("use associative[T]", since = "0.13.0") @@ -49,7 +50,7 @@ object MonadLaws { fn1: Arbitrary[(T) => M[U]], fn2: Arbitrary[U => M[V]], equiv: Equiv[M[V]] - ) = + ): Prop = associative[M, T, U, V] // Just generate a map and use that as a function: @@ -75,7 +76,7 @@ object MonadLaws { arbr: Arbitrary[M[R]], fn2: Arbitrary[U => M[R]], arbu: Arbitrary[U] - ) = + ): Prop = monadLaws[M, T, U, R] // $COVERAGE-ON$ @@ -84,10 +85,10 @@ object MonadLaws { arb: Arbitrary[T], arbfn: Arbitrary[(T) => M[U]], equiv: Equiv[M[U]] - ) = + ): Prop = forAll((t: T, fn: T => M[U]) => Equiv[M[U]].equiv(t.pure[M].flatMap(fn), fn(t))) - def rightIdentity[M[_], T](implicit monad: Monad[M], arb: Arbitrary[M[T]], equiv: Equiv[M[T]]) = + def rightIdentity[M[_], T](implicit monad: Monad[M], arb: Arbitrary[M[T]], equiv: Equiv[M[T]]): Prop = forAll((mt: M[T]) => equiv.equiv(mt.flatMap(_.pure[M]), mt)) def associative[M[_], T, U, V](implicit @@ -96,7 +97,7 @@ object MonadLaws { fn1: Arbitrary[(T) => M[U]], fn2: Arbitrary[U => M[V]], equiv: Equiv[M[V]] - ) = forAll { (mt: M[T], f1: T => M[U], f2: U => M[V]) => + ): Prop = forAll { (mt: M[T], f1: T => M[U], f2: U => M[V]) => equiv.equiv(mt.flatMap(f1).flatMap(f2), mt.flatMap(t => f1(t).flatMap(f2))) } @@ -110,7 +111,7 @@ object MonadLaws { arbr: Arbitrary[M[R]], fn2: Arbitrary[U => M[R]], arbu: Arbitrary[U] - ) = + ): Prop = // TODO: equivT and equivU are unused, only equivR is used // but it would break binary compatibility to remove them associative[M, T, U, R] && rightIdentity[M, R] && leftIdentity[M, U, R] diff --git a/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala index 84636ee52..59eb9ee32 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala @@ -18,6 +18,7 @@ package com.twitter.algebird import org.scalacheck.Arbitrary import org.scalacheck.Prop._ +import org.scalacheck.Prop class AggregatorLaws extends CheckProperties { @@ -85,7 +86,7 @@ class AggregatorLaws extends CheckProperties { } } - def checkNumericSum[T: Arbitrary](implicit num: Numeric[T]) = + def checkNumericSum[T: Arbitrary](implicit num: Numeric[T]): Prop = forAll { in: List[T] => val aggregator = Aggregator.numericSum[T] aggregator(in) == in.map(num.toDouble).sum diff --git 
a/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala index 8b7bdab42..3e4d91f53 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AppendAggregatorTest.scala @@ -4,8 +4,8 @@ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec class AppendAggregatorTest extends AnyWordSpec with Matchers { - val data = Vector.fill(100)(scala.util.Random.nextInt(100)) - val mpty = Vector.empty[Int] + val data: Vector[Int] = Vector.fill(100)(scala.util.Random.nextInt(100)) + val mpty: Vector[Int] = Vector.empty[Int] // test the methods that appendSemigroup method defines or overrides def testMethodsSemigroup[E, M, P]( diff --git a/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala index 5a38df194..50fbd57c6 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala @@ -13,7 +13,7 @@ class ApproximateLaws extends CheckProperties { import com.twitter.algebird.BaseProperties._ import org.scalacheck.Gen.choose - implicit val approxGen = + implicit val approxGen: Arbitrary[Approximate[Long]] = Arbitrary { for { v0 <- choose(0L, (1L << 15) - 2) @@ -35,7 +35,7 @@ class ApproximateLaws extends CheckProperties { ((ap1 + (ap1.negate)) ~ 0L) && ((ap2 + (ap2.negate)) ~ 0L) } } - def boundsAreOrdered[N](ap: Approximate[N]) = { + def boundsAreOrdered[N](ap: Approximate[N]): Boolean = { val n = ap.numeric n.lteq(ap.min, ap.estimate) && n.lteq(ap.estimate, ap.max) } @@ -51,8 +51,8 @@ class ApproximateLaws extends CheckProperties { } } - val trueGen = choose(0.0, 1.0).map(ApproximateBoolean(true, _)) - val falseGen = choose(0.0, 1.0).map(ApproximateBoolean(false, _)) + val trueGen: Gen[ApproximateBoolean] = choose(0.0, 1.0).map(ApproximateBoolean(true, _)) + val falseGen: Gen[ApproximateBoolean] = choose(0.0, 1.0).map(ApproximateBoolean(false, _)) implicit val approxArb: Arbitrary[ApproximateBoolean] = Arbitrary( diff --git a/algebird-test/src/test/scala/com/twitter/algebird/AveragedValueLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/AveragedValueLaws.scala index 7aade0133..70107ed76 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/AveragedValueLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AveragedValueLaws.scala @@ -5,6 +5,7 @@ import com.twitter.algebird.scalacheck.arbitrary._ import com.twitter.algebird.scalacheck.NonEmptyVector import org.scalacheck.Arbitrary import org.scalacheck.Prop.forAll +import org.scalacheck.Prop class AveragedValueLaws extends CheckProperties { def avg[T](v: Vector[T])(implicit num: Numeric[T]): Double = { @@ -51,7 +52,7 @@ class AveragedValueLaws extends CheckProperties { } } - def numericAggregatorTest[T: Numeric: Arbitrary] = + def numericAggregatorTest[T: Numeric: Arbitrary]: Prop = forAll { v: NonEmptyVector[T] => val averaged = AveragedValue.numericAggregator[T].apply(v.items) approxEq(1e-10)(avg(v.items), averaged) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala index 7e72938c4..93fa9d6a6 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala +++ 
b/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala @@ -22,7 +22,7 @@ import Helpers.arbitraryBatched class BatchedLaws extends CheckProperties { import BaseProperties._ - implicit val arbitraryBigDecimalsHere = + implicit val arbitraryBigDecimalsHere: Arbitrary[BigDecimal] = BaseProperties.arbReasonableBigDecimals def testBatchedMonoid[A: Arbitrary: Monoid](name: String, size: Int): Unit = { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala index bfaf377b6..1228d6ecd 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala @@ -6,6 +6,7 @@ import org.scalacheck.{Arbitrary, Gen} import org.scalacheck.Prop._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec +import scala.util.Random object BloomFilterTestUtils { def toSparse[A](bf: BF[A]): BFSparse[A] = bf match { @@ -33,10 +34,10 @@ class BloomFilterLaws extends CheckProperties { import com.twitter.algebird.BaseProperties._ import BloomFilterTestUtils._ - val NUM_HASHES = 6 - val WIDTH = 32 + val NUM_HASHES: Int = 6 + val WIDTH: Int = 32 - implicit val bfMonoid = new BloomFilterMonoid[String](NUM_HASHES, WIDTH) + implicit val bfMonoid: BloomFilterMonoid[String] = new BloomFilterMonoid[String](NUM_HASHES, WIDTH) implicit val bfGen: Arbitrary[BF[String]] = Arbitrary { @@ -115,8 +116,8 @@ class BloomFilterLaws extends CheckProperties { class BFHashIndices extends CheckProperties { - val NUM_HASHES = 10 - val WIDTH = 4752800 + val NUM_HASHES: Int = 10 + val WIDTH: Int = 4752800 implicit val bfHash: Arbitrary[BFHash[String]] = Arbitrary { @@ -134,7 +135,7 @@ class BFHashIndices extends CheckProperties { * This is the version of the BFHash as of before the "negative values fix" */ case class NegativeBFHash(numHashes: Int, width: Int) { - val size = numHashes + val size: Int = numHashes def apply(s: String): Stream[Int] = nextHash(s.getBytes, numHashes) @@ -188,30 +189,30 @@ class BloomFilterFalsePositives[T: Gen: Hash128](falsePositiveRate: Double) exte type Input = T type Result = Boolean - val maxNumEntries = 1000 + val maxNumEntries: Int = 1000 - def exactGenerator = + def exactGenerator: Gen[Set[T]] = for { numEntries <- Gen.choose(1, maxNumEntries) set <- Gen.containerOfN[Set, T](numEntries, implicitly[Gen[T]]) } yield set - def makeApproximate(set: Set[T]) = { + def makeApproximate(set: Set[T]): BF[T] = { val bfMonoid = BloomFilter[T](set.size, falsePositiveRate) val values = set.toSeq bfMonoid.create(values: _*) } - def inputGenerator(set: Set[T]) = + def inputGenerator(set: Set[T]): Gen[T] = for { randomValues <- Gen.listOfN[T](set.size, implicitly[Gen[T]]) x <- Gen.oneOf((set ++ randomValues).toSeq) } yield x - def exactResult(s: Set[T], t: T) = s.contains(t) + def exactResult(s: Set[T], t: T): Boolean = s.contains(t) - def approximateResult(bf: BF[T], t: T) = bf.contains(t) + def approximateResult(bf: BF[T], t: T): ApproximateBoolean = bf.contains(t) } class BloomFilterCardinality[T: Gen: Hash128] extends ApproximateProperty { @@ -222,26 +223,26 @@ class BloomFilterCardinality[T: Gen: Hash128] extends ApproximateProperty { type Input = Unit type Result = Long - val maxNumEntries = 10000 - val falsePositiveRate = 0.01 + val maxNumEntries: Int = 10000 + val falsePositiveRate: Double = 0.01 - def exactGenerator = + def exactGenerator: Gen[Set[T]] = for { numEntries <- 
Gen.choose(1, maxNumEntries) set <- Gen.containerOfN[Set, T](numEntries, implicitly[Gen[T]]) } yield set - def makeApproximate(set: Set[T]) = { + def makeApproximate(set: Set[T]): BF[T] = { val bfMonoid = BloomFilter[T](set.size, falsePositiveRate) val values = set.toSeq bfMonoid.create(values: _*) } - def inputGenerator(set: Set[T]) = Gen.const(()) + def inputGenerator(set: Set[T]): Gen[Unit] = Gen.const(()) - def exactResult(s: Set[T], u: Unit) = s.size - def approximateResult(bf: BF[T], u: Unit) = bf.size + def exactResult(s: Set[T], u: Unit): Long = s.size + def approximateResult(bf: BF[T], u: Unit): Approximate[Long] = bf.size } class BloomFilterProperties extends ApproximateProperties("BloomFilter") { @@ -262,7 +263,7 @@ class BloomFilterProperties extends ApproximateProperties("BloomFilter") { class BloomFilterTest extends AnyWordSpec with Matchers { - val RAND = new scala.util.Random + val RAND: Random = new scala.util.Random "BloomFilter" should { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala b/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala index 57cae8346..ac8b1bda9 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala @@ -5,6 +5,7 @@ import org.scalacheck.{Arbitrary, Gen} import scala.collection.mutable.{Map => MMap} import scala.collection.{Map => ScMap} import org.scalacheck.Prop._ +import org.scalacheck.Prop class CollectionSpecification extends CheckProperties { import com.twitter.algebird.BaseProperties._ @@ -84,7 +85,7 @@ class CollectionSpecification extends CheckProperties { monoidLaws[Set[Int]] } - implicit def mapArb[K: Arbitrary, V: Arbitrary: Monoid] = Arbitrary { + implicit def mapArb[K: Arbitrary, V: Arbitrary: Monoid] = Arbitrary { // scalafix:ok val mv = implicitly[Monoid[V]] implicitly[Arbitrary[Map[K, V]]].arbitrary .map { @@ -92,17 +93,20 @@ class CollectionSpecification extends CheckProperties { } } - implicit def scMapArb[K: Arbitrary, V: Arbitrary: Monoid] = Arbitrary { + implicit def scMapArb[K: Arbitrary, V: Arbitrary: Monoid]: Arbitrary[ScMap[K, V]] = Arbitrary { mapArb[K, V].arbitrary .map { map: Map[K, V] => map: ScMap[K, V] } } - implicit def mMapArb[K: Arbitrary, V: Arbitrary: Monoid] = Arbitrary { + implicit def mMapArb[K: Arbitrary, V: Arbitrary: Monoid]: Arbitrary[MMap[K, V]] = Arbitrary { mapArb[K, V].arbitrary .map { map: Map[K, V] => MMap(map.toSeq: _*): MMap[K, V] } } - def mapPlusTimesKeys[M <: ScMap[Int, Int]](implicit rng: Ring[ScMap[Int, Int]], arbMap: Arbitrary[M]) = + def mapPlusTimesKeys[M <: ScMap[Int, Int]](implicit + rng: Ring[ScMap[Int, Int]], + arbMap: Arbitrary[M] + ): Prop = forAll { (a: M, b: M) => // Subsets because zeros are removed from the times/plus values ((rng.times(a, b)).keys.toSet.subsetOf((a.keys.toSet & b.keys.toSet)) && @@ -298,8 +302,8 @@ class CollectionSpecification extends CheckProperties { } } - def square(x: Int) = if (x % 2 == 0) Some(x * x) else None - def mapEq[K] = MapAlgebra.sparseEquiv[K, Int] + def square(x: Int): Option[Int] = if (x % 2 == 0) Some(x * x) else None + def mapEq[K]: Equiv[Map[K, Int]] = MapAlgebra.sparseEquiv[K, Int] property("MapAlgebra.mergeLookup works") { forAll { (items: Set[Int]) => diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala index cce19e113..1eb793c69 100644 --- 
a/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala @@ -7,13 +7,13 @@ import com.twitter.algebird.BaseProperties._ import com.twitter.algebird.scalacheck.arbitrary._ object CorrelationLaws { - val EPS = 1e-10 + val EPS: Double = 1e-10 def aggregateFunction(f: Double => Double): Aggregator[Double, Correlation, Double] = CorrelationAggregator.correlation .composePrepare[Double](x => (x, f(x))) - val testList = Range.inclusive(-10, 10).map(_.toDouble).toList + val testList: List[Double] = Range.inclusive(-10, 10).map(_.toDouble).toList def corrApproxEq(corr1: Correlation, corr2: Correlation): Boolean = approxEqOrBothNaN(EPS)(corr1.c2, corr2.c2) && diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala index 1560b7c74..e10418ffe 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala @@ -16,9 +16,9 @@ object CmsLaws { class CmsLaws extends CheckProperties { import BaseProperties._ - val DELTA = 1e-8 - val EPS = 0.005 - val SEED = 1 + val DELTA: Double = 1e-8 + val EPS: Double = 0.005 + val SEED: Int = 1 def monoid[K: CMSHasher]: CMSMonoid[K] = CMS.monoid[K](EPS, DELTA, SEED) @@ -83,10 +83,10 @@ class CmsLaws extends CheckProperties { class TopPctCmsLaws extends CheckProperties { import BaseProperties._ - val DELTA = 1e-8 - val EPS = 0.005 - val SEED = 1 - val HEAVY_HITTERS_PCT = 0.1 + val DELTA: Double = 1e-8 + val EPS: Double = 0.005 + val SEED: Int = 1 + val HEAVY_HITTERS_PCT: Double = 0.1 def monoid[K: CMSHasher]: TopCMSMonoid[K] = TopPctCMS.monoid[K](EPS, DELTA, SEED, HEAVY_HITTERS_PCT) @@ -148,9 +148,9 @@ class TopPctCmsLaws extends CheckProperties { class SparseCMSTest extends AnyWordSpec with Matchers with ScalaCheckDrivenPropertyChecks { - val DELTA = 1e-8 - val EPS = 0.005 - val SEED = 1 + val DELTA: Double = 1e-8 + val EPS: Double = 0.005 + val SEED: Int = 1 "correctly count SparseCMS numbers" in { val cmsMonoid = CMS.monoid[Int](EPS, DELTA, SEED) @@ -165,9 +165,9 @@ class SparseCMSTest extends AnyWordSpec with Matchers with ScalaCheckDrivenPrope class CMSInstanceTest extends AnyWordSpec with Matchers with ScalaCheckDrivenPropertyChecks { - val DELTA = 1e-8 - val EPS = 0.005 - val SEED = 1 + val DELTA: Double = 1e-8 + val EPS: Double = 0.005 + val SEED: Int = 1 "correctly count CMSItem numbers" in { val cmsMonoid = CMS.monoid[Int](EPS, DELTA, SEED) @@ -318,11 +318,11 @@ class CMSBytesTest extends CMSTest[Bytes](CmsLaws.int2Bytes(_)) abstract class CmsProperty[K] extends ApproximateProperty object CmsProperty { - val delta = 1e-10 - val eps = 0.001 - val seed = 1 + val delta: Double = 1e-10 + val eps: Double = 0.001 + val seed: Int = 1 - def makeApproximate[K: CMSHasher](exact: Vector[K]) = { + def makeApproximate[K: CMSHasher](exact: Vector[K]): CMS[K] = { val cmsMonoid: CMSMonoid[K] = CMS.monoid(eps, delta, seed) cmsMonoid.sum(exact.map(cmsMonoid.create(_))) } @@ -335,11 +335,11 @@ abstract class CmsFrequencyProperty[K: CMSHasher: Gen] extends CmsProperty { type Input = K type Result = Long - def makeApproximate(e: Exact) = CmsProperty.makeApproximate(e) + def makeApproximate(e: Exact): CMS[K] = CmsProperty.makeApproximate(e) def inputGenerator(e: Vector[K]): Gen[K] = Gen.oneOf(e) - def exactResult(vec: Vector[K], key: K) = vec.count(_ == key) - def 
approximateResult(cms: CMS[K], key: K) = cms.frequency(key) + def exactResult(vec: Vector[K], key: K): Long = vec.count(_ == key) + def approximateResult(cms: CMS[K], key: K): Approximate[Long] = cms.frequency(key) } class CmsSmallFrequencyProperty[K: CMSHasher: Gen] extends CmsFrequencyProperty[K] { @@ -359,7 +359,7 @@ class CmsInnerProductProperty[K: CMSHasher: Gen] extends CmsProperty[K] { type Input = Unit type Result = Long - def makeApproximate(exacts: (Vector[K], Vector[K])) = + def makeApproximate(exacts: (Vector[K], Vector[K])): (CMS[K], CMS[K]) = (CmsProperty.makeApproximate(exacts._1), CmsProperty.makeApproximate(exacts._2)) def exactGenerator: Gen[(Vector[K], Vector[K])] = @@ -370,13 +370,13 @@ class CmsInnerProductProperty[K: CMSHasher: Gen] extends CmsProperty[K] { def inputGenerator(e: (Vector[K], Vector[K])): Gen[Unit] = Gen.const(()) - def exactResult(lists: (Vector[K], Vector[K]), input: Unit) = { + def exactResult(lists: (Vector[K], Vector[K]), input: Unit): Long = { val counts1 = lists._1.groupBy(identity).mapValues(_.size) val counts2 = lists._2.groupBy(identity).mapValues(_.size) (counts1.keys.toSet & counts2.keys.toSet).toSeq.map(k => counts1(k) * counts2(k)).sum } - def approximateResult(cmses: (CMS[K], CMS[K]), input: Unit) = + def approximateResult(cmses: (CMS[K], CMS[K]), input: Unit): Approximate[Long] = cmses._1.innerProduct(cmses._2) } @@ -387,23 +387,23 @@ class CmsTotalCountProperty[K: CMSHasher: Gen] extends CmsProperty[K] { type Input = Unit type Result = Long - def makeApproximate(exact: Vector[K]) = CmsProperty.makeApproximate(exact) + def makeApproximate(exact: Vector[K]): CMS[K] = CmsProperty.makeApproximate(exact) def exactGenerator: Gen[Vector[K]] = Gen.containerOfN[Vector, K](10000, implicitly[Gen[K]]) def inputGenerator(e: Vector[K]): Gen[Unit] = Gen.const(()) - def exactResult(list: Vector[K], input: Unit) = list.length + def exactResult(list: Vector[K], input: Unit): Long = list.length - def approximateResult(cms: CMS[K], input: Unit) = + def approximateResult(cms: CMS[K], input: Unit): Approximate[Long] = Approximate.exact(cms.totalCount) } class CmsProperties extends ApproximateProperties("CountMinSketch") { import ApproximateProperty.toProp - implicit val intGen = Gen.choose(1, 100) + implicit val intGen: Gen[Int] = Gen.choose(1, 100) property("CMS works for small lists") = toProp(new CmsSmallFrequencyProperty[Int](), 10, 10, 0.01) property("CMS works for large lists") = toProp(new CmsLargeFrequencyProperty[Int](), 10, 10, 0.01) @@ -418,9 +418,9 @@ abstract class CMSTest[K: CMSHasher](toK: Int => K) with Matchers with ScalaCheckDrivenPropertyChecks { - val DELTA = 1e-10 - val EPS = 0.001 - val SEED = 1 + val DELTA: Double = 1e-10 + val EPS: Double = 0.001 + val SEED: Int = 1 private[this] val maxDepth = 70 private[this] val maxWidth = 1000 @@ -430,13 +430,13 @@ abstract class CMSTest[K: CMSHasher](toK: Int => K) // We use TopPctCMS for testing CMSCounting functionality. We argue that because TopPctCMS[K] encapsulates CMS[K] // and uses it for all its counting/querying functionality (like an adapter) we can test CMS[K] indirectly through // testing TopPctCMS[K]. 
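// Editor's note: an illustrative sketch, not part of this patch. The tests above drive
// CMS through its monoid; this shows a minimal frequency query using only the calls
// visible in this patch (CMS.monoid, create, sum, frequency) plus Approximate.estimate.
// The eps/delta/seed values simply mirror the constants used in the surrounding tests.
import com.twitter.algebird.{CMS, CMSMonoid}

object CmsSketch {
  val monoid: CMSMonoid[Long] = CMS.monoid[Long](0.001, 1e-10, 1) // eps, delta, seed
  // Approximate count of `key` in `xs`: one single-item sketch per element, summed, then queried.
  def approxCount(xs: Seq[Long], key: Long): Long = {
    val cms = monoid.sum(xs.map(monoid.create(_)))
    cms.frequency(key).estimate // Approximate[Long] -> point estimate
  }
}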
- val COUNTING_CMS_MONOID = { + val COUNTING_CMS_MONOID: TopPctCMSMonoid[K] = { val ANY_HEAVY_HITTERS_PCT = 0.1 // heavy hitters functionality is not relevant for the tests using this monoid TopPctCMS.monoid[K](EPS, DELTA, SEED, ANY_HEAVY_HITTERS_PCT) } - val RAND = new scala.util.Random + val RAND: Random = new scala.util.Random /** * Returns the elements in {data} that appear at least heavyHittersPct * data.size times. @@ -986,9 +986,9 @@ class CMSFunctionsSpec extends AnyPropSpec with ScalaCheckPropertyChecks with Ma class CMSParamsSpec extends AnyPropSpec with ScalaCheckPropertyChecks with Matchers { - val AnyEps = 0.001 - val AnyDelta = 1e-5 - val AnyHashes = { + val AnyEps: Double = 0.001 + val AnyDelta: Double = 1e-5 + val AnyHashes: Seq[CMSHash[Long]] = { val AnySeed = 1 CMSFunctions.generateHashes[Long](AnyEps, AnyDelta, AnySeed) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/DecayedValueLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/DecayedValueLaws.scala index 60477dd43..404a2b410 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/DecayedValueLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/DecayedValueLaws.scala @@ -5,13 +5,14 @@ import com.twitter.algebird.BaseProperties._ import com.twitter.algebird.scalacheck.arbitrary._ import org.scalacheck.Gen.choose import org.scalacheck.Prop._ +import org.scalacheck.Prop class DecayedValueLaws extends CheckProperties { - val EPS = 0.1 + val EPS: Double = 0.1 case class Params(mean: Double, halfLife: Double, count: Int, maxNoise: Double) - implicit val decayedMonoid = DecayedValue.monoidWithEpsilon(0.001) + implicit val decayedMonoid: Monoid[DecayedValue] = DecayedValue.monoidWithEpsilon(0.001) property("DecayedValue Monoid laws") { implicit val equiv: Equiv[DecayedValue] = @@ -21,7 +22,7 @@ class DecayedValueLaws extends CheckProperties { commutativeMonoidLaws[DecayedValue] } - def averageApproxEq(fn: (DecayedValue, Params) => Double)(implicit p: Arbitrary[Params]) = + def averageApproxEq(fn: (DecayedValue, Params) => Double)(implicit p: Arbitrary[Params]): Prop = forAll { (params: Params) => val rand = new scala.util.Random val data = (0 to params.count).map { t => diff --git a/algebird-test/src/test/scala/com/twitter/algebird/DecayedVectorProperties.scala b/algebird-test/src/test/scala/com/twitter/algebird/DecayedVectorProperties.scala index a39fad94c..c83ed8cdb 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/DecayedVectorProperties.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/DecayedVectorProperties.scala @@ -32,7 +32,7 @@ class DecayedVectorProperties extends CheckProperties { def decayedMapEqFn( a: DecayedVector[({ type x[a] = Map[Int, a] })#x], b: DecayedVector[({ type x[a] = Map[Int, a] })#x] - ) = { + ): Boolean = { def beCloseTo(a: Double, b: Double, eps: Double = 1e-5) = a == b || diff --git a/algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala index c2099646e..195df1e32 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala @@ -11,7 +11,7 @@ class DecayingCMSProperties extends CheckProperties { // override val generatorDrivenConfig = // PropertyCheckConfiguration(minSuccessful = 1000) - val eps = 1e-5 + val eps: Double = 1e-5 def close(a: Double, b: Double): Boolean = if (a == b) { diff --git 
a/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala index f77cc3a88..a1560d707 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala @@ -9,8 +9,8 @@ import org.scalatest.wordspec.AnyWordSpec class EventuallyRingLaws extends CheckProperties { import BaseProperties._ - val lGen = for (v <- Gen.choose(0L, 1L << 30L)) yield Left(v) - val rGen = for (v <- Gen.choose(0, 10000)) yield Right(v) + val lGen: Gen[Left[Long, Nothing]] = for (v <- Gen.choose(0L, 1L << 30L)) yield Left(v) + val rGen: Gen[Right[Nothing, Int]] = for (v <- Gen.choose(0, 10000)) yield Right(v) implicit val eitherArb: Arbitrary[Either[Long, Int]] = Arbitrary(Gen.oneOf(lGen, rGen)) @@ -44,12 +44,12 @@ class EventuallyRingLaws extends CheckProperties { class EventuallyMonoidLaws extends CheckProperties { import BaseProperties._ - implicit val eventuallyMonoid = + implicit val eventuallyMonoid: EventuallyMonoid[Int, String] = new EventuallyMonoid[Int, String](_.length)(_.length > 100) - val lGen = for (v <- Gen.choose(0, 1 << 14)) yield Left(v) - val rGen = for (v <- Gen.alphaStr) yield Right(v) - implicit val arb = Arbitrary(Gen.oneOf(lGen, rGen)) + val lGen: Gen[Left[Int, Nothing]] = for (v <- Gen.choose(0, 1 << 14)) yield Left(v) + val rGen: Gen[Right[Nothing, String]] = for (v <- Gen.alphaStr) yield Right(v) + implicit val arb: Arbitrary[Either[Int, String]] = Arbitrary(Gen.oneOf(lGen, rGen)) property("EventuallyMonoid is a Monoid") { monoidLaws[Either[Int, String]] @@ -59,17 +59,17 @@ class EventuallyMonoidLaws extends CheckProperties { class EventuallyTest extends AnyWordSpec with Matchers { - val eventuallyMonoid = + val eventuallyMonoid: EventuallyMonoid[Int, String] = new EventuallyMonoid[Int, String](_.length)(_.length > 100) - val short = "foo" - val med = Stream.continually("bar").take(20).mkString("") - val long = Stream.continually("bell").take(100).mkString("") + val short: String = "foo" + val med: String = Stream.continually("bar").take(20).mkString("") + val long: String = Stream.continually("bell").take(100).mkString("") // max batch is 1000 - val listOfRights = + val listOfRights: List[Either[Int, String]] = Stream.continually[Either[Int, String]](Right(short)).take(1010).toList - val listOfLefts = Stream + val listOfLefts: List[Either[Int, String]] = Stream .continually[Either[Int, String]](Left(short.length)) .take(1010) .toList diff --git a/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala index ed0268a7e..906e5750e 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/FoldTest.scala @@ -9,13 +9,13 @@ class FoldTest extends AnyWordSpec { def runCase(fold: Fold[I, O]): O } case class Zero[I, O](expected: O) extends Case[I, O] { - override def runCase(fold: Fold[I, O]) = fold.overEmpty + override def runCase(fold: Fold[I, O]): O = fold.overEmpty } case class One[I, O](in: I, expected: O) extends Case[I, O] { - override def runCase(fold: Fold[I, O]) = fold.overSingleton(in) + override def runCase(fold: Fold[I, O]): O = fold.overSingleton(in) } case class Many[I, O](in: Traversable[I], expected: O) extends Case[I, O] { - override def runCase(fold: Fold[I, O]) = fold.overTraversable(in) + override def runCase(fold: Fold[I, O]): O = 
fold.overTraversable(in) } def run[I, O](fold: Fold[I, O], cases: Case[I, O]*): Unit = diff --git a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogSeriesTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogSeriesTest.scala index 7c670116d..71508b668 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogSeriesTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogSeriesTest.scala @@ -8,7 +8,7 @@ import HyperLogLog.{int2Bytes, long2Bytes} class HyperLogLogSeriesLaws extends CheckProperties { import BaseProperties._ - implicit val monoid = new HyperLogLogSeriesMonoid(8) + implicit val monoid: HyperLogLogSeriesMonoid = new HyperLogLogSeriesMonoid(8) case class Timestamp(value: Long) @@ -76,11 +76,11 @@ class HLLSeriesSinceProperty extends ApproximateProperty { type Input = Long type Result = Long - val bits = 12 - val monoid = new HyperLogLogSeriesMonoid(bits) - val hll = new HyperLogLogMonoid(bits) + val bits: Int = 12 + val monoid: HyperLogLogSeriesMonoid = new HyperLogLogSeriesMonoid(bits) + val hll: HyperLogLogMonoid = new HyperLogLogMonoid(bits) - def makeApproximate(timestampedData: Seq[(Long, Long)]) = { + def makeApproximate(timestampedData: Seq[(Long, Long)]): HLLSeries = { val hllSeries = timestampedData .map { case (value, timestamp) => monoid.create(value, timestamp) } monoid.sum(hllSeries) @@ -96,7 +96,7 @@ class HLLSeriesSinceProperty extends ApproximateProperty { def inputGenerator(timestampedData: Exact): Gen[Long] = Gen.oneOf(timestampedData).map { case (_, timestamp) => timestamp } - def approximateResult(series: HLLSeries, timestamp: Long) = + def approximateResult(series: HLLSeries, timestamp: Long): Approximate[Long] = series.since(timestamp).toHLL.approximateSize def exactResult(timestampedData: Seq[(Long, Long)], timestamp: Long): Long = diff --git a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala index 0d137ca72..f2bd75dc5 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala @@ -9,6 +9,7 @@ import java.lang.AssertionError import org.scalatest.matchers.should.Matchers import org.scalatest.propspec.AnyPropSpec import org.scalatest.wordspec.AnyWordSpec +import java.{util => ju} object ReferenceHyperLogLog { @@ -51,8 +52,8 @@ class HyperLogLogLaws extends CheckProperties { import BaseProperties._ import HyperLogLog._ - val bits = 8 - implicit val hllMonoid = new HyperLogLogMonoid(bits) + val bits: Int = 8 + implicit val hllMonoid: HyperLogLogMonoid = new HyperLogLogMonoid(bits) implicit val hllGen: Arbitrary[HLL] = Arbitrary(Gen.choose(0L, 1000000L).map(v => hllMonoid.create(long2Bytes(v)))) @@ -90,12 +91,12 @@ class HyperLogLogLaws extends CheckProperties { class jRhoWMatchTest extends AnyPropSpec with ScalaCheckPropertyChecks with Matchers { import HyperLogLog._ - implicit val hashGen = Arbitrary { + implicit val hashGen: Arbitrary[Array[Byte]] = Arbitrary { Gen.containerOfN[Array, Byte](16, Arbitrary.arbitrary[Byte]) } /* For some reason choose in this version of scalacheck is bugged so I need the suchThat clause */ - implicit val bitsGen = Arbitrary { + implicit val bitsGen: Arbitrary[Int] = Arbitrary { Gen.choose(4, 31).suchThat(x => x >= 4 && x <= 31) } @@ -105,7 +106,7 @@ class jRhoWMatchTest extends AnyPropSpec with ScalaCheckPropertyChecks with Matc } abstract class 
HyperLogLogProperty(bits: Int) extends ApproximateProperty { - val monoid = new HyperLogLogMonoid(bits) + val monoid: HyperLogLogMonoid = new HyperLogLogMonoid(bits) def iterableToHLL[T: Hash128](it: Iterable[T]): HLL = monoid.sum(it.map(monoid.toHLL(_))) @@ -118,22 +119,22 @@ class HLLCountProperty[T: Hash128: Gen](bits: Int) extends HyperLogLogProperty(b type Input = Unit type Result = Long - def makeApproximate(it: Iterable[T]) = iterableToHLL(it) + def makeApproximate(it: Iterable[T]): HLL = iterableToHLL(it) - def exactGenerator = Gen.containerOf[Vector, T](implicitly[Gen[T]]) + def exactGenerator: Gen[Vector[T]] = Gen.containerOf[Vector, T](implicitly[Gen[T]]) - def inputGenerator(it: Exact) = Gen.const(()) - def approximateResult(a: HLL, i: Unit) = a.approximateSize - def exactResult(it: Iterable[T], i: Unit) = it.toSet.size + def inputGenerator(it: Exact): Gen[Unit] = Gen.const(()) + def approximateResult(a: HLL, i: Unit): Approximate[Long] = a.approximateSize + def exactResult(it: Iterable[T], i: Unit): Long = it.toSet.size } class HLLDownsizeCountProperty[T: Hash128: Gen](numItems: Int, oldBits: Int, newBits: Int) extends HLLCountProperty[T](oldBits) { - override def exactGenerator = + override def exactGenerator: Gen[Vector[T]] = Gen.containerOfN[Vector, T](numItems, implicitly[Gen[T]]) - override def approximateResult(a: HLL, i: Unit) = + override def approximateResult(a: HLL, i: Unit): Approximate[Long] = a.downsize(newBits).approximateSize } @@ -144,7 +145,7 @@ class HLLIntersectionProperty[T: Hash128: Gen](bits: Int, numHlls: Int) extends type Input = Unit type Result = Long - def makeApproximate(it: Seq[Seq[T]]) = it.map(iterableToHLL(_)) + def makeApproximate(it: Seq[Seq[T]]): Approx = it.map(iterableToHLL(_)) def exactGenerator: Gen[Seq[Seq[T]]] = { val vectorGenerator: Gen[Seq[T]] = @@ -152,11 +153,11 @@ class HLLIntersectionProperty[T: Hash128: Gen](bits: Int, numHlls: Int) extends Gen.containerOfN[Vector, Seq[T]](numHlls, vectorGenerator) } - def inputGenerator(it: Exact) = Gen.const(()) + def inputGenerator(it: Exact): Gen[Unit] = Gen.const(()) - def approximateResult(hlls: Seq[HLL], i: Unit) = monoid.intersectionSize(hlls) + def approximateResult(hlls: Seq[HLL], i: Unit): Approximate[Long] = monoid.intersectionSize(hlls) - def exactResult(it: Seq[Seq[T]], i: Unit) = + def exactResult(it: Seq[Seq[T]], i: Unit): Long = it.map(_.toSet).reduce(_.intersect(_)).size } @@ -171,11 +172,11 @@ abstract class SetSizeAggregatorProperty[T] extends ApproximateProperty { type Input = Unit type Result = Double - val maxSetSize = 10000 + val maxSetSize: Int = 10000 - def inputGenerator(it: Exact) = Gen.const(()) + def inputGenerator(it: Exact): Gen[Unit] = Gen.const(()) - def exactResult(set: Set[T], i: Unit) = set.size + def exactResult(set: Set[T], i: Unit): Double = set.size } abstract class SmallSetSizeAggregatorProperty[T: Gen] extends SetSizeAggregatorProperty[T] { @@ -185,7 +186,7 @@ abstract class SmallSetSizeAggregatorProperty[T: Gen] extends SetSizeAggregatorP set <- Gen.containerOfN[Set, T](size, implicitly[Gen[T]]) } yield set - def approximateResult(aggResult: Long, i: Unit) = + def approximateResult(aggResult: Long, i: Unit): Approximate[Double] = Approximate.exact(aggResult.toDouble) } @@ -196,7 +197,7 @@ abstract class LargeSetSizeAggregatorProperty[T: Gen](bits: Int) extends SetSize set <- Gen.containerOfN[Set, T](size, implicitly[Gen[T]]) } yield set - def approximateResult(aggResult: Long, i: Unit) = { + def approximateResult(aggResult: Long, i: Unit): 
Approximate[Double] = { val error = 1.04 / scala.math.sqrt(1 << bits) Approximate[Double](aggResult - error, aggResult, aggResult + error, 0.9972) } @@ -229,8 +230,8 @@ class LargeSetSizeHashAggregatorProperty[T: Hash128: Gen](bits: Int) class HLLProperties extends ApproximateProperties("HyperLogLog") { import ApproximateProperty.toProp - implicit val intGen = Gen.chooseNum(Int.MinValue, Int.MaxValue) - implicit val longGen = Gen.chooseNum(Long.MinValue, Long.MaxValue) + implicit val intGen: Gen[Int] = Gen.chooseNum(Int.MinValue, Int.MaxValue) + implicit val longGen: Gen[Long] = Gen.chooseNum(Long.MinValue, Long.MaxValue) for (bits <- List(5, 6, 7, 8, 10)) { property(s"Count ints with $bits bits") = toProp(new HLLCountProperty[Int](bits), 100, 1, 0.01) @@ -259,8 +260,8 @@ class SetSizeAggregatorProperties extends ApproximateProperties("SetSizeAggregat import ApproximateProperty.toProp import HyperLogLog.int2Bytes - implicit val intGen = Gen.chooseNum(Int.MinValue, Int.MaxValue) - implicit val longGen = Gen.chooseNum(Long.MinValue, Long.MaxValue) + implicit val intGen: Gen[Int] = Gen.chooseNum(Int.MinValue, Int.MaxValue) + implicit val longGen: Gen[Long] = Gen.chooseNum(Long.MinValue, Long.MaxValue) for (bits <- List(5, 7, 8, 10)) { property( @@ -285,10 +286,10 @@ class HyperLogLogTest extends AnyWordSpec with Matchers { import HyperLogLog._ //Get the implicit int2bytes, long2Bytes - val r = new java.util.Random + val r: ju.Random = new java.util.Random def exactCount[T](it: Iterable[T]): Int = it.toSet.size - def approxCount[T <% Array[Byte]](bits: Int, it: Iterable[T]) = { + def approxCount[T <% Array[Byte]](bits: Int, it: Iterable[T]): Double = { val hll = new HyperLogLogMonoid(bits) hll.sizeOf(hll.sum(it.map(hll.create(_)))).estimate.toDouble } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala index 266f44c40..737c7f85f 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala @@ -146,7 +146,7 @@ class IntervalLaws extends CheckProperties { forAll((x: Long, y: Long) => ((y >= x) == InclusiveLower(x).intersects(InclusiveUpper(y)))) } - def lowerUpperIntersection(low: Lower[Long], upper: Upper[Long], items: List[Long]) = + def lowerUpperIntersection(low: Lower[Long], upper: Upper[Long], items: List[Long]): Boolean = if (low.intersects(upper)) { low.least .map { lb => diff --git a/algebird-test/src/test/scala/com/twitter/algebird/JavaBoxedTests.scala b/algebird-test/src/test/scala/com/twitter/algebird/JavaBoxedTests.scala index 0685866da..727dd80c0 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/JavaBoxedTests.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/JavaBoxedTests.scala @@ -6,22 +6,23 @@ import java.util.{List => JList, Map => JMap} import org.scalacheck.{Arbitrary, Gen} import scala.collection.JavaConverters._ +import java.{util => ju} class JavaBoxedTests extends CheckProperties { import com.twitter.algebird.BaseProperties._ - implicit val jboolArg = Arbitrary { + implicit val jboolArg: Arbitrary[JBool] = Arbitrary { for (v <- Gen.oneOf(JBool.TRUE, JBool.FALSE)) yield v } - implicit val jintArg = Arbitrary { + implicit val jintArg: Arbitrary[Integer] = Arbitrary { for (v <- Gen.choose(Int.MinValue, Int.MaxValue)) yield JInt.valueOf(v) } - implicit val jshortArg = Arbitrary { + implicit val jshortArg: Arbitrary[JShort] = Arbitrary { for (v <- 
Gen.choose(Short.MinValue, Short.MaxValue)) yield Short.box(v) } - implicit val jlongArg = Arbitrary { + implicit val jlongArg: Arbitrary[JLong] = Arbitrary { // If we put Long.Max/Min we get overflows that seem to break the ring properties, not clear why for (v <- Gen.choose(Int.MinValue, Int.MaxValue)) yield JLong.valueOf(v) @@ -53,7 +54,7 @@ class JavaBoxedTests extends CheckProperties { // TODO add testing with JFloat/JDouble but check for approximate equals, pain in the ass. - implicit def jlist[T: Arbitrary] = Arbitrary { + implicit def jlist[T: Arbitrary]: Arbitrary[ju.List[T]] = Arbitrary { implicitly[Arbitrary[List[T]]].arbitrary.map(_.asJava) } @@ -61,7 +62,7 @@ class JavaBoxedTests extends CheckProperties { monoidLaws[JList[Int]] } - implicit def jmap[K: Arbitrary, V: Arbitrary: Semigroup] = Arbitrary { + implicit def jmap[K: Arbitrary, V: Arbitrary: Semigroup]: Arbitrary[ju.Map[K, V]] = Arbitrary { implicitly[Arbitrary[Map[K, V]]].arbitrary.map { _.filter(kv => isNonZero[V](kv._2)).asJava } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MaxLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/MaxLaws.scala index bf7e36818..361ed2a45 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MaxLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MaxLaws.scala @@ -5,27 +5,28 @@ import com.twitter.algebird.scalacheck.arbitrary._ import com.twitter.algebird.scalacheck.NonEmptyVector import org.scalacheck.Arbitrary import org.scalacheck.Prop.forAll +import org.scalacheck.Prop class MaxLaws extends CheckProperties { - def maxTest[T: Arbitrary: Ordering] = + def maxTest[T: Arbitrary: Ordering]: Prop = forAll { (l: Max[T], r: Max[T]) => val realMax = Max(Ordering[T].max(l.get, r.get)) l + r == realMax && (l.max(r)) == realMax } - def maxSemiGroupTest[T: Arbitrary: Ordering] = + def maxSemiGroupTest[T: Arbitrary: Ordering]: Prop = forAll { v: NonEmptyVector[T] => val maxItems = v.items.map(Max(_)) v.items.max == Max.semigroup[T].combineAllOption(maxItems).get.get } // Test equiv import. 
- val equiv = implicitly[Equiv[Max[Int]]] + val equiv: Equiv[Max[Int]] = implicitly[Equiv[Max[Int]]] // Testing that these ones can be found - val sgInt = implicitly[Semigroup[Max[Int]]] - val sgString = implicitly[Semigroup[Max[String]]] - val monoidString = implicitly[Monoid[Max[String]]] + val sgInt: Semigroup[Max[Int]] = implicitly[Semigroup[Max[Int]]] + val sgString: Semigroup[Max[String]] = implicitly[Semigroup[Max[String]]] + val monoidString: Monoid[Max[String]] = implicitly[Monoid[Max[String]]] property("Max.{ +, max } works on ints")(maxTest[Int]) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MetricProperties.scala b/algebird-test/src/test/scala/com/twitter/algebird/MetricProperties.scala index d4cd29c98..9de3aa708 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MetricProperties.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MetricProperties.scala @@ -39,10 +39,10 @@ class MetricLaws extends CheckProperties { metricLaws[Short] } - implicit val iterMetric = Metric.L1Iterable[Double] + implicit val iterMetric: Metric[Iterable[Double]] = Metric.L1Iterable[Double] // TODO: we won't need this when we have an Equatable trait - def listEqFn(a: List[Double], b: List[Double]) = { + def listEqFn(a: List[Double], b: List[Double]): Boolean = { val maxSize = scala.math.max(a.size, b.size) val diffA = maxSize - a.size val diffB = maxSize - b.size @@ -56,10 +56,10 @@ class MetricLaws extends CheckProperties { metricLaws[List[Double]] } - implicit val mapMetric = Metric.L1Map[Int, Double] + implicit val mapMetric: Metric[Map[Int, Double]] = Metric.L1Map[Int, Double] // TODO: we won't need this when we have an Equatable trait - def mapEqFn(a: Map[Int, Double], b: Map[Int, Double]) = + def mapEqFn(a: Map[Int, Double], b: Map[Int, Double]): Boolean = (a.keySet ++ b.keySet).forall { key => (a.get(key), b.get(key)) match { case (Some(aVal), Some(bVal)) => aVal == bVal diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala index 998b92c38..1e4453710 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala @@ -5,10 +5,12 @@ import org.scalacheck.{Arbitrary, Gen} import scala.math.Equiv import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec +import org.scalatest.Assertion +import java.{util => ju} class MinHasherTest extends CheckProperties { - implicit val mhMonoid = new MinHasher32(0.5, 512) - implicit val mhGen = Arbitrary { + implicit val mhMonoid: MinHasher32 = new MinHasher32(0.5, 512) + implicit val mhGen: Arbitrary[MinHashSignature] = Arbitrary { for (v <- Gen.choose(0, 10000)) yield (mhMonoid.init(v)) } @@ -19,9 +21,9 @@ class MinHasherTest extends CheckProperties { } class MinHasherSpec extends AnyWordSpec with Matchers { - val r = new java.util.Random + val r: ju.Random = new java.util.Random - def test[H](mh: MinHasher[H], similarity: Double, epsilon: Double) = { + def test[H](mh: MinHasher[H], similarity: Double, epsilon: Double): Assertion = { val (set1, set2) = randomSets(similarity) val exact = exactSimilarity(set1, set2) @@ -30,7 +32,7 @@ class MinHasherSpec extends AnyWordSpec with Matchers { assert(error < epsilon) } - def randomSets(similarity: Double) = { + def randomSets(similarity: Double): (Set[Double], Set[Double]) = { val s = 10000 val uniqueFraction = if (similarity == 1.0) 0.0 else (1 - similarity) 
/ (1 + similarity) @@ -51,10 +53,10 @@ class MinHasherSpec extends AnyWordSpec with Matchers { (unique1 ++ shared, unique2 ++ shared) } - def exactSimilarity[T](x: Set[T], y: Set[T]) = + def exactSimilarity[T](x: Set[T], y: Set[T]): Double = (x & y).size.toDouble / (x ++ y).size - def approxSimilarity[T, H](mh: MinHasher[H], x: Set[T], y: Set[T]) = { + def approxSimilarity[T, H](mh: MinHasher[H], x: Set[T], y: Set[T]): Double = { val sig1 = x .map(l => mh.init(l.toString)) .reduce((a, b) => mh.plus(a, b)) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MinLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/MinLaws.scala index fb9a7ba01..5b5d379a7 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MinLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MinLaws.scala @@ -5,22 +5,23 @@ import com.twitter.algebird.scalacheck.arbitrary._ import com.twitter.algebird.scalacheck.NonEmptyVector import org.scalacheck.Arbitrary import org.scalacheck.Prop.forAll +import org.scalacheck.Prop class MinLaws extends CheckProperties { - def minTest[T: Arbitrary: Ordering] = + def minTest[T: Arbitrary: Ordering]: Prop = forAll { (l: Min[T], r: Min[T]) => val realMin = Min(Ordering[T].min(l.get, r.get)) l + r == realMin && (l.min(r)) == realMin } - def minSemigroupTest[T: Arbitrary: Ordering] = + def minSemigroupTest[T: Arbitrary: Ordering]: Prop = forAll { v: NonEmptyVector[T] => val minItems = v.items.map(Min(_)) v.items.min == Min.semigroup[T].combineAllOption(minItems).get.get } // Test equiv import. - val equiv = implicitly[Equiv[Min[Int]]] + val equiv: Equiv[Min[Int]] = implicitly[Equiv[Min[Int]]] property("Min.{ +, min } works on ints")(minTest[Int]) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala b/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala index 54afae509..767ceb447 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MinMaxAggregatorSpec.scala @@ -9,7 +9,7 @@ class MinMaxAggregatorSpec extends AnyWordSpec with Matchers { case object TestElementB extends TestElementParent case object TestElementC extends TestElementParent - implicit val testOrdering = Ordering.fromLessThan[TestElementParent]((x, y) => + implicit val testOrdering: Ordering[TestElementParent] = Ordering.fromLessThan[TestElementParent]((x, y) => (x, y) match { case (TestElementA, TestElementA) => false case (TestElementA, _) => true @@ -20,7 +20,7 @@ class MinMaxAggregatorSpec extends AnyWordSpec with Matchers { } ) - val data = List(TestElementC, TestElementA, TestElementB) + val data: List[TestElementParent] = List(TestElementC, TestElementA, TestElementB) "MinAggregator" should { "produce the minimum value" in { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala index 839e5f99b..aa0fa344f 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala @@ -6,7 +6,7 @@ import org.scalatest.wordspec.AnyWordSpec import org.scalacheck.{Arbitrary, Gen} class MomentsLaws extends CheckProperties { - val EPS = 1e-10 + val EPS: Double = 1e-10 implicit val equiv: Equiv[Moments] = Equiv.fromFunction { (ml, mr) => diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MonadInstanceLaws.scala 
b/algebird-test/src/test/scala/com/twitter/algebird/MonadInstanceLaws.scala index 3e4a8f748..048be69fc 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MonadInstanceLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MonadInstanceLaws.scala @@ -62,7 +62,7 @@ class MonadInstanceLaws extends CheckProperties { } class MutableBox(var item: Int) { - def inc(v: Int) = item += v + def inc(v: Int): Unit = item += v } property("Reader behaves correctly") { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/NumericAlgebraTests.scala b/algebird-test/src/test/scala/com/twitter/algebird/NumericAlgebraTests.scala index ae3a7c49f..683aab915 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/NumericAlgebraTests.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/NumericAlgebraTests.scala @@ -1,8 +1,9 @@ package com.twitter.algebird +import org.scalacheck.Arbitrary class NumericAlgebraTests extends CheckProperties { import BaseProperties._ - implicit val arbitraryBigDecimalsHere = + implicit val arbitraryBigDecimalsHere: Arbitrary[BigDecimal] = BaseProperties.arbReasonableBigDecimals property(s"test int") { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala b/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala index 441375259..05bbccc2e 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala @@ -4,6 +4,7 @@ import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks import org.scalacheck.Arbitrary import org.scalatest.matchers.should.Matchers import org.scalatest.propspec.AnyPropSpec +import org.scalatest.compatible.Assertion /** * Tests abstract algebra against scala's Numeric trait @@ -11,7 +12,7 @@ import org.scalatest.propspec.AnyPropSpec * below to test all the numeric traits. 
*/ class NumericSpecification extends AnyPropSpec with ScalaCheckPropertyChecks with Matchers { - def plusNumericProp[T: Monoid: Numeric: Arbitrary] = forAll { (a: T, b: T) => + def plusNumericProp[T: Monoid: Numeric: Arbitrary]: Assertion = forAll { (a: T, b: T) => val mon = implicitly[Monoid[T]] val num = implicitly[Numeric[T]] assert(num.plus(a, b) == mon.plus(a, b)) @@ -32,7 +33,7 @@ class NumericSpecification extends AnyPropSpec with ScalaCheckPropertyChecks wit plusNumericProp[Float] } - def zeroNumericProp[T: Monoid: Group: Numeric: Arbitrary] = forAll { (a: T) => + def zeroNumericProp[T: Monoid: Group: Numeric: Arbitrary]: Assertion = forAll { (a: T) => val mon = implicitly[Monoid[T]] val grp = implicitly[Group[T]] val num = implicitly[Numeric[T]] @@ -44,7 +45,7 @@ class NumericSpecification extends AnyPropSpec with ScalaCheckPropertyChecks wit ) } - def zeroProps[T: Monoid: Numeric] = { + def zeroProps[T: Monoid: Numeric]: Assertion = { val mon = implicitly[Monoid[T]] val num = implicitly[Numeric[T]] assert( @@ -73,7 +74,7 @@ class NumericSpecification extends AnyPropSpec with ScalaCheckPropertyChecks wit zeroProps[Float] } - def minusNumericProp[T: Group: Numeric: Arbitrary] = forAll { (a: T, b: T) => + def minusNumericProp[T: Group: Numeric: Arbitrary]: Assertion = forAll { (a: T, b: T) => val grp = implicitly[Group[T]] val num = implicitly[Numeric[T]] assert(num.minus(a, b) == grp.minus(a, b)) @@ -95,7 +96,7 @@ class NumericSpecification extends AnyPropSpec with ScalaCheckPropertyChecks wit minusNumericProp[Float] } - def oneNumericProp[T: Ring: Numeric: Arbitrary] = forAll { (a: T) => + def oneNumericProp[T: Ring: Numeric: Arbitrary]: Assertion = forAll { (a: T) => val ring = implicitly[Ring[T]] val num = implicitly[Numeric[T]] assert( @@ -119,7 +120,7 @@ class NumericSpecification extends AnyPropSpec with ScalaCheckPropertyChecks wit oneNumericProp[Float] } - def timesNumericProp[T: Ring: Numeric: Arbitrary] = forAll { (a: T, b: T) => + def timesNumericProp[T: Ring: Numeric: Arbitrary]: Assertion = forAll { (a: T, b: T) => val ring = implicitly[Ring[T]] val num = implicitly[Numeric[T]] assert(num.times(a, b) == ring.times(a, b)) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala index 9815f71b0..60aef7eff 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala @@ -20,12 +20,13 @@ import org.scalacheck.Arbitrary import org.scalacheck.Gen.choose import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec +import scala.collection.immutable class QTreeLaws extends CheckProperties { import BaseProperties._ - implicit val qtSemigroup = new QTreeSemigroup[Long](4) - implicit val qtGen = Arbitrary { + implicit val qtSemigroup: QTreeSemigroup[Long] = new QTreeSemigroup[Long](4) + implicit val qtGen: Arbitrary[QTree[Long]] = Arbitrary { for (v <- choose(0L, 10000L)) yield (QTree(v)) } @@ -35,10 +36,10 @@ class QTreeLaws extends CheckProperties { } class QTreeTest extends AnyWordSpec with Matchers { - def randomList(n: Long) = + def randomList(n: Long): immutable.IndexedSeq[Double] = (1L to n).map(_ => math.random) - def buildQTree(k: Int, list: Seq[Double]) = { + def buildQTree(k: Int, list: Seq[Double]): QTree[Double] = { val qtSemigroup = new QTreeSemigroup[Double](k) qtSemigroup.sumOption(list.map(QTree(_))).get } @@ -49,7 +50,7 @@ class QTreeTest extends AnyWordSpec 
with Matchers { sorted(rank) } - def trueRangeSum(list: Seq[Double], from: Double, to: Double) = + def trueRangeSum(list: Seq[Double], from: Double, to: Double): Double = list.filter(_ >= from).filter(_ < to).sum for (k <- Seq(3, 11, 51, 101)) { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/RightFolded2Test.scala b/algebird-test/src/test/scala/com/twitter/algebird/RightFolded2Test.scala index e91a09cf3..b767413d0 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/RightFolded2Test.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/RightFolded2Test.scala @@ -8,10 +8,10 @@ import scala.annotation.tailrec class RightFolded2Test extends CheckProperties { import com.twitter.algebird.BaseProperties._ - def monFold(i: Int, l: Long) = l + i.toLong - def mapFn(l: Long) = l / 2 + def monFold(i: Int, l: Long): Long = l + i.toLong + def mapFn(l: Long): Long = l / 2 - implicit val rightFoldedMonoid = + implicit val rightFoldedMonoid: RightFolded2Monoid[Int, Long, Long] = RightFolded2.monoid[Int, Long, Long](mapFn)(monFold) def rightFolded2Value[In, Out, Acc](implicit diff --git a/algebird-test/src/test/scala/com/twitter/algebird/RightFoldedTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/RightFoldedTest.scala index 80d3d03ce..18d40e370 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/RightFoldedTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/RightFoldedTest.scala @@ -19,7 +19,8 @@ class RightFoldedTest extends CheckProperties { Gen.oneOf(rightFoldedValue[Out].arbitrary, rightFoldedToFold[In].arbitrary) } - implicit val rightFoldedMonoid = RightFolded.monoid[Int, Long]((i, l) => l + i.toLong) + implicit val rightFoldedMonoid: Monoid[RightFolded[Int, Long]] = + RightFolded.monoid[Int, Long]((i, l) => l + i.toLong) property("RightFolded is a monoid") { monoidLaws[RightFolded[Int, Long]] diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SGDTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SGDTest.scala index 3dc45c328..6cf7e8006 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SGDTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SGDTest.scala @@ -6,28 +6,29 @@ import org.scalacheck.{Arbitrary, Gen} class SGDLaws extends CheckProperties { import com.twitter.algebird.BaseProperties._ - implicit val sgdMonoid = + implicit val sgdMonoid: SGDMonoid[(Double, IndexedSeq[Double])] = new SGDMonoid(SGD.constantStep(0.001), SGD.linearGradient) - val zeroStepMonoid = new SGDMonoid(SGD.constantStep(0.0), SGD.linearGradient) + val zeroStepMonoid: SGDMonoid[(Double, IndexedSeq[Double])] = + new SGDMonoid(SGD.constantStep(0.0), SGD.linearGradient) val (m, b) = (2.0, 4.0) - val eps = 1e-3 + val eps: Double = 1e-3 - val sgdPosGen = for { + val sgdPosGen: Gen[SGDPos[(Double, Vector[Double])]] = for { x <- Gen.choose(0.0, 1.0) n <- Gen.choose(0.0, 0.001) } yield SGDPos((m * x + b + n, Vector(x))) - val sgdWGen = for { + val sgdWGen: Gen[SGDWeights] = for { cnt <- Gen.choose(0L, 100000L) m <- Gen.choose(-10.0, 10.0) b <- Gen.choose(-10.0, 10.0) } yield SGDWeights(cnt, Vector(m, b)) - val zeroGen = Gen.const(SGDZero) + val zeroGen: Gen[SGDZero.type] = Gen.const(SGDZero) - implicit val sgdPos = Arbitrary(sgdPosGen) - implicit val sgdWArb = Arbitrary(sgdWGen) + implicit val sgdPos: Arbitrary[SGDPos[(Double, Vector[Double])]] = Arbitrary(sgdPosGen) + implicit val sgdWArb: Arbitrary[SGDWeights] = Arbitrary(sgdWGen) implicit val sgdArb: Arbitrary[SGD[(Double, IndexedSeq[Double])]] = Arbitrary 
{ Gen.oneOf(sgdWGen, sgdPosGen, zeroGen) @@ -65,7 +66,8 @@ class SGDLaws extends CheckProperties { def minus(x: IndexedSeq[Double], y: IndexedSeq[Double]): IndexedSeq[Double] = x.zip(y).map { case (x: Double, y: Double) => x - y } - val oneStepMonoid = new SGDMonoid(SGD.constantStep(1.0), SGD.linearGradient) + val oneStepMonoid: SGDMonoid[(Double, IndexedSeq[Double])] = + new SGDMonoid(SGD.constantStep(1.0), SGD.linearGradient) property("unit step can be undone by adding gradient") { forAll { (w: SGDWeights, pos: SGDPos[(Double, Vector[Double])]) => diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala index 501c4022f..d5faea610 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala @@ -3,12 +3,13 @@ package com.twitter.algebird import org.scalacheck.{Arbitrary, Gen} import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec +import scala.util.Random object SketchMapTestImplicits { - val DELTA = 1e-6 - val EPS = 0.001 - val SEED = 1 - val HEAVY_HITTERS_COUNT = 10 + val DELTA: Double = 1e-6 + val EPS: Double = 0.001 + val SEED: Int = 1 + val HEAVY_HITTERS_COUNT: Int = 10 } class SketchMapLaws extends CheckProperties { @@ -16,9 +17,9 @@ class SketchMapLaws extends CheckProperties { import SketchMapTestImplicits._ import HyperLogLog.int2Bytes - val params = SketchMapParams[Int](SEED, EPS, 1e-3, HEAVY_HITTERS_COUNT) - implicit val smMonoid = SketchMap.monoid[Int, Long](params) - implicit val smGen = Arbitrary { + val params: SketchMapParams[Int] = SketchMapParams[Int](SEED, EPS, 1e-3, HEAVY_HITTERS_COUNT) + implicit val smMonoid: SketchMapMonoid[Int, Long] = SketchMap.monoid[Int, Long](params) + implicit val smGen: Arbitrary[SketchMap[Int, Long]] = Arbitrary { for (key: Int <- Gen.choose(0, 10000)) yield (smMonoid.create((key, 1L))) } @@ -39,9 +40,9 @@ class SketchMapTest extends AnyWordSpec with Matchers { import SketchMapTestImplicits._ import HyperLogLog.int2Bytes - val PARAMS = SketchMapParams[Int](SEED, EPS, DELTA, HEAVY_HITTERS_COUNT) - val MONOID = SketchMap.monoid[Int, Long](PARAMS) - val RAND = new scala.util.Random + val PARAMS: SketchMapParams[Int] = SketchMapParams[Int](SEED, EPS, DELTA, HEAVY_HITTERS_COUNT) + val MONOID: SketchMapMonoid[Int, Long] = SketchMap.monoid[Int, Long](PARAMS) + val RAND: Random = new scala.util.Random "SketchMap" should { "count total number of elements in a stream" in { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala index c0a8f7fe4..3fe48f447 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala @@ -43,7 +43,7 @@ class SummingIteratorTest extends AnyPropSpec with ScalaCheckPropertyChecks with StatefulSummerLaws.zeroEquiv(Semigroup.sumOption(it0), Semigroup.sumOption(it1)) case class Capacity(c: Int) - implicit val capArb = Arbitrary { + implicit val capArb: Arbitrary[Capacity] = Arbitrary { for (c <- Gen.choose(0, 10240)) yield Capacity(c) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SummingQueueTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SummingQueueTest.scala index 78c661ad7..07eeff67b 100644 --- 
a/algebird-test/src/test/scala/com/twitter/algebird/SummingQueueTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SummingQueueTest.scala @@ -18,10 +18,11 @@ package com.twitter.algebird import org.scalacheck.{Arbitrary, Gen} import org.scalacheck.Prop._ +import scala.util.Random object SummingCacheTest { case class Capacity(cap: Int) extends AnyVal - implicit val capArb = Arbitrary { + implicit val capArb: Arbitrary[Capacity] = Arbitrary { for (c <- Gen.choose(0, 1024)) yield Capacity(c) } } @@ -36,7 +37,7 @@ class SummingCacheTest extends CheckProperties { SummingCache[K, V](c.cap) // Maps are tricky to compare equality for since zero values are often removed - def test[K, V: Monoid](c: Capacity, items: List[(K, V)]) = { + def test[K, V: Monoid](c: Capacity, items: List[(K, V)]): Boolean = { val sc = newCache[K, V](c) val mitems = items.map(Map(_)) implicit val mapEq = mapEquiv[K, V] @@ -56,16 +57,16 @@ class SummingCacheTest extends CheckProperties { class AdaptiveCacheTest extends SummingCacheTest { import SummingCacheTest._ - override def newCache[K, V: Monoid](c: Capacity) = + override def newCache[K, V: Monoid](c: Capacity): StatefulSummer[Map[K, V]] = new AdaptiveCache[K, V](c.cap) } class SummingWithHitsCacheTest extends SummingCacheTest { import SummingCacheTest._ - val RAND = new scala.util.Random + val RAND: Random = new scala.util.Random - def getHits[K, V: Monoid](c: Capacity, items: List[(K, V)]) = { + def getHits[K, V: Monoid](c: Capacity, items: List[(K, V)]): List[Int] = { val sc = SummingWithHitsCache[K, V](c.cap) val mitems = items.map(Map(_)) mitems.map(sc.putWithHits(_)._1).tail @@ -106,13 +107,13 @@ class SummingWithHitsCacheTest extends SummingCacheTest { } class SummingQueueTest extends CheckProperties { - val zeroCapQueue = SummingQueue[Int](0) // passes all through + val zeroCapQueue: SummingQueue[Int] = SummingQueue[Int](0) // passes all through property("0 capacity always returns") { forAll { i: Int => zeroCapQueue(i) == Some(i) } } - val sb = SummingQueue[Int](3) // buffers three at a time + val sb: SummingQueue[Int] = SummingQueue[Int](3) // buffers three at a time property("puts are like sums") { forAll((items: List[Int]) => StatefulSummerLaws.sumIsPreserved(sb, items)) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala b/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala index e4e15932e..caffac678 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala @@ -26,10 +26,10 @@ import scala.collection.JavaConverters._ class TopKTests extends CheckProperties { import com.twitter.algebird.BaseProperties._ - val SIZE = 10 + val SIZE: Int = 10 - implicit def qmonoid = new PriorityQueueMonoid[Int](SIZE) - implicit def queueArb = Arbitrary { + implicit def qmonoid: PriorityQueueMonoid[Int] = new PriorityQueueMonoid[Int](SIZE) + implicit def queueArb: Arbitrary[PriorityQueue[Int]] = Arbitrary { implicitly[Arbitrary[List[Int]]].arbitrary.map(qmonoid.build(_)) } @@ -51,7 +51,7 @@ class TopKTests extends CheckProperties { * The following were specific bugs that we failed some prior * scalacheck (yay for randomized testing) */ - val pqPriorBugs = Seq(List(List(1, 1, 1, 2), List(0, 0, 0, 0, 0, 0, 0))) + val pqPriorBugs: Seq[List[List[Int]]] = Seq(List(List(1, 1, 1, 2), List(0, 0, 0, 0, 0, 0, 0))) property("Specific regressions are handled") { pqPriorBugs.forall(pqIsCorrect(_)) } @@ -60,9 +60,9 @@ class TopKTests extends 
CheckProperties { monoidLaws[PriorityQueue[Int]] } - implicit def tkmonoid = new TopKMonoid[Int](SIZE) + implicit def tkmonoid: TopKMonoid[Int] = new TopKMonoid[Int](SIZE) - implicit def topkArb = Arbitrary { + implicit def topkArb: Arbitrary[TopK[Int]] = Arbitrary { implicitly[Arbitrary[List[Int]]].arbitrary.map(tkmonoid.build(_)) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala index b2d9f58b1..3867b66c9 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala @@ -7,11 +7,11 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { // This gives you an implicit conversion from tuples of aggregators // to aggregator of tuples - val data = List(1, 3, 2, 0, 5, 6) - val MinAgg = Aggregator.min[Int] + val data: List[Int] = List(1, 3, 2, 0, 5, 6) + val MinAgg: Aggregator[Int, Int, Int] = Aggregator.min[Int] - val longData = data.map(_.toLong) - val SizeAgg = Aggregator.size + val longData: List[Long] = data.map(_.toLong) + val SizeAgg: MonoidAggregator[Any, Long, Long] = Aggregator.size "GeneratedTupleAggregators" should { import GeneratedTupleAggregator._ diff --git a/algebird-test/src/test/scala/com/twitter/algebird/VectorSpaceProperties.scala b/algebird-test/src/test/scala/com/twitter/algebird/VectorSpaceProperties.scala index 1665a292e..1d5c3ece7 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/VectorSpaceProperties.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/VectorSpaceProperties.scala @@ -22,7 +22,7 @@ class VectorSpaceProperties extends CheckProperties { import com.twitter.algebird.BaseVectorSpaceProperties._ // TODO: we won't need this when we have an Equatable trait - def mapEqFn(a: Map[Int, Double], b: Map[Int, Double]) = + def mapEqFn(a: Map[Int, Double], b: Map[Int, Double]): Boolean = (a.keySet ++ b.keySet).forall { key => (a.get(key), b.get(key)) match { case (Some(aVal), Some(bVal)) => beCloseTo(aVal, bVal) @@ -32,7 +32,7 @@ class VectorSpaceProperties extends CheckProperties { } } - implicit val genDouble = Arbitrary(Gen.choose(-1.0e50, 1.0e50)) + implicit val genDouble: Arbitrary[Double] = Arbitrary(Gen.choose(-1.0e50, 1.0e50)) property("map int double scaling") { vectorSpaceLaws[Double, ({ type x[a] = Map[Int, a] })#x](mapEqFn(_, _)) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/WindowLawsTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/WindowLawsTest.scala index aaf772f75..cf989aa1b 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/WindowLawsTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/WindowLawsTest.scala @@ -10,7 +10,7 @@ import org.scalacheck.Prop.forAll class WindowLaws extends CheckProperties { - implicit val mon = Window.monoid[Int](5) + implicit val mon: WindowMonoid[Int] = Window.monoid[Int](5) implicit def wGen[A: Arbitrary](implicit wm: WindowMonoid[A]): Arbitrary[Window[A]] = Arbitrary { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/statistics/GaussianDistributionMonoidTests.scala b/algebird-test/src/test/scala/com/twitter/algebird/statistics/GaussianDistributionMonoidTests.scala index 765d0229b..536460355 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/statistics/GaussianDistributionMonoidTests.scala +++ 
b/algebird-test/src/test/scala/com/twitter/algebird/statistics/GaussianDistributionMonoidTests.scala @@ -6,7 +6,7 @@ import org.scalacheck.{Arbitrary, Gen} class GaussianDistributionMonoidTests extends CheckProperties { import com.twitter.algebird.BaseProperties._ - implicit val gaussianGenerators = Arbitrary { + implicit val gaussianGenerators: Arbitrary[GaussianDistribution] = Arbitrary { for { mean <- Gen.choose(0, 10000) sigma <- Gen.choose(0, 10000) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala b/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala index 31af74502..ebf851cdb 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/statistics/StatisticsTests.scala @@ -6,17 +6,19 @@ import org.scalacheck.Arbitrary import org.scalacheck.Gen._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec +import com.twitter.algebird.Ring +import com.twitter.algebird.Monoid class StatisticsRingLaws extends CheckProperties with Matchers { - implicit val statsRing = new StatisticsRing[Int] - implicit val arb = Arbitrary(for (v <- choose(0, 1 << 30)) yield v) + implicit val statsRing: StatisticsRing[Int] = new StatisticsRing[Int]()(Ring.intRing) + implicit val arb: Arbitrary[Int] = Arbitrary(for (v <- choose(0, 1 << 30)) yield v) property("StatisticsRing is a Ring")(ringLaws[Int]) } class StatisticsMonoidLaws extends CheckProperties with Matchers { - implicit val statsMonoid = new StatisticsMonoid[Int] - implicit val arb = Arbitrary(for (v <- choose(0, 1 << 14)) yield v) + implicit val statsMonoid: StatisticsMonoid[Int] = new StatisticsMonoid[Int]()(Monoid.intMonoid) + implicit val arb: Arbitrary[Int] = Arbitrary(for (v <- choose(0, 1 << 14)) yield v) property("StatisticsMonoid is a Monoid")(monoidLaws[Int]) } @@ -24,7 +26,7 @@ class StatisticsMonoidLaws extends CheckProperties with Matchers { class StatisticsTest extends AnyWordSpec with Matchers { // the test framework garbles the exceptions :/ - lazy val statsMonoid = new StatisticsMonoid[Int] + lazy val statsMonoid: StatisticsMonoid[Int] = new StatisticsMonoid[Int] try { for (_ <- 1 to 2) statsMonoid.zero for (i <- 1 to 3) statsMonoid.plus(i, i) diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala index 268c8e6a4..460af9c2b 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala @@ -25,7 +25,7 @@ import com.twitter.util.Promise * the value just before the PromiseLink is calculated. 
*/ class PromiseLinkMonoid[V](monoid: Monoid[V]) extends Monoid[PromiseLink[V]] { //TODo(jcoveney) rename PromiseLink - def zero = PromiseLink(new Promise, monoid.zero) + def zero: PromiseLink[V] = PromiseLink(new Promise, monoid.zero) def plus(older: PromiseLink[V], newer: PromiseLink[V]): PromiseLink[V] = { val (PromiseLink(p1, v1), PromiseLink(p2, v2)) = (older, newer) @@ -33,7 +33,7 @@ class PromiseLinkMonoid[V](monoid: Monoid[V]) extends Monoid[PromiseLink[V]] { / PromiseLink(p2, monoid.plus(v1, v2)) } - override def isNonZero(v: PromiseLink[V]) = monoid.isNonZero(v.value) + override def isNonZero(v: PromiseLink[V]): Boolean = monoid.isNonZero(v.value) } /** @@ -51,5 +51,5 @@ object PromiseLink { implicit def monoid[V](implicit innerMonoid: Monoid[V]): PromiseLinkMonoid[V] = new PromiseLinkMonoid[V](innerMonoid) - def toPromiseLink[V](value: V) = PromiseLink(new Promise, value) + def toPromiseLink[V](value: V): PromiseLink[V] = PromiseLink(new Promise, value) } diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/TunnelMonoid.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/TunnelMonoid.scala index 67047fe73..025c67bb1 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/TunnelMonoid.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/TunnelMonoid.scala @@ -29,12 +29,12 @@ import com.twitter.util.{Future, Promise, Return} * objects are created. This is the async analogue of Function1Monoid. */ class TunnelMonoid[V] extends Monoid[Tunnel[V]] { - def zero = { + def zero: Tunnel[V] = { val promise = new Promise[V] Tunnel(promise, promise) } - override def isNonZero(v: Tunnel[V]) = !(v.promise eq v.future) + override def isNonZero(v: Tunnel[V]): Boolean = !(v.promise eq v.future) def plus(older: Tunnel[V], newer: Tunnel[V]): Tunnel[V] = { val (Tunnel(f1, p1), Tunnel(f2, p2)) = (older, newer) @@ -76,7 +76,7 @@ object Tunnel { * This lifts a value into a Tunnel. This is where the Monoidic * computation underlying a TunnelMonoid actually happens. 
*/ - def toIncrement[V](v: V)(implicit monoid: Monoid[V]) = { + def toIncrement[V](v: V)(implicit monoid: Monoid[V]): Tunnel[V] = { val promise = new Promise[V] Tunnel(promise.map(monoid.plus(_, v)), promise) } diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala index 6eae7e13c..d685fd72b 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala @@ -62,12 +62,12 @@ class AsyncListSum[Key, Value]( (new MapContainer(Future.value(v) :: privBuf, size + 1, compact), 1) } - override def equals(o: Any) = o match { + override def equals(o: Any): Boolean = o match { case that: MapContainer => that eq this case _ => false } - lazy val toSeq = privBuf.reverse + lazy val toSeq: List[Future[Value]] = privBuf.reverse } protected override val emptyResult = Map.empty[Key, Value] diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala index e5b1635f5..5d60e9f7b 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala @@ -23,7 +23,7 @@ import com.twitter.util.Future trait AsyncSummer[T, +M <: Iterable[T]] { self => def flush: Future[M] def tick: Future[M] - def add(t: T) = addAll(Iterator(t)) + def add(t: T): Future[M] = addAll(Iterator(t)) def addAll(vals: TraversableOnce[T]): Future[M] def isFlushed: Boolean @@ -40,11 +40,11 @@ trait AsyncSummer[T, +M <: Iterable[T]] { self => trait AsyncSummerProxy[T, +M <: Iterable[T]] extends AsyncSummer[T, M] { def self: AsyncSummer[T, M] - def flush = self.flush - def tick = self.tick - override def add(t: T) = self.add(t) - def addAll(vals: TraversableOnce[T]) = self.addAll(vals) - def isFlushed = self.isFlushed + def flush: Future[M] = self.flush + def tick: Future[M] = self.tick + override def add(t: T): Future[M] = self.add(t) + def addAll(vals: TraversableOnce[T]): Future[M] = self.addAll(vals) + def isFlushed: Boolean = self.isFlushed override def cleanup: Future[Unit] = self.cleanup } diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala index ba6fc0bfc..734cc8dfd 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala @@ -169,9 +169,9 @@ class ApproxHHTracker(hhPct: HeavyHittersPercent, updateFreq: UpdateFrequency, r } object HeavyHittersCachingSummer { - val DEFAULT_HH_PERCENT = HeavyHittersPercent(0.01f) - val DEFAULT_ROLL_OVER_FREQUENCY = RollOverFrequency(1000000L) - val DEFAULT_UPDATE_FREQUENCY = UpdateFrequency(2) + val DEFAULT_HH_PERCENT: HeavyHittersPercent = HeavyHittersPercent(0.01f) + val DEFAULT_ROLL_OVER_FREQUENCY: RollOverFrequency = RollOverFrequency(1000000L) + val DEFAULT_UPDATE_FREQUENCY: UpdateFrequency = UpdateFrequency(2) def apply[Key, Value]( flushFrequency: FlushFrequency, @@ -182,7 +182,7 @@ object HeavyHittersCachingSummer { insertOp: Incrementor, sizeIncr: Incrementor, backingSummer: AsyncSummer[(Key, Value), Iterable[(Key, Value)]] - ) = + ): 
HeavyHittersCachingSummer[Key, Value] = new HeavyHittersCachingSummer[Key, Value]( DEFAULT_HH_PERCENT, DEFAULT_UPDATE_FREQUENCY, @@ -207,7 +207,7 @@ object HeavyHittersCachingSummer { insertOp: Incrementor, sizeIncr: Incrementor, backingSummer: AsyncSummer[(Key, Value), Iterable[(Key, Value)]] - ) = + ): HeavyHittersCachingSummer[Key, Value] = new HeavyHittersCachingSummer[Key, Value]( hhPct, updateFreq, @@ -238,8 +238,8 @@ class HeavyHittersCachingSummer[K, V]( type T = (K, V) // We only treat the K, V types as a pair almost exclusively in this class. override def flush: Future[Iterable[T]] = backingSummer.flush - override def isFlushed = backingSummer.isFlushed - override val emptyResult = Seq[T]() + override def isFlushed: Boolean = backingSummer.isFlushed + override val emptyResult: Seq[(K, V)] = Seq[T]() private[this] final val approxHH = new ApproxHHTracker(hhPct, updateFreq, roFreq) diff --git a/algebird-util/src/test/scala/com/twitter/algebird/util/TunnelMonoidProperties.scala b/algebird-util/src/test/scala/com/twitter/algebird/util/TunnelMonoidProperties.scala index 3ce5a973a..176c6b387 100644 --- a/algebird-util/src/test/scala/com/twitter/algebird/util/TunnelMonoidProperties.scala +++ b/algebird-util/src/test/scala/com/twitter/algebird/util/TunnelMonoidProperties.scala @@ -25,7 +25,7 @@ object TunnelMonoidProperties { makeRandomInput: Int => I, makeTunnel: I => V, collapseFinalValues: (V, Seq[V], I) => Seq[Future[I]] - ) = { + ): Boolean = { val r = new Random val numbers = (1 to 40).map(_ => makeRandomInput(r.nextInt)) def helper(seeds: Seq[I], toFeed: I) = { @@ -62,7 +62,7 @@ object TunnelMonoidProperties { class TunnelMonoidPropertiesextends extends CheckProperties { import TunnelMonoidProperties._ - implicit val monoid = new Monoid[Int] { + implicit val monoid: Monoid[Int] = new Monoid[Int] { val zero = 0 def plus(older: Int, newer: Int): Int = older + newer } diff --git a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncSummerLaws.scala b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncSummerLaws.scala index 557e6e681..3e6e8f439 100644 --- a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncSummerLaws.scala +++ b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncSummerLaws.scala @@ -21,33 +21,35 @@ import com.twitter.algebird.{MapAlgebra, Semigroup} import com.twitter.util.{Await, Duration, Future, FuturePool} import java.util.concurrent.Executors +import com.twitter.util.ExecutorServiceFuturePool +import java.util.concurrent.ExecutorService object AsyncSummerLaws { - val executor = Executors.newFixedThreadPool(4) - val workPool = FuturePool(executor) + val executor: ExecutorService = Executors.newFixedThreadPool(4) + val workPool: ExecutorServiceFuturePool = FuturePool(executor) private[this] val schedulingExecutor = Executors.newFixedThreadPool(4) private[this] val schedulingWorkPool = FuturePool(schedulingExecutor) - implicit def arbFlushFreq = Arbitrary { + implicit def arbFlushFreq: Arbitrary[FlushFrequency] = Arbitrary { Gen .choose(1, 4000) .map { x: Int => FlushFrequency(Duration.fromMilliseconds(x)) } } - implicit def arbBufferSize = Arbitrary { + implicit def arbBufferSize: Arbitrary[BufferSize] = Arbitrary { Gen .choose(1, 10) .map(x => BufferSize(x)) } - implicit def arbMemoryFlushPercent = Arbitrary { + implicit def arbMemoryFlushPercent: Arbitrary[MemoryFlushPercent] = Arbitrary { Gen .choose(80.0f, 90.0f) .map(x => MemoryFlushPercent(x)) } - implicit def arbCompactSize = Arbitrary { + 
implicit def arbCompactSize: Arbitrary[CompactionSize] = Arbitrary { Gen .choose(1, 10) .map(x => CompactionSize(x)) @@ -58,7 +60,7 @@ object AsyncSummerLaws { def summingWithAndWithoutSummerShouldMatch[K, V: Semigroup]( asyncSummer: AsyncSummer[(K, V), Iterable[(K, V)]], inputs: List[List[(K, V)]] - ) = { + ): Boolean = { val reference = MapAlgebra.sumByKey(inputs.flatten) val resA = Await .result(Future.collect(inputs.map { i => diff --git a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/Counter.scala b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/Counter.scala index 17a01d8a4..97c108535 100644 --- a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/Counter.scala +++ b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/Counter.scala @@ -12,7 +12,7 @@ case class Counter(name: String) extends Incrementor { override def incrBy(amount: Long): Unit = counter.addAndGet(amount) - def size = counter.get() + def size: Long = counter.get() override def toString: String = s"$name: size:$size" } From dfe5a487a2b089912843e8597e31306153f97b11 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 22 Jul 2020 20:27:29 +0200 Subject: [PATCH 082/306] Update util-core to 20.7.0 (#848) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 5b1314633..f34be7def 100644 --- a/build.sbt +++ b/build.sbt @@ -12,7 +12,7 @@ val scalaTestVersion = "3.2.0" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.1.6" -val utilVersion = "20.6.0" +val utilVersion = "20.7.0" val sparkVersion = "2.4.6" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = From d02936fff0f9f72190891a7c8e36d200c1cf6f6c Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Thu, 23 Jul 2020 16:53:28 +0100 Subject: [PATCH 083/306] Add kind-projector (#849) * Add kind-projector * fixup! Add kind-projector * fixup! 
Add kind-projector --- .../com/twitter/algebird/DecayedVector.scala | 24 +++++++------------ .../scala/com/twitter/algebird/Fold.scala | 4 ++-- .../scala/com/twitter/algebird/Scan.scala | 4 ++-- .../com/twitter/algebird/VectorSpace.scala | 4 ++-- .../twitter/algebird/monad/EitherMonad.scala | 6 ++--- .../com/twitter/algebird/monad/Reader.scala | 6 ++--- .../algebird/monad/StateWithError.scala | 5 ++-- .../algebird/DecayedVectorProperties.scala | 8 +++---- .../algebird/VectorSpaceProperties.scala | 2 +- build.sbt | 8 ++++++- 10 files changed, 34 insertions(+), 37 deletions(-) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala b/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala index fc7747752..1350d208f 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala @@ -38,31 +38,23 @@ object DecayedVector extends CompatDecayedVector { } } - def forMap[K](m: Map[K, Double], scaledTime: Double): DecayedVector[ - ({ - type x[a] = Map[K, a] - })#x - ] = - DecayedVector[({ type x[a] = Map[K, a] })#x](m, scaledTime) - def forMapWithHalflife[K](m: Map[K, Double], time: Double, halfLife: Double): DecayedVector[ - ({ - type x[a] = Map[K, a] - })#x - ] = + def forMap[K](m: Map[K, Double], scaledTime: Double): DecayedVector[Map[K, *]] = + DecayedVector[Map[K, *]](m, scaledTime) + def forMapWithHalflife[K](m: Map[K, Double], time: Double, halfLife: Double): DecayedVector[Map[K, *]] = forMap(m, time * scala.math.log(2.0) / halfLife) def mapMonoidWithEpsilon[K]( eps: Double )(implicit - vs: VectorSpace[Double, ({ type x[a] = Map[K, a] })#x], + vs: VectorSpace[Double, Map[K, *]], metric: Metric[Map[K, Double]] - ): Monoid[DecayedVector[({ type x[a] = Map[K, a] })#x]] = - monoidWithEpsilon[({ type x[a] = Map[K, a] })#x](eps) + ): Monoid[DecayedVector[Map[K, *]]] = + monoidWithEpsilon[Map[K, *]](eps) implicit def mapMonoid[K](implicit - vs: VectorSpace[Double, ({ type x[a] = Map[K, a] })#x], + vs: VectorSpace[Double, Map[K, *]], metric: Metric[Map[K, Double]] - ): Monoid[DecayedVector[({ type x[a] = Map[K, a] })#x]] = + ): Monoid[DecayedVector[Map[K, *]]] = mapMonoidWithEpsilon(-1.0) def scaledPlus[C[_]](newVal: DecayedVector[C], oldVal: DecayedVector[C], eps: Double)(implicit diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala b/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala index 43393b6a1..d0d659f0e 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala @@ -187,7 +187,7 @@ object Fold extends CompatFold { /** * "import Fold.applicative" will bring the Applicative instance into scope. See FoldApplicative. */ - implicit def applicative[I]: Applicative[({ type L[O] = Fold[I, O] })#L] = + implicit def applicative[I]: Applicative[Fold[I, *]] = new FoldApplicative[I] /** @@ -349,7 +349,7 @@ object Fold extends CompatFold { /** * Folds are Applicatives! 
*/ -class FoldApplicative[I] extends Applicative[({ type L[O] = Fold[I, O] })#L] { +class FoldApplicative[I] extends Applicative[Fold[I, *]] { override def map[T, U](mt: Fold[I, T])(fn: T => U): Fold[I, U] = mt.map(fn) override def apply[T](v: T): Fold[I, T] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala b/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala index cfaca330e..7d3ef485b 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala @@ -13,7 +13,7 @@ object Scan { */ type Aux[-I, S, +O] = Scan[I, O] { type State = S } - implicit def applicative[I]: Applicative[({ type L[O] = Scan[I, O] })#L] = new ScanApplicative[I] + implicit def applicative[I]: Applicative[Scan[I, *]] = new ScanApplicative[I] def from[I, S, O](initState: S)(presentAndNextStateFn: (I, S) => (O, S)): Aux[I, S, O] = new Scan[I, O] { @@ -304,7 +304,7 @@ sealed abstract class Scan[-I, +O] extends Serializable { } -class ScanApplicative[I] extends Applicative[({ type L[O] = Scan[I, O] })#L] { +class ScanApplicative[I] extends Applicative[Scan[I, *]] { override def map[T, U](mt: Scan[I, T])(fn: T => U): Scan[I, U] = mt.andThenPresent(fn) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala b/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala index 53e357ef7..11c58bafa 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/VectorSpace.scala @@ -44,8 +44,8 @@ sealed trait Implicits extends LowPrioImpicits { } sealed trait LowPrioImpicits { - implicit def mapSpace[K, T: Ring]: VectorSpace[T, ({ type x[a] = Map[K, a] })#x] = - VectorSpaceOps.from[T, ({ type x[a] = Map[K, a] })#x] { (s, m) => + implicit def mapSpace[K, T: Ring]: VectorSpace[T, Map[K, *]] = + VectorSpaceOps.from[T, Map[K, *]] { (s, m) => m.transform { case (_, v) => Ring.times(s, v) } } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/monad/EitherMonad.scala b/algebird-core/src/main/scala/com/twitter/algebird/monad/EitherMonad.scala index 4ffa9ed25..be0d71113 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/monad/EitherMonad.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/monad/EitherMonad.scala @@ -20,7 +20,7 @@ import com.twitter.algebird.Monad // Monad for either, used for modeling Error where L is the type of the error object EitherMonad { - class Error[L] extends Monad[({ type RightType[R] = Either[L, R] })#RightType] { + class Error[L] extends Monad[Either[L, *]] { override def apply[R](r: R): Right[L, R] = Right(r) override def flatMap[T, U](self: Either[L, T])(next: T => Either[L, U]): Either[L, U] = @@ -29,8 +29,8 @@ object EitherMonad { override def map[T, U](self: Either[L, T])(fn: T => U): Either[L, U] = self.right.map(fn) } - implicit def monad[L]: Monad[({ type RightT[R] = Either[L, R] })#RightT] = - new Error[L] + + implicit def monad[L]: Monad[Either[L, *]] = new Error[L] def assert[L](truth: Boolean, failure: => L): Either[L, Unit] = if (truth) Right(()) else Left(failure) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/monad/Reader.scala b/algebird-core/src/main/scala/com/twitter/algebird/monad/Reader.scala index f1e4c97dd..12009f263 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/monad/Reader.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/monad/Reader.scala @@ -65,12 +65,12 @@ object Reader { def const[T](t: 
T): Reader[Any, T] = ConstantReader(t) implicit def apply[E, T](fn: (E) => T): Reader[E, T] = ReaderFn(fn) - class ReaderM[Env] extends Monad[({ type Result[T] = Reader[Env, T] })#Result] { + class ReaderM[Env] extends Monad[Reader[Env, *]] { override def apply[T](t: T): ConstantReader[T] = ConstantReader(t) override def flatMap[T, U](self: Reader[Env, T])(next: T => Reader[Env, U]): Reader[Env, U] = self.flatMap(next) override def map[T, U](self: Reader[Env, T])(fn: T => U): Reader[Env, U] = self.map(fn) } - implicit def monad[Env]: Monad[({ type Result[T] = Reader[Env, T] })#Result] = - new ReaderM[Env] + + implicit def monad[Env]: Monad[Reader[Env, *]] = new ReaderM[Env] } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala b/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala index 62d8fdbab..d502da156 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala @@ -121,10 +121,9 @@ object StateWithError { def toKleisli[S]: FunctionLifter[S] = new FunctionLifter[S] implicit def apply[S, F, T](fn: S => Either[F, (S, T)]): StateWithError[S, F, T] = StateFn(fn) - implicit def monad[S, F]: Monad[({ type Result[T] = StateWithError[S, F, T] })#Result] = - new StateFMonad[F, S] + implicit def monad[S, F]: Monad[StateWithError[S, F, *]] = new StateFMonad[F, S] - class StateFMonad[F, S] extends Monad[({ type Result[T] = StateWithError[S, F, T] })#Result] { + class StateFMonad[F, S] extends Monad[StateWithError[S, F, *]] { override def apply[T](const: T): StateWithError[S, Nothing, T] = { (s: S) => Right((s, const)) } override def flatMap[T, U]( earlier: StateWithError[S, F, T] diff --git a/algebird-test/src/test/scala/com/twitter/algebird/DecayedVectorProperties.scala b/algebird-test/src/test/scala/com/twitter/algebird/DecayedVectorProperties.scala index c83ed8cdb..f0e9bf0fb 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/DecayedVectorProperties.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/DecayedVectorProperties.scala @@ -21,7 +21,7 @@ import org.scalacheck.{Arbitrary, Gen} class DecayedVectorProperties extends CheckProperties { import com.twitter.algebird.BaseProperties._ - implicit val mpint: Arbitrary[DecayedVector[({ type x[a] = Map[Int, a] })#x]] = Arbitrary { + implicit val mpint: Arbitrary[DecayedVector[Map[Int, *]]] = Arbitrary { for { t <- Gen.choose(1e-4, 200.0) // Not too high so as to avoid numerical issues m <- Gen.mapOf(Gen.zip(Gen.choose(0, 100), Gen.choose(-1e5, 1e5))) @@ -30,8 +30,8 @@ class DecayedVectorProperties extends CheckProperties { // TODO: we won't need this when we have an Equatable trait def decayedMapEqFn( - a: DecayedVector[({ type x[a] = Map[Int, a] })#x], - b: DecayedVector[({ type x[a] = Map[Int, a] })#x] + a: DecayedVector[Map[Int, *]], + b: DecayedVector[Map[Int, *]] ): Boolean = { def beCloseTo(a: Double, b: Double, eps: Double = 1e-5) = @@ -54,6 +54,6 @@ class DecayedVectorProperties extends CheckProperties { property("DecayedVector[Map[Int, *]] is a monoid") { implicit val equiv = Equiv.fromFunction(decayedMapEqFn) - monoidLaws[DecayedVector[({ type x[a] = Map[Int, a] })#x]] + monoidLaws[DecayedVector[Map[Int, *]]] } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/VectorSpaceProperties.scala b/algebird-test/src/test/scala/com/twitter/algebird/VectorSpaceProperties.scala index 1d5c3ece7..7e4121dc0 100644 --- 
a/algebird-test/src/test/scala/com/twitter/algebird/VectorSpaceProperties.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/VectorSpaceProperties.scala @@ -35,6 +35,6 @@ class VectorSpaceProperties extends CheckProperties { implicit val genDouble: Arbitrary[Double] = Arbitrary(Gen.choose(-1.0e50, 1.0e50)) property("map int double scaling") { - vectorSpaceLaws[Double, ({ type x[a] = Map[Int, a] })#x](mapEqFn(_, _)) + vectorSpaceLaws[Double, Map[Int, *]](mapEqFn(_, _)) } } diff --git a/build.sbt b/build.sbt index f34be7def..3e4cd4c5e 100644 --- a/build.sbt +++ b/build.sbt @@ -6,6 +6,7 @@ import pl.project13.scala.sbt.JmhPlugin val algebraVersion = "2.0.0" val bijectionVersion = "0.9.7" val javaEwahVersion = "1.1.7" +val kindProjectorVersion = "0.11.0" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.0" @@ -252,6 +253,7 @@ lazy val algebirdCore = module("core").settings( Seq(compilerPlugin(("org.scalamacros" % "paradise" % paradiseVersion).cross(CrossVersion.full))) } }, + addCompilerPlugin(("org.typelevel" % "kind-projector" % kindProjectorVersion).cross(CrossVersion.full)), sourceGenerators in Compile += Def.task { GenTupleAggregators.gen((sourceManaged in Compile).value) }.taskValue, @@ -275,7 +277,10 @@ lazy val algebirdTest = module("test") } else { Seq(compilerPlugin(("org.scalamacros" % "paradise" % paradiseVersion).cross(CrossVersion.full))) } - } + }, + addCompilerPlugin( + ("org.typelevel" % "kind-projector" % kindProjectorVersion).cross(CrossVersion.full) + ) ) .dependsOn(algebirdCore) @@ -381,6 +386,7 @@ lazy val docs = project .settings(noPublishSettings) .settings(docSettings) .settings( + addCompilerPlugin(("org.typelevel" % "kind-projector" % kindProjectorVersion).cross(CrossVersion.full)), scalacOptions in Tut ~= (_.filterNot(Set("-Ywarn-unused-import", "-Ywarn-dead-code"))), sources in (ScalaUnidoc, unidoc) ~= (_.filterNot(_.absolutePath.contains("javaapi"))) ) From 7b436fe08f2882c24f4fbba15d373b05fa82d3de Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 5 Aug 2020 03:59:33 +0200 Subject: [PATCH 084/306] Update sbt-scalafmt to 2.4.2 (#852) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index a6932083b..5f62c5cd9 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -9,7 +9,7 @@ addSbtPlugin("com.47deg" % "sbt-microsites" % "1.2.1") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.0") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.7.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") From 28f79d195168ee5f4e45aeaaf1543bcd79259c50 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 7 Aug 2020 09:24:19 +0200 Subject: [PATCH 085/306] Update scalatest to 3.2.1 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 3e4cd4c5e..f8b00ca10 100644 --- a/build.sbt +++ b/build.sbt @@ -9,7 +9,7 @@ val javaEwahVersion = "1.1.7" val kindProjectorVersion = "0.11.0" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.0" +val scalaTestVersion = "3.2.1" val scalaTestPlusVersion = "3.1.0.0-RC2" val 
scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.1.6" From 5cf2732aac49aef70aeeb8e3a12dd6423d15e54b Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 12 Aug 2020 03:25:34 +0200 Subject: [PATCH 086/306] Update util-core to 20.8.0 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index f8b00ca10..4d089a7f2 100644 --- a/build.sbt +++ b/build.sbt @@ -13,7 +13,7 @@ val scalaTestVersion = "3.2.1" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.1.6" -val utilVersion = "20.7.0" +val utilVersion = "20.8.0" val sparkVersion = "2.4.6" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = From 434ba3df486d74b56910dc5b4b607d5cba596889 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 22 Aug 2020 09:42:35 +0200 Subject: [PATCH 087/306] Update scalatest to 3.2.2 (#857) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 4d089a7f2..42227aadd 100644 --- a/build.sbt +++ b/build.sbt @@ -9,7 +9,7 @@ val javaEwahVersion = "1.1.7" val kindProjectorVersion = "0.11.0" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.1" +val scalaTestVersion = "3.2.2" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.1.6" From e439ff694e46aa4117d35ff68e44fe8e236a501e Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 26 Aug 2020 23:00:28 +0200 Subject: [PATCH 088/306] Update util-core to 20.8.1 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 42227aadd..90d5cd944 100644 --- a/build.sbt +++ b/build.sbt @@ -13,7 +13,7 @@ val scalaTestVersion = "3.2.2" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.1.6" -val utilVersion = "20.8.0" +val utilVersion = "20.8.1" val sparkVersion = "2.4.6" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = From 9c805c401e94450f90e6b23126ab3d62f17ac66b Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 2 Sep 2020 17:05:33 +0200 Subject: [PATCH 089/306] Update sbt-scalafix to 0.9.20 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 5f62c5cd9..fe260f912 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -15,4 +15,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.4") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.19") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.20") From f01d6ebcb84475e966b28fcfe391fefbe2c0cb6c Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 4 Sep 2020 16:30:14 +0200 Subject: [PATCH 090/306] Update sbt-jmh to 0.4.0 (#859) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index fe260f912..80ee233ef 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -14,5 +14,5 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.7.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") 
addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.4") -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.7") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.20") From fb196652058d14f4b94fd04f16d6800dda61bbee Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 11 Sep 2020 20:18:48 +0200 Subject: [PATCH 091/306] Update sbt-mima-plugin to 0.8.0 (#861) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 80ee233ef..4bc82323f 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.7.0") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.4") From 749b10d3c01131dd178f143bc5f44c7942b026bc Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 15 Sep 2020 07:32:37 +0200 Subject: [PATCH 092/306] Update scala-collection-compat to 2.2.0 (#863) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 90d5cd944..5de8f0d01 100644 --- a/build.sbt +++ b/build.sbt @@ -12,7 +12,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.2" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" -val scalaCollectionCompat = "2.1.6" +val scalaCollectionCompat = "2.2.0" val utilVersion = "20.8.1" val sparkVersion = "2.4.6" From fdd3abadbab1b10c954825ecd851d7530c9744dc Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 15 Sep 2020 07:59:02 +0200 Subject: [PATCH 093/306] Update spark-core to 2.4.7 (#864) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 5de8f0d01..757b329ed 100644 --- a/build.sbt +++ b/build.sbt @@ -14,7 +14,7 @@ val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.2.0" val utilVersion = "20.8.1" -val sparkVersion = "2.4.6" +val sparkVersion = "2.4.7" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = CrossVersion.partialVersion(scalaVersion) match { From 2d3330180213603958eaa13bad858fba670c7433 Mon Sep 17 00:00:00 2001 From: Olafur Pall Geirsson Date: Fri, 18 Sep 2020 10:53:45 +0200 Subject: [PATCH 094/306] Setup automated releases from GitHub Actions CI Previously, releases were done from a local computer with the sbt-release plugin. Now, releases happen automatically from GitHub Actions. Every merge into master should trigger a SNAPSHOT release and every git tag push should trigger a stable Sonatype release. 
For more details, see https://github.com/olafurpg/sbt-ci-release
---
 .github/workflows/release.yml | 19 +++++++++++++++++++
 build.sbt | 33 ++++-----------------------------
 project/plugins.sbt | 2 +-
 version.sbt | 1 -
 4 files changed, 24 insertions(+), 31 deletions(-)
 create mode 100644 .github/workflows/release.yml
 delete mode 100644 version.sbt

diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 000000000..f39ddeca7
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,19 @@
+name: Release
+on:
+  push:
+    branches: [develop]
+    tags: ["*"]
+jobs:
+  publish:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v1
+      - uses: olafurpg/setup-scala@v7
+      - uses: olafurpg/setup-gpg@v2
+      - name: Publish ${{ github.ref }}
+        run: sbt ci-release
+        env:
+          PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }}
+          PGP_SECRET: ${{ secrets.PGP_SECRET }}
+          SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
+          SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }}
diff --git a/build.sbt b/build.sbt
index 757b329ed..d1c48253d 100644
--- a/build.sbt
+++ b/build.sbt
@@ -1,4 +1,3 @@
-import ReleaseTransformations._
 import algebird._
 import com.typesafe.tools.mima.core._
 import pl.project13.scala.sbt.JmhPlugin
@@ -35,6 +34,9 @@ def scalaBinaryVersion(scalaVersion: String) = scalaVersion match {
 def isScala212x(scalaVersion: String) = scalaBinaryVersion(scalaVersion) == "2.12"
 def isScala213x(scalaVersion: String) = scalaBinaryVersion(scalaVersion) == "2.13"
 
+noPublishSettings
+crossScalaVersions := Nil
+
 val sharedSettings = Seq(
   organization := "com.twitter",
   scalaVersion := "2.12.12",
@@ -73,35 +75,7 @@ val sharedSettings = Seq(
     "com.novocode" % "junit-interface" % "0.11" % Test
   ),
   // Publishing options:
-  releaseCrossBuild := true,
-  releasePublishArtifactsAction := PgpKeys.publishSigned.value,
-  releaseVersionBump := sbtrelease.Version.Bump.Minor, // need to tweak based on mima results
-  publishMavenStyle := true,
-  publishArtifact in Test := false,
   pomIncludeRepository := { x => false },
-  releaseProcess := Seq[ReleaseStep](
-    checkSnapshotDependencies,
-    inquireVersions,
-    runClean,
-    releaseStepCommandAndRemaining("+test"), // formerly runTest, here to deal with algebird-spark
-    setReleaseVersion,
-    commitReleaseVersion,
-    tagRelease,
-    releaseStepCommandAndRemaining(
-      "+publishSigned"
-    ), // formerly publishArtifacts, here to deal with algebird-spark
-    ReleaseStep(action = releaseStepCommand("sonatypeBundleRelease")),
-    setNextVersion,
-    commitNextVersion,
-    pushChanges
-  ),
-  publishTo := sonatypePublishToBundle.value,
-  scmInfo := Some(
-    ScmInfo(
-      url("https://github.com/twitter/algebird"),
-      "scm:git@github.com:twitter/algebird.git"
-    )
-  ),
   pomExtra := (https://github.com/twitter/algebird
@@ -138,6 +112,7 @@ val sharedSettings = Seq(
 ) ++ mimaSettings
 
 lazy val noPublishSettings = Seq(
+  publish / skip := true,
   publish := {},
   publishLocal := {},
   test := {},
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 4bc82323f..5dd9033af 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -7,7 +7,6 @@ resolvers ++= Seq(
 addSbtPlugin("com.47deg" % "sbt-microsites" % "1.2.1")
 addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3")
-addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13")
 addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1")
 addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2")
 addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.0")
 addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3")
 addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1")
addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.4") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.20") +addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.3") diff --git a/version.sbt b/version.sbt deleted file mode 100644 index b14908297..000000000 --- a/version.sbt +++ /dev/null @@ -1 +0,0 @@ -version in ThisBuild := "0.13.8-SNAPSHOT" From ac05c850a1b26baad17300c3c5e10d2c19a3b43f Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Fri, 18 Sep 2020 10:32:03 +0100 Subject: [PATCH 095/306] Update scalafmt to 2.7.1 (#866) --- .scalafmt.conf | 2 +- .../com/twitter/algebird/AdaptiveCache.scala | 17 ++-- .../com/twitter/algebird/AdaptiveVector.scala | 6 +- .../com/twitter/algebird/Applicative.scala | 8 +- .../com/twitter/algebird/BloomFilter.scala | 6 +- .../com/twitter/algebird/CountMinSketch.scala | 36 ++++---- .../com/twitter/algebird/DecayingCMS.scala | 12 +-- .../scala/com/twitter/algebird/ExpHist.scala | 8 +- .../scala/com/twitter/algebird/Fold.scala | 8 +- .../com/twitter/algebird/HyperLogLog.scala | 18 ++-- .../twitter/algebird/HyperLogLogSeries.scala | 33 ++++--- .../com/twitter/algebird/MapAlgebra.scala | 89 +++++++++---------- .../scala/com/twitter/algebird/Metric.scala | 5 +- .../com/twitter/algebird/SGDMonoid.scala | 5 +- .../com/twitter/algebird/SketchMap.scala | 18 ++-- .../com/twitter/algebird/SpaceSaver.scala | 35 ++++---- .../com/twitter/algebird/SummingCache.scala | 13 ++- .../com/twitter/algebird/macros/Cuber.scala | 7 +- .../twitter/algebird/macros/GroupMacro.scala | 8 +- .../twitter/algebird/macros/RingMacro.scala | 4 +- .../com/twitter/algebird/macros/Roller.scala | 7 +- .../algebird/macros/SemigroupMacro.scala | 9 +- .../algebird/matrix/AdaptiveMatrix.scala | 17 ++-- .../statistics/IterCallStatistics.scala | 5 +- .../twitter/algebird/generic/Instances.scala | 7 +- .../algebird/ApproximateProperty.scala | 5 +- .../macros/ArbitraryCaseClassMacro.scala | 5 +- .../com/twitter/algebird/AggregatorLaws.scala | 5 +- .../com/twitter/algebird/BatchedTest.scala | 5 +- .../twitter/algebird/BloomFilterTest.scala | 9 +- .../algebird/CollectionSpecification.scala | 9 +- .../twitter/algebird/CorrelationLaws.scala | 4 +- .../twitter/algebird/DecayingCMSTest.scala | 31 +++---- .../com/twitter/algebird/ExpHistLaws.scala | 13 ++- .../com/twitter/algebird/IntervalLaws.scala | 7 +- .../scala/com/twitter/algebird/ScanTest.scala | 7 +- .../com/twitter/algebird/SetDiffTest.scala | 4 +- .../com/twitter/algebird/SpaceSaverTest.scala | 5 +- .../algebird/SummingIteratorTest.scala | 9 +- .../util/summer/AsyncListMMapSum.scala | 14 ++- .../algebird/util/summer/AsyncListSum.scala | 5 +- .../util/TunnelMonoidProperties.scala | 11 ++- 42 files changed, 235 insertions(+), 296 deletions(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 3ec49cbeb..4c2e1036e 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=2.6.4 +version=2.7.1 maxColumn = 110 docstrings = JavaDoc newlines.alwaysBeforeMultilineDef = false diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala index b59c0a787..35ccd7769 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala @@ -39,15 +39,14 @@ class SentinelCache[K, V](implicit sgv: Semigroup[V]) { def put(in: Map[K, V]): Unit = if (map.get.isDefined) { - in.foreach { - case (k, v) => - val 
newValue = - map.get - .flatMap(_.get(k)) - .map(oldV => sgv.plus(oldV, v)) - .getOrElse(v) - - map.get.foreach(_.put(k, newValue)) + in.foreach { case (k, v) => + val newValue = + map.get + .flatMap(_.get(k)) + .map(oldV => sgv.plus(oldV, v)) + .getOrElse(v) + + map.get.foreach(_.put(k, newValue)) } } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala index fbc06bc65..b04eaa448 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala @@ -159,9 +159,9 @@ object AdaptiveVector { implicit def equiv[V: Equiv]: Equiv[AdaptiveVector[V]] = Equiv.fromFunction[AdaptiveVector[V]] { (l, r) => (l.size == r.size) && (denseEquiv[V].equiv(l, r) || - toVector(l).view.zip(toVector(r)).forall { - case (lv, rv) => Equiv[V].equiv(lv, rv) - }) + toVector(l).view.zip(toVector(r)).forall { case (lv, rv) => + Equiv[V].equiv(lv, rv) + }) } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala b/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala index 60e59ec11..afb7eb17f 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala @@ -55,8 +55,8 @@ trait Applicative[M[_]] extends Functor[M] { joinWith(join(m1, m2), m3) { case ((t1, t2), t3) => (t1, t2, t3) } def join[T1, T2, T3, T4](m1: M[T1], m2: M[T2], m3: M[T3], m4: M[T4]): M[(T1, T2, T3, T4)] = - joinWith(join(join(m1, m2), m3), m4) { - case (((t1, t2), t3), t4) => (t1, t2, t3, t4) + joinWith(join(join(m1, m2), m3), m4) { case (((t1, t2), t3), t4) => + (t1, t2, t3, t4) } def join[T1, T2, T3, T4, T5]( @@ -66,8 +66,8 @@ trait Applicative[M[_]] extends Functor[M] { m4: M[T4], m5: M[T5] ): M[(T1, T2, T3, T4, T5)] = - joinWith(join(join(join(m1, m2), m3), m4), m5) { - case ((((t1, t2), t3), t4), t5) => (t1, t2, t3, t4, t5) + joinWith(join(join(join(m1, m2), m3), m4), m5) { case ((((t1, t2), t3), t4), t5) => + (t1, t2, t3, t4, t5) } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala b/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala index f03686e14..c12976739 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala @@ -175,7 +175,7 @@ object BloomFilter { val prob = 1 - scala.math.exp(t - 1 - s(nl)) * - scala.math.pow(s(nl) / (t - 1), t - 1) - + scala.math.pow(s(nl) / (t - 1), t - 1) - scala.math.exp(-scala.math.pow(t + 1 - s(nr), 2) / (2 * s(nr))) Approximate[Long](nl, n, nr, scala.math.max(0, prob)) @@ -327,8 +327,8 @@ object BF { } (a eq b) || ((a.numHashes == b.numHashes) && - (a.width == b.width) && - eqIntIter(toIntIt(a), toIntIt(b))) + (a.width == b.width) && + eqIntIter(toIntIt(a), toIntIt(b))) } } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala b/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala index 91ca05cc6..7d16875d8 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala @@ -181,9 +181,8 @@ class CMSSummation[K](params: CMSParams[K]) { case CMSItem(item, count, _) => insert(item, count) case SparseCMS(table, _, _) => - table.foreach { - case (item, c) => - insert(item, c) + table.foreach { case 
(item, c) => + insert(item, c) } case CMSInstance(CMSInstance.CountsTable(matrix), count, _) => var offset = 0 @@ -680,9 +679,8 @@ object SparseCMS { */ def toDense[K](exactCountTable: Map[K, Long], params: CMSParams[K]): CMS[K] = // Create new CMSInstace - exactCountTable.foldLeft(CMSInstance[K](params)) { - case (cms, (x, count)) => - cms + (x, count) + exactCountTable.foldLeft(CMSInstance[K](params)) { case (cms, (x, count)) => + cms + (x, count) } } @@ -700,9 +698,8 @@ case class CMSInstance[K]( case _: CMSZero[_] => this case other: CMSItem[K] => this + other.item case other: SparseCMS[K] => - other.exactCountTable.foldLeft(this) { - case (cms, (x, count)) => - cms + (x, count) + other.exactCountTable.foldLeft(this) { case (cms, (x, count)) => + cms + (x, count) } case other: CMSInstance[K] => val newTable = countsTable ++ other.countsTable @@ -758,10 +755,9 @@ case class CMSInstance[K]( require(count >= 0, "count must be >= 0 (negative counts not implemented") if (count != 0L) { val newCountsTable = - (0 to (depth - 1)).foldLeft(countsTable) { - case (table, row) => - val pos = (row, params.hashes(row)(item)) - table + (pos, count) + (0 to (depth - 1)).foldLeft(countsTable) { case (table, row) => + val pos = (row, params.hashes(row)(item)) + table + (pos, count) } CMSInstance[K](newCountsTable, totalCount + count, params) } else this @@ -1053,8 +1049,8 @@ abstract class HeavyHittersLogic[K] extends java.io.Serializable { } def updateHeavyHitters(cms: CMS[K])(left: HeavyHitters[K], right: HeavyHitters[K]): HeavyHitters[K] = { - val candidates = (left.items ++ right.items).map { - case i => HeavyHitter[K](i, cms.frequency(i).estimate) + val candidates = (left.items ++ right.items).map { case i => + HeavyHitter[K](i, cms.frequency(i).estimate) } val newHhs = HeavyHitters.from(candidates) purgeHeavyHitters(cms)(newHhs) @@ -1307,13 +1303,11 @@ case class ScopedTopNLogic[K1, K2](heavyHittersN: Int) extends HeavyHittersLogic val (underLimit, overLimit) = grouped.partition { _._2.size <= heavyHittersN } - val sorted = overLimit.transform { - case (_, hhs) => - hhs.toSeq.sortBy(hh => hh.count) + val sorted = overLimit.transform { case (_, hhs) => + hhs.toSeq.sortBy(hh => hh.count) } - val purged = sorted.transform { - case (_, hhs) => - hhs.takeRight(heavyHittersN) + val purged = sorted.transform { case (_, hhs) => + hhs.takeRight(heavyHittersN) } HeavyHitters[(K1, K2)](purged.values.flatten.toSet ++ underLimit.values.flatten.toSet) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala b/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala index b0181b4e7..0ab0c7eb9 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala @@ -317,13 +317,13 @@ final class DecayingCMS[K]( this.logScale == that.logScale && this.timeInHL == that.timeInHL && this.cells.length == that.cells.length && { - var i = 0 - while (i < depth) { - if (this.cells(i) != that.cells(i)) return false - i += 1 + var i = 0 + while (i < depth) { + if (this.cells(i) != that.cells(i)) return false + i += 1 + } + true } - true - } case _ => false } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala b/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala index a2ea9cef6..4eb630e3c 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala @@ -101,8 +101,8 @@ case class 
ExpHist( */ def fold: Fold[Bucket, ExpHist] = Fold.foldMutable[Builder[Bucket, Vector[Bucket]], Bucket, ExpHist]( - { - case (b, bucket) => b += bucket + { case (b, bucket) => + b += bucket }, _ => Vector.newBuilder[Bucket], x => addAll(x.result) @@ -422,8 +422,8 @@ object ExpHist { * @return vector of powers of 2 (where ret.sum == the original s) */ def toBuckets: Vector[Long] = - rep.iterator.zipWithIndex.flatMap { - case (i, exp) => Iterator.fill(i)(1L << exp) + rep.iterator.zipWithIndex.flatMap { case (i, exp) => + Iterator.fill(i)(1L << exp) }.toVector } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala b/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala index d0d659f0e..69d2d832e 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala @@ -84,13 +84,9 @@ sealed trait Fold[-I, +O] extends Serializable { val first = self.build() val second = other.build() new FoldState( - { - case ((x, y), i) => (first.add(x, i), second.add(y, i)) - }, + { case ((x, y), i) => (first.add(x, i), second.add(y, i)) }, (first.start, second.start), - { - case (x, y) => f(first.end(x), second.end(y)) - } + { case (x, y) => f(first.end(x), second.end(y)) } ) } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala index 6a8ad103e..c2eab81f5 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala @@ -442,21 +442,19 @@ case class SparseHLL(override val bits: Int, maxRhow: Map[Int, Max[Byte]]) exten override def updateInto(buffer: Array[Byte]): Unit = { assert(buffer.length == size, "Length mismatch") - maxRhow.foreach { - case (idx, maxb) => - buffer.update(idx, buffer(idx).max(maxb.get)) + maxRhow.foreach { case (idx, maxb) => + buffer.update(idx, buffer(idx).max(maxb.get)) } } override protected def downsize(reducedBits: Int, reducedSize: Int, bitMask: Int, buf: Array[Byte]): HLL = { val reducedMaxRhoW = collection.mutable.Map.empty[Int, Byte] - maxRhow.foreach { - case (j, rhoW) => - val modifiedRhoW = - getModifiedRhoW(j, rhoW.get, reducedBits, reducedSize, bitMask, buf) - val newJ = j % reducedSize - val newRhoW = reducedMaxRhoW.getOrElse(newJ, 0: Byte) - reducedMaxRhoW += (newJ -> (newRhoW.max(modifiedRhoW))) + maxRhow.foreach { case (j, rhoW) => + val modifiedRhoW = + getModifiedRhoW(j, rhoW.get, reducedBits, reducedSize, bitMask, buf) + val newJ = j % reducedSize + val newRhoW = reducedMaxRhoW.getOrElse(newJ, 0: Byte) + reducedMaxRhoW += (newJ -> (newRhoW.max(modifiedRhoW))) } SparseHLL(reducedBits, reducedMaxRhoW.iterator.map { case (k, v) => (k, Max(v)) }.toMap) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala index ef500d27b..443b00be2 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala @@ -90,15 +90,13 @@ case class HLLSeries(bits: Int, rows: Vector[Map[Int, Long]]) { val monoid = new HyperLogLogMonoid(bits) if (rows.isEmpty) monoid.zero else { - monoid.sum(rows.iterator.zipWithIndex.map { - case (map, i) => - SparseHLL( - bits, - map.transform { - case _ => - Max((i + 1).toByte) - } - ) + monoid.sum(rows.iterator.zipWithIndex.map { case (map, i) => + SparseHLL( 
+ bits, + map.transform { case _ => + Max((i + 1).toByte) + } + ) }) } } @@ -154,15 +152,14 @@ class HyperLogLogSeriesMonoid(val bits: Int) extends Monoid[HLLSeries] { if (left.size > right.size) { combine(right, left) } else { - left.foldLeft(right) { - case (m, (k, lv)) => - m.updated( - k, - m.get(k) match { - case None => lv - case Some(rv) => Math.max(lv, rv) - } - ) + left.foldLeft(right) { case (m, (k, lv)) => + m.updated( + k, + m.get(k) match { + case None => lv + case Some(rv) => Math.max(lv, rv) + } + ) } } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala b/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala index 39a7ba437..e1543e79c 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala @@ -82,16 +82,15 @@ abstract class GenericMapMonoid[K, V, M <: ScMap[K, V]](implicit val semigroup: else { val mutable = MMap[K, V]() items.iterator.foreach { m => - m.foreach { - case (k, v) => - val oldVOpt = mutable.get(k) - // sorry for the micro optimization here: avoiding a closure - val newV = - if (oldVOpt.isEmpty) v else Semigroup.plus(oldVOpt.get, v) - if (nonZero(newV)) - mutable.update(k, newV) - else - mutable.remove(k) + m.foreach { case (k, v) => + val oldVOpt = mutable.get(k) + // sorry for the micro optimization here: avoiding a closure + val newV = + if (oldVOpt.isEmpty) v else Semigroup.plus(oldVOpt.get, v) + if (nonZero(newV)) + mutable.update(k, newV) + else + mutable.remove(k) } } Some(fromMutable(mutable)) @@ -119,9 +118,8 @@ class ScMapMonoid[K, V](implicit semigroup: Semigroup[V]) extends GenericMapMono */ class MapGroup[K, V](implicit val group: Group[V]) extends MapMonoid[K, V]()(group) with Group[Map[K, V]] { override def negate(kv: Map[K, V]): Map[K, V] = - kv.iterator.map { - case (k, v) => - (k, group.negate(v)) + kv.iterator.map { case (k, v) => + (k, group.negate(v)) }.toMap } @@ -129,9 +127,8 @@ class ScMapGroup[K, V](implicit val group: Group[V]) extends ScMapMonoid[K, V]()(group) with Group[ScMap[K, V]] { override def negate(kv: ScMap[K, V]): ScMap[K, V] = - kv.iterator.map { - case (k, v) => - (k, group.negate(v)) + kv.iterator.map { case (k, v) => + (k, group.negate(v)) }.toMap } @@ -172,9 +169,8 @@ class ScMapRing[K, V](implicit override val ring: Ring[V]) object MapAlgebra { def rightContainsLeft[K, V: Equiv](l: Map[K, V], r: Map[K, V]): Boolean = - l.forall { - case (k, v) => - r.get(k).exists(Equiv[V].equiv(_, v)) + l.forall { case (k, v) => + r.get(k).exists(Equiv[V].equiv(_, v)) } implicit def sparseEquiv[K, V: Monoid: Equiv]: Equiv[Map[K, V]] = @@ -218,16 +214,15 @@ object MapAlgebra { if (pairs.iterator.isEmpty) Map.empty else { val mutable = MMap[K, Builder[V, List[V]]]() - pairs.iterator.foreach { - case (k, v) => - val oldVOpt = mutable.get(k) - // sorry for the micro optimization here: avoiding a closure - val bldr = if (oldVOpt.isEmpty) { - val b = List.newBuilder[V] - mutable.update(k, b) - b - } else oldVOpt.get - bldr += v + pairs.iterator.foreach { case (k, v) => + val oldVOpt = mutable.get(k) + // sorry for the micro optimization here: avoiding a closure + val bldr = if (oldVOpt.isEmpty) { + val b = List.newBuilder[V] + mutable.update(k, b) + b + } else oldVOpt.get + bldr += v } mutable.iterator.map { case (k, bldr) => (k, bldr.result) }.toMap } @@ -240,13 +235,11 @@ object MapAlgebra { def join[K, V, W](map1: Map[K, V], map2: Map[K, W]): Map[K, (Option[V], Option[W])] = Monoid .plus( - 
map1.transform { - case (_, v) => - (List(v), List[W]()) + map1.transform { case (_, v) => + (List(v), List[W]()) }, - map2.transform { - case (_, w) => - (List[V](), List(w)) + map2.transform { case (_, w) => + (List[V](), List(w)) } ) .transform { case (_, (v, w)) => (v.headOption, w.headOption) } @@ -281,14 +274,13 @@ object MapAlgebra { def cube[K, V](it: TraversableOnce[(K, V)])(implicit c: Cuber[K]): Map[c.K, List[V]] = { val map: MMap[c.K, List[V]] = MMap[c.K, List[V]]() - it.iterator.foreach { - case (k, v) => - c(k).iterator.foreach { ik => - map.get(ik) match { - case Some(vs) => map += ik -> (v :: vs) - case None => map += ik -> List(v) - } + it.iterator.foreach { case (k, v) => + c(k).iterator.foreach { ik => + map.get(ik) match { + case Some(vs) => map += ik -> (v :: vs) + case None => map += ik -> List(v) } + } } map.foreach { case (k, v) => map(k) = v.reverse } new MutableBackedMap(map) @@ -305,14 +297,13 @@ object MapAlgebra { def rollup[K, V](it: TraversableOnce[(K, V)])(implicit r: Roller[K]): Map[r.K, List[V]] = { val map: MMap[r.K, List[V]] = MMap[r.K, List[V]]() - it.iterator.foreach { - case (k, v) => - r(k).iterator.foreach { ik => - map.get(ik) match { - case Some(vs) => map += ik -> (v :: vs) - case None => map += ik -> List(v) - } + it.iterator.foreach { case (k, v) => + r(k).iterator.foreach { ik => + map.get(ik) match { + case Some(vs) => map += ik -> (v :: vs) + case None => map += ik -> List(v) } + } } map.foreach { case (k, v) => map(k) = v.reverse } new MutableBackedMap(map) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala b/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala index 56acb8718..6e621c7ce 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala @@ -62,9 +62,8 @@ object Metric { val outP = pad(a).view .zip(pad(b)) - .map { - case (i, j) => - math.pow(implicitly[Metric[V]].apply(i, j), p) + .map { case (i, j) => + math.pow(implicitly[Metric[V]].apply(i, j), p) } .sum math.pow(outP, 1.0 / p) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala index d7e781c2f..643aa8ae2 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala @@ -61,9 +61,8 @@ object SGDWeights { else { val newW = left.weights.view .zip(right.weights) - .map { - case (l: Double, r: Double) => - (lc * l + rc * r) / ((lc + rc).toDouble) + .map { case (l: Double, r: Double) => + (lc * l + rc * r) / ((lc + rc).toDouble) } .toIndexedSeq SGDWeights(lc + rc, newW) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala b/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala index 9189f27a5..33fddf9c1 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala @@ -100,14 +100,12 @@ class SketchMapMonoid[K, V](val params: SketchMapParams[K])(implicit val initTable = AdaptiveMatrix.fill[V](params.depth, params.width)(monoid.zero) /* For each row, update the table for each K,V pair */ - val newTable = (0 to (params.depth - 1)).foldLeft(initTable) { - case (table, row) => - data.foldLeft(table) { - case (innerTable, (key, value)) => - val pos = (row, params.hashes(row)(key)) - val currValue: V = innerTable.getValue(pos) - innerTable.updated(pos, 
Monoid.plus(currValue, value)) - } + val newTable = (0 to (params.depth - 1)).foldLeft(initTable) { case (table, row) => + data.foldLeft(table) { case (innerTable, (key, value)) => + val pos = (row, params.hashes(row)(key)) + val currValue: V = innerTable.getValue(pos) + innerTable.updated(pos, Monoid.plus(currValue, value)) + } } SketchMap(newTable, params.updatedHeavyHitters(heavyHitters, newTable), totalValue) @@ -156,8 +154,8 @@ case class SketchMapParams[K](seed: Int, width: Int, depth: Int, heavyHittersCou * Calculates the frequency for a key given a values table. */ def frequency[V: Ordering](key: K, table: AdaptiveMatrix[V]): V = - hashes.iterator.zipWithIndex.map { - case (hash, row) => table.getValue((row, hash(key))) + hashes.iterator.zipWithIndex.map { case (hash, row) => + table.getValue((row, hash(key))) }.min /** diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala index 591bfb5df..bc2757570 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala @@ -21,8 +21,8 @@ object SpaceSaver { SSMany(capacity, Map(item -> ((count, 0L)))) private[algebird] val ordering = - Ordering.by[(_, (Long, Long)), (Long, Long)] { - case (_, (count, err)) => (-count, err) + Ordering.by[(_, (Long, Long)), (Long, Long)] { case (_, (count, err)) => + (-count, err) } implicit def spaceSaverSemiGroup[T]: Semigroup[SpaceSaver[T]] = @@ -64,20 +64,19 @@ object SpaceSaver { buff = ByteBuffer.allocate(4) buff.putInt(counters.size) buffer ++= buff.array() - counters.foreach { - case (item, (a, b)) => - val itemAsBytes = tSerializer(item) + counters.foreach { case (item, (a, b)) => + val itemAsBytes = tSerializer(item) - buff = ByteBuffer.allocate(4) - buff.putInt(itemAsBytes.length) - buffer ++= buff.array() + buff = ByteBuffer.allocate(4) + buff.putInt(itemAsBytes.length) + buffer ++= buff.array() - buffer ++= itemAsBytes + buffer ++= itemAsBytes - buff = ByteBuffer.allocate(8 * 2) - buff.putLong(a) - buff.putLong(b) - buffer ++= buff.array() + buff = ByteBuffer.allocate(8 * 2) + buff.putLong(a) + buff.putLong(b) + buffer ++= buff.array() } buffer.result.toArray } @@ -168,9 +167,8 @@ sealed abstract class SpaceSaver[T] { .filter { case (_, (count, _)) => count >= thres } .toList .sorted(ordering) - .map { - case (item, (count, err)) => - (item, Approximate(count - err, count, count, 1.0), thres <= count - err) + .map { case (item, (count, err)) => + (item, Approximate(count - err, count, count, 1.0), thres <= count - err) } /** @@ -183,9 +181,8 @@ sealed abstract class SpaceSaver[T] { .sorted(ordering) val siK = si.take(k) val countKPlus1 = si.drop(k).headOption.map(_._2._1).getOrElse(0L) - siK.map { - case (item, (count, err)) => - (item, Approximate(count - err, count, count, 1.0), countKPlus1 < count - err) + siK.map { case (item, (count, err)) => + (item, Approximate(count - err, count, count, 1.0), countKPlus1 < count - err) } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala b/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala index ef6cd8b9c..0ca62ebe2 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala @@ -39,13 +39,12 @@ class SummingCache[K, V](capacity: Int)(implicit sgv: Semigroup[V]) extends Stat protected def optNonEmpty(m: Map[K, V]): Option[Map[K, 
V]] = if (m.isEmpty) None else Some(m) override def put(m: Map[K, V]): Option[Map[K, V]] = { - val replaced = m.map { - case (k, v) => - val newV = cache - .get(k) - .map(oldV => sgv.plus(oldV, v)) - .getOrElse(v) - (k, newV) + val replaced = m.map { case (k, v) => + val newV = cache + .get(k) + .map(oldV => sgv.plus(oldV, v)) + .getOrElse(v) + (k, newV) } cache ++= replaced diff --git a/algebird-core/src/main/scala/com/twitter/algebird/macros/Cuber.scala b/algebird-core/src/main/scala/com/twitter/algebird/macros/Cuber.scala index 3bb1897bf..20ff16da7 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/macros/Cuber.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/macros/Cuber.scala @@ -52,10 +52,9 @@ object Cuber { tq"_root_.scala.$tupleType[..$optionTypes]" } - val somes = params.zip(Stream.from(1)).map { - case (param, index) => - val name = termName(c)(s"some$index") - q"val $name = _root_.scala.Some(in.$param)" + val somes = params.zip(Stream.from(1)).map { case (param, index) => + val name = termName(c)(s"some$index") + q"val $name = _root_.scala.Some(in.$param)" } val options = (1 to arity).map { index => diff --git a/algebird-core/src/main/scala/com/twitter/algebird/macros/GroupMacro.scala b/algebird-core/src/main/scala/com/twitter/algebird/macros/GroupMacro.scala index 4bf32efe5..0b6e568a4 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/macros/GroupMacro.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/macros/GroupMacro.scala @@ -29,8 +29,8 @@ object GroupMacro { import c.universe._ val companion = getCompanionObject(c) - val negateList = getParams(c).zip(implicitInstances).map { - case (param, instance) => q"$instance.negate(x.$param)" + val negateList = getParams(c).zip(implicitInstances).map { case (param, instance) => + q"$instance.negate(x.$param)" } q"override def negate(x: $T): $T = $companion.apply(..$negateList)" @@ -40,8 +40,8 @@ object GroupMacro { import c.universe._ val companion = getCompanionObject(c) - val minusList = getParams(c).zip(implicitInstances).map { - case (param, instance) => q"$instance.minus(l.$param, r.$param)" + val minusList = getParams(c).zip(implicitInstances).map { case (param, instance) => + q"$instance.minus(l.$param, r.$param)" } q"override def minus(l: $T, r: $T): $T = $companion.apply(..$minusList)" diff --git a/algebird-core/src/main/scala/com/twitter/algebird/macros/RingMacro.scala b/algebird-core/src/main/scala/com/twitter/algebird/macros/RingMacro.scala index 500373047..fe8b32b3a 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/macros/RingMacro.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/macros/RingMacro.scala @@ -16,8 +16,8 @@ object RingMacro { q"implicitly[_root_.com.twitter.algebird.Ring[${param.typeSignatureIn(T.tpe)}]]" } - val timesList = params.zip(implicitRings).map { - case (param, instance) => q"$instance.times(l.$param, r.$param)" + val timesList = params.zip(implicitRings).map { case (param, instance) => + q"$instance.times(l.$param, r.$param)" } val oneList = implicitRings.map(instance => q"$instance.one") diff --git a/algebird-core/src/main/scala/com/twitter/algebird/macros/Roller.scala b/algebird-core/src/main/scala/com/twitter/algebird/macros/Roller.scala index 919c746d0..9ca6bc900 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/macros/Roller.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/macros/Roller.scala @@ -58,10 +58,9 @@ object Roller { tq"_root_.scala.$tupleType[..$optionTypes]" } - val somes = 
params.zip(Stream.from(1)).map { - case (param, index) => - val name = termName(c)(s"some$index") - q"val $name = _root_.scala.Some(in.$param)" + val somes = params.zip(Stream.from(1)).map { case (param, index) => + val name = termName(c)(s"some$index") + q"val $name = _root_.scala.Some(in.$param)" } val items = (0 to arity).map { i => diff --git a/algebird-core/src/main/scala/com/twitter/algebird/macros/SemigroupMacro.scala b/algebird-core/src/main/scala/com/twitter/algebird/macros/SemigroupMacro.scala index 9bd70ad85..179270e48 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/macros/SemigroupMacro.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/macros/SemigroupMacro.scala @@ -26,8 +26,8 @@ object SemigroupMacro { import c.universe._ val companion = getCompanionObject(c) - val plusList = getParams(c).zip(implicitInstances).map { - case (param, instance) => q"$instance.plus(l.$param, r.$param)" + val plusList = getParams(c).zip(implicitInstances).map { case (param, instance) => + q"$instance.plus(l.$param, r.$param)" } q"def plus(l: $T, r: $T): $T = $companion.apply(..$plusList)" @@ -41,9 +41,8 @@ object SemigroupMacro { val sumOptionsGetted: List[c.Tree] = params.map(param => q"${param.name.asInstanceOf[TermName]}.get") - val getSumOptions = params.zip(implicitInstances).map { - case (param, instance) => - q"val ${param.name.asInstanceOf[TermName]} = $instance.sumOption(items.iterator.map(_.$param))" + val getSumOptions = params.zip(implicitInstances).map { case (param, instance) => + q"val ${param.name.asInstanceOf[TermName]} = $instance.sumOption(items.iterator.map(_.$param))" } val result = q"$companion.apply(..$sumOptionsGetted)" diff --git a/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala b/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala index 754a420c6..29e60316d 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala @@ -78,13 +78,11 @@ object AdaptiveMatrix { } private def sparseUpdate(storage: IndexedSeq[MMap[Int, V]], other: SparseColumnMatrix[V]): Unit = - other.rowsByColumns.zipWithIndex.foreach { - case (contents, indx) => - val curMap: MMap[Int, V] = storage(indx) - AdaptiveVector.toMap(contents).foreach { - case (col, value) => - curMap.update(col, Monoid.plus(value, curMap.getOrElse(col, innerZero))) - } + other.rowsByColumns.zipWithIndex.foreach { case (contents, indx) => + val curMap: MMap[Int, V] = storage(indx) + AdaptiveVector.toMap(contents).foreach { case (col, value) => + curMap.update(col, Monoid.plus(value, curMap.getOrElse(col, innerZero))) + } } private def goDense( @@ -98,9 +96,8 @@ object AdaptiveMatrix { val iter = storage.iterator while (iter.hasNext) { val curRow = iter.next - curRow.foreach { - case (col, value) => - buffer(row * cols + col) = value + curRow.foreach { case (col, value) => + buffer(row * cols + col) = value } row += 1 } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala b/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala index bd1e756cd..46a2f1bfc 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala @@ -44,9 +44,8 @@ private class IterCallStatistics(threadSafe: Boolean) { override def toString: String = 
distribution.zipWithIndex - .map { - case (v, i) => - (if (i == maxBucket) ">" else "<" + pow2(i)) + ": " + v + .map { case (v, i) => + (if (i == maxBucket) ">" else "<" + pow2(i)) + ": " + v } .mkString(", ") + ", avg=" + total.toDouble / count + " count=" + count diff --git a/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala b/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala index beaa7a4eb..eb151aa14 100644 --- a/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala +++ b/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala @@ -114,10 +114,9 @@ class HConsSemigroup[A, B <: HList](protected val a: Semigroup[A], protected val } else { val bufA = ArrayBufferedOperation.fromSumOption[A](1000)(a) val bufB = ArrayBufferedOperation.fromSumOption[B](1000)(b) - xs.foreach { - case a0 :: b0 => - bufA.put(a0) - bufB.put(b0) + xs.foreach { case a0 :: b0 => + bufA.put(a0) + bufB.put(b0) } Some(bufA.flush.get :: bufB.flush.get) } diff --git a/algebird-test/src/main/scala/com/twitter/algebird/ApproximateProperty.scala b/algebird-test/src/main/scala/com/twitter/algebird/ApproximateProperty.scala index b495b26b3..d9a5e1f14 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/ApproximateProperty.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/ApproximateProperty.scala @@ -102,9 +102,8 @@ object ApproximateProperty { results ++ exampleFailures ++ testsReturnedZeroProb } - val args = argsList.map { - case (name, value) => - Prop.Arg(name, value, 0, value, Pretty.prettyAny(value), Pretty.prettyAny(value)) + val args = argsList.map { case (name, value) => + Prop.Arg(name, value, 0, value, Pretty.prettyAny(value), Pretty.prettyAny(value)) } Prop.Result(success, args = args) diff --git a/algebird-test/src/main/scala/com/twitter/algebird/macros/ArbitraryCaseClassMacro.scala b/algebird-test/src/main/scala/com/twitter/algebird/macros/ArbitraryCaseClassMacro.scala index f7c02c0aa..a061afba1 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/macros/ArbitraryCaseClassMacro.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/macros/ArbitraryCaseClassMacro.scala @@ -20,9 +20,8 @@ object ArbitraryCaseClassMacro { val params = getParams(c) val types = getParamTypes(c) - val getsList = params.zip(types).map { - case (param, t) => - fq"${param.name} <- _root_.org.scalacheck.Arbitrary.arbitrary[$t]" + val getsList = params.zip(types).map { case (param, t) => + fq"${param.name} <- _root_.org.scalacheck.Arbitrary.arbitrary[$t]" } val paramsList = diff --git a/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala index 59eb9ee32..e96b7efdb 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala @@ -206,9 +206,8 @@ class AggregatorLaws extends CheckProperties { forAll { (in: List[Int], ag: Aggregator[Int, Int, Int]) => val cumulative: List[Int] = ag.applyCumulatively(in) cumulative.size == in.size && - cumulative.zipWithIndex.forall { - case (sum, i) => - sum == ag.apply(in.take(i + 1)) + cumulative.zipWithIndex.forall { case (sum, i) => + sum == ag.apply(in.take(i + 1)) } } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala index 93fa9d6a6..d11417a95 100644 --- 
a/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/BatchedTest.scala @@ -9,9 +9,8 @@ import org.scalatest.propspec.AnyPropSpec object Helpers { implicit def arbitraryBatched[A: Arbitrary]: Arbitrary[Batched[A]] = { val item = arbitrary[A].map(Batched(_)) - val items = arbitrary[(A, List[A])].map { - case (a, as) => - Batched(a).append(as) + val items = arbitrary[(A, List[A])].map { case (a, as) => + Batched(a).append(as) } Arbitrary(Gen.oneOf(item, items)) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala index 1228d6ecd..d72e87b14 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala @@ -386,11 +386,10 @@ class BloomFilterTest extends AnyWordSpec with Matchers { val bf = bfMonoid.create(entries: _*) entries .map(entry => (entry, bfMonoid.create(entry))) - .foldLeft((bfMonoid.zero, bfMonoid.zero)) { - case ((left, leftAlt), (entry, _)) => - val (newLeftAlt, contained) = leftAlt.checkAndAdd(entry) - left.contains(entry) shouldBe contained - (left + entry, newLeftAlt) + .foldLeft((bfMonoid.zero, bfMonoid.zero)) { case ((left, leftAlt), (entry, _)) => + val (newLeftAlt, contained) = leftAlt.checkAndAdd(entry) + left.contains(entry) shouldBe contained + (left + entry, newLeftAlt) } entries.foreach(i => assert(bf.contains(i.toString).isTrue)) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala b/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala index ac8b1bda9..feb6e8f70 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala @@ -249,9 +249,8 @@ class CollectionSpecification extends CheckProperties { (MapAlgebra .toGraph(l) .toIterator - .flatMap { - case (k, sv) => - sv.map(v => (k, v)) + .flatMap { case (k, sv) => + sv.map(v => (k, v)) } .toSet == l) } @@ -325,8 +324,8 @@ class CollectionSpecification extends CheckProperties { for { m <- Arbitrary.arbitrary[Map[Int, T]] } yield AdaptiveVector.fromMap( - m.filter { - case (k, _) => (k < 1000) && (k >= 0) + m.filter { case (k, _) => + (k < 1000) && (k >= 0) }, sparse, 1000 diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala index 1eb793c69..488d86235 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala @@ -45,8 +45,8 @@ class CorrelationLaws extends CheckProperties { approxEq(EPS)(corr.meanX, momentX.mean) && approxEq(EPS)(corr.meanY, momentY.mean) && (l.length < 2 || - (approxEqOrBothNaN(EPS)(corr.stddevX, momentX.stddev) && - approxEqOrBothNaN(EPS)(corr.stddevY, momentY.stddev))) + (approxEqOrBothNaN(EPS)(corr.stddevX, momentX.stddev) && + approxEqOrBothNaN(EPS)(corr.stddevY, momentY.stddev))) } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala index 195df1e32..f33aba363 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala @@ -65,9 
+65,8 @@ class DecayingCMSProperties extends CheckProperties { def genSeq(cms0: module.CMS): Gen[module.CMS] = Gen.listOf(genItem).map { items => - items.foldLeft(cms0) { - case (cms, (t, k, n)) => - cms.add(t, k, n) + items.foldLeft(cms0) { case (cms, (t, k, n)) => + cms.add(t, k, n) } } @@ -174,9 +173,8 @@ class DecayingCMSProperties extends CheckProperties { val g = genCms(module, stdKey, genTimestamp(module), stdVal) forAll(g, genItems(module)) { (cms0, items) => val time = cms0.timeInHL - val cms1 = items.foldLeft(cms0) { - case (c, (_, k, v)) => - c.scaledAdd(time, k, v) + val cms1 = items.foldLeft(cms0) { case (c, (_, k, v)) => + c.scaledAdd(time, k, v) } val got = cms1.total.value val expected = cms0.total.value + items.map(_._3).sum @@ -313,12 +311,11 @@ class DecayingCMSProperties extends CheckProperties { property("innerProductRoot(x, x) = x for singleton x") { forAll { (module: DecayingCMS[String]) => - forAll(genItem(module)) { - case (t, k, v) => - val cms0 = module.empty.add(t, k, v) - val got = cms0.l2Norm.at(t) - val expected = v - Prop(close(got, expected)) :| s"got $got, expected $expected" + forAll(genItem(module)) { case (t, k, v) => + val cms0 = module.empty.add(t, k, v) + val got = cms0.l2Norm.at(t) + val expected = v + Prop(close(got, expected)) :| s"got $got, expected $expected" } } } @@ -393,9 +390,8 @@ class DecayingCMSProperties extends CheckProperties { val tlast = inputs.last._1 val dvm = new DecayedValueMonoid(0.0) - val dv = dvm.sum(inputs.map { - case (t, n) => - DecayedValue.build(n, (t.toDouble / 1000.0), halfLifeSecs) + val dv = dvm.sum(inputs.map { case (t, n) => + DecayedValue.build(n, (t.toDouble / 1000.0), halfLifeSecs) }) val expected = dvm.valueAsOf(dv, halfLifeSecs, (tlast.toDouble / 1000.0)) @@ -427,9 +423,8 @@ class DecayingCMSProperties extends CheckProperties { ) ) - regressions.foldLeft(Prop(true)) { - case (res, (k, items, hl)) => - res && law((makeModule(hl), k, items)) + regressions.foldLeft(Prop(true)) { case (res, (k, items, hl)) => + res && law((makeModule(hl), k, items)) } } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala index 98d4dfb17..dc9f81c37 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala @@ -89,8 +89,8 @@ class ExpHistLaws extends AnyPropSpec with ScalaCheckPropertyChecks { val mostRecentTs = buckets.last.timestamp val cutoff = conf.expiration(mostRecentTs) - val fullViaAdd = buckets.foldLeft(ExpHist.empty(conf)) { - case (e, Bucket(c, t)) => e.add(c, t) + val fullViaAdd = buckets.foldLeft(ExpHist.empty(conf)) { case (e, Bucket(c, t)) => + e.add(c, t) } val actualSum = actualBucketSum(buckets, cutoff) @@ -114,9 +114,8 @@ class ExpHistLaws extends AnyPropSpec with ScalaCheckPropertyChecks { // sequence of histograms, each with one more oldest bucket // dropped off of its tail. - val histograms = (0 until numBuckets).scanLeft(hist) { - case (e, _) => - e.copy(buckets = e.buckets.init, total = e.total - e.oldestBucketSize) + val histograms = (0 until numBuckets).scanLeft(hist) { case (e, _) => + e.copy(buckets = e.buckets.init, total = e.total - e.oldestBucketSize) } // every histogram's relative error stays within bounds. 
@@ -188,8 +187,8 @@ class ExpHistLaws extends AnyPropSpec with ScalaCheckPropertyChecks { val e = ExpHist.empty(conf) val incs = (0L until bucket.size) - .foldLeft(e) { - case (acc, _) => acc.inc(bucket.timestamp) + .foldLeft(e) { case (acc, _) => + acc.inc(bucket.timestamp) } .step(bucket.timestamp) val adds = e.add(bucket.size, bucket.timestamp) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala index 737c7f85f..22981c5fd 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala @@ -187,10 +187,9 @@ class IntervalLaws extends CheckProperties { property("toLeftClosedRightOpen is an Injection") { forAll { (intr: GenIntersection[Long], tests: List[Long]) => (intr.toLeftClosedRightOpen - .map { - case Intersection(InclusiveLower(low), ExclusiveUpper(high)) => - val intr2 = Interval.leftClosedRightOpen(low, high) - tests.forall(t => intr(t) == intr2(t)) + .map { case Intersection(InclusiveLower(low), ExclusiveUpper(high)) => + val intr2 = Interval.leftClosedRightOpen(low, high) + tests.forall(t => intr(t) == intr2(t)) } .getOrElse(true)) // none means this can't be expressed as this kind of interval } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/ScanTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/ScanTest.scala index 37732df67..2802d4f45 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/ScanTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/ScanTest.scala @@ -66,10 +66,9 @@ class ScanTest extends AnyWordSpec with Matchers with ScalaCheckDrivenPropertyCh outputList.length should equal(inputList.length) outputList.zipWithIndex - .foreach { - case (ithOutput, i) => - val expectedOutput = inputList.slice(0, i + 1).mkString - ithOutput should equal(expectedOutput) + .foreach { case (ithOutput, i) => + val expectedOutput = inputList.slice(0, i + 1).mkString + ithOutput should equal(expectedOutput) } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala index d920df00a..7fed100f9 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala @@ -10,8 +10,8 @@ import org.scalatest.wordspec.AnyWordSpec object SetDiffTest { implicit def arbSetDiff[T: Arbitrary]: Arbitrary[SetDiff[T]] = - Arbitrary(arbitrary[(Set[T], Set[T])].map { - case (a, b) => SetDiff.of(a, b) + Arbitrary(arbitrary[(Set[T], Set[T])].map { case (a, b) => + SetDiff.of(a, b) }) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala index 49c1dcce4..adce742b7 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala @@ -82,9 +82,8 @@ class SpaceSaverTest extends AnyWordSpec with Matchers { } .reduce(sg.plus) .topK(20) - .foreach { - case (item, approx, _) => - assert(approx ~ exactCounts(item)) + .foreach { case (item, approx, _) => + assert(approx ~ exactCounts(item)) } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala index 3fe48f447..ef4cf6fe9 100644 --- 
a/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SummingIteratorTest.scala @@ -27,11 +27,10 @@ object SummingIteratorTest { val zl = MapAlgebra.removeZeros(l) val zr = MapAlgebra.removeZeros(r) zl.size == zr.size && { - zl.forall { - case (k, v) => - zr.get(k) - .map(rv => Equiv[V].equiv(rv, v)) - .getOrElse(false) + zl.forall { case (k, v) => + zr.get(k) + .map(rv => Equiv[V].equiv(rv, v)) + .getOrElse(false) } } } diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala index 12a90551c..d19bb5d73 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala @@ -56,9 +56,8 @@ class AsyncListMMapSum[Key, Value]( queueMap.clear l } - val result: Map[Key, Value] = curData.iterator.flatMap { - case (k, listV) => - sg.sumOption(listV).iterator.map(v => (k, v)) + val result: Map[Key, Value] = curData.iterator.flatMap { case (k, listV) => + sg.sumOption(listV).iterator.map(v => (k, v)) }.toMap tuplesOut.incrBy(result.size) @@ -70,11 +69,10 @@ class AsyncListMMapSum[Key, Value]( var newlyAddedTuples = 0 mutex.synchronized { - vals.foreach { - case (k, v) => - val existingV = queueMap.getOrElseUpdate(k, ListBuffer[Value]()) - existingV += v - newlyAddedTuples += 1 + vals.foreach { case (k, v) => + val existingV = queueMap.getOrElseUpdate(k, ListBuffer[Value]()) + existingV += v + newlyAddedTuples += 1 } presentTuples += newlyAddedTuples } diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala index d685fd72b..aca2517d7 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala @@ -128,9 +128,8 @@ class AsyncListSum[Key, Value]( def addAll(vals: TraversableOnce[(Key, Value)]): Future[Map[Key, Value]] = workPool { insertOp.incr - vals.foreach { - case (k, v) => - doInsert(k, v) + vals.foreach { case (k, v) => + doInsert(k, v) } if (elementsInCache.get >= innerBuffSize) { diff --git a/algebird-util/src/test/scala/com/twitter/algebird/util/TunnelMonoidProperties.scala b/algebird-util/src/test/scala/com/twitter/algebird/util/TunnelMonoidProperties.scala index 176c6b387..e88425056 100644 --- a/algebird-util/src/test/scala/com/twitter/algebird/util/TunnelMonoidProperties.scala +++ b/algebird-util/src/test/scala/com/twitter/algebird/util/TunnelMonoidProperties.scala @@ -47,12 +47,11 @@ object TunnelMonoidProperties { } numbers.forall { _ => val toFeed = makeRandomInput(r.nextInt) - val finalResults = helper(numbers, toFeed).zip(helper(numbers, toFeed)).map { - case (f1, f2) => - for { - b1 <- f1 - b2 <- f2 - } yield b1 == b2 + val finalResults = helper(numbers, toFeed).zip(helper(numbers, toFeed)).map { case (f1, f2) => + for { + b1 <- f1 + b2 <- f2 + } yield b1 == b2 } Await.result(Future.collect(finalResults).map(_.forall(identity))) } From 32fe70895910b1c8747f95ad13532cf81932b014 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 19 Sep 2020 02:53:08 +0200 Subject: [PATCH 096/306] Update scalafmt-core to 2.7.2 (#868) --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 4c2e1036e..1c443c81e 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=2.7.1 +version=2.7.2 maxColumn = 110 docstrings = JavaDoc newlines.alwaysBeforeMultilineDef = false From 8b7bd86965bc77bbd440a71f021c73a57bdb2ab5 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 22 Sep 2020 14:10:22 +0200 Subject: [PATCH 097/306] Update sbt-scalafix to 0.9.21 (#869) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 5dd9033af..586913e38 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -14,5 +14,5 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.4") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.20") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.21") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.3") From 5409e3380254f5e895e2a8cd56845c85d94272ec Mon Sep 17 00:00:00 2001 From: "P. Oscar Boykin" Date: Wed, 23 Sep 2020 03:15:56 -1000 Subject: [PATCH 098/306] Add scale method to Moments (#850) Co-authored-by: Filipe Regadas --- .../twitter/algebird/CorrelationMonoid.scala | 40 +--- .../com/twitter/algebird/MomentsGroup.scala | 185 +++++++++++++----- .../com/twitter/algebird/MomentsLaws.scala | 62 ++++-- 3 files changed, 196 insertions(+), 91 deletions(-) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala index 128ee0ecc..d376735dc 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala @@ -8,38 +8,6 @@ object Correlation { apply(x, 1.0) implicit val monoid: Monoid[Correlation] = CorrelationMonoid - - /** - * When combining averages, if the counts sizes are too close we - * should use a different algorithm. This constant defines how - * close the ratio of the smaller to the total count can be: - */ - private val STABILITY_CONSTANT = 0.1 - - /** - * Given two streams of doubles (weightN, an) and (weightK, ak) of form (weighted count, - * mean), calculates the mean of the combined stream. 
- * - * Uses a more stable online algorithm which should be suitable for - * large numbers of records similar to: - * http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm - * - * This differs from the implementation in MomentsGroup.scala only in that here, the counts are weighted, and are - * thus doubles instead of longs - */ - def getCombinedMean(weightN: Double, an: Double, weightK: Double, ak: Double): Double = - if (weightN < weightK) getCombinedMean(weightK, ak, weightN, an) - else - (weightN + weightK) match { - case 0.0 => 0.0 - case newCount if newCount == weightN => an - case newCount => - val scaling = weightK / newCount - // a_n + (a_k - a_n)*(k/(n+k)) is only stable if n is not approximately k - if (scaling < STABILITY_CONSTANT) (an + (ak - an) * scaling) - else (weightN * an + weightK * ak) / newCount - } - } /** @@ -125,8 +93,8 @@ object CorrelationMonoid extends Monoid[Correlation] { else { val prodSumRatio = a.totalWeight * b.totalWeight / count - val m1x = Correlation.getCombinedMean(a.totalWeight, a.m1x, b.totalWeight, b.m1x) - val m1y = Correlation.getCombinedMean(a.totalWeight, a.m1y, b.totalWeight, b.m1y) + val m1x = Moments.getCombinedMeanDouble(a.totalWeight, a.m1x, b.totalWeight, b.m1x) + val m1y = Moments.getCombinedMeanDouble(a.totalWeight, a.m1y, b.totalWeight, b.m1y) val deltaX = b.m1x - a.m1x val deltaY = b.m1y - a.m1y @@ -174,8 +142,8 @@ object CorrelationMonoid extends Monoid[Correlation] { } else { val prodSumRatio = m0 * b.m0 / m0New - val m1xNew = Correlation.getCombinedMean(m0, m1x, b.m0, b.m1x) - val m1yNew = Correlation.getCombinedMean(m0, m1y, b.m0, b.m1y) + val m1xNew = Moments.getCombinedMeanDouble(m0, m1x, b.m0, b.m1x) + val m1yNew = Moments.getCombinedMeanDouble(m0, m1y, b.m0, b.m1y) val deltaX = b.m1x - m1x val deltaY = b.m1y - m1y diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala index d893757d2..49989bffb 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala @@ -24,36 +24,84 @@ import algebra.{CommutativeGroup, CommutativeMonoid} * and kurtosis. * * m{i} denotes the ith central moment. + * + * This code manually inlines code to make it look like a case class. This is done + * because we changed the count from a Long to a Double to enable the scale method, + * which allows exponential decays of moments, but we didn't want to break backwards + * binary compatibility. */ -case class Moments(m0: Long, m1: Double, m2: Double, m3: Double, m4: Double) { +sealed class Moments(val m0D: Double, val m1: Double, val m2: Double, val m3: Double, val m4: Double) + extends Product + with Serializable { + def this(m0: Long, m1: Double, m2: Double, m3: Double, m4: Double) = { + this(m0.toDouble, m1, m2, m3, m4) + } + + def m0: Long = m0D.toLong + def count: Long = m0 + def totalWeight: Double = m0D + def mean: Double = m1 // Population variance, not sample variance. def variance: Double = - if (count > 1) - m2 / count - else - /* don't return junk when the moment is not defined */ - Double.NaN + m2 / m0D // Population standard deviation, not sample standard deviation. 
def stddev: Double = math.sqrt(variance) def skewness: Double = - if (count > 2) - math.sqrt(count) * m3 / math.pow(m2, 1.5) - else - /* don't return junk when the moment is not defined */ - Double.NaN + math.sqrt(m0D) * m3 / math.pow(m2, 1.5) def kurtosis: Double = - if (count > 3) - count * m4 / math.pow(m2, 2) - 3 + m0D * m4 / math.pow(m2, 2) - 3 + + override def productArity: Int = 5 + override def productElement(idx: Int): Any = + idx match { + case 0 => count + case 1 => m1 + case 2 => m2 + case 3 => m3 + case 4 => m4 + } + + override def canEqual(that: Any): Boolean = + that.isInstanceOf[Moments] + + def copy(c0: Long = count, v1: Double = m1, v2: Double = m2, v3: Double = m3, v4: Double = m4): Moments = { + val v0 = if (c0 == count) m0D else c0.toDouble + new Moments(m0D = v0, m1 = v1, m2 = v2, m3 = v3, m4 = v4) + } + + override def toString: String = + s"Moments($m0D, $m1, $m2, $m3, $m4)" + + override def hashCode: Int = scala.util.hashing.MurmurHash3.productHash(this) + + override def equals(that: Any): Boolean = + that match { + case thatM: Moments => + (m0D == thatM.m0D) && + (m1 == thatM.m1) && + (m2 == thatM.m2) && + (m3 == thatM.m3) && + (m4 == thatM.m4) + case _ => false + } + + /** + * Scale all the moments by a constant. This allows you to use Moments with exponential decay + */ + def scale(z: Double): Moments = + if (z < 0.0) // the "extraneous" if here is to avoid allocating the error message unless necessary + throw new IllegalArgumentException(s"cannot scale by negative value: $z") + else if (z == 0) + Moments.momentsMonoid.zero else - /* don't return junk when the moment is not defined */ - Double.NaN + new Moments(m0D = z * m0D, m1 = m1, m2 = z * m2, m3 = z * m3, m4 = z * m4) } object Moments { @@ -69,13 +117,59 @@ object Moments { def numericAggregator[N](implicit num: Numeric[N]): MonoidAggregator[N, Moments, Moments] = Aggregator.prepareMonoid { n: N => Moments(num.toDouble(n)) } - // Create a Moments object given a single value. This is useful for - // initializing moment calculations at the start of a stream. + /** + * Create a Moments object given a single value. + * This is useful for initializing moment calculations at the start of a stream. + */ def apply[V: Numeric](value: V)(implicit num: Numeric[V]): Moments = - apply(1L, num.toDouble(value), 0, 0, 0) + new Moments(1.0, num.toDouble(value), 0, 0, 0) def apply[V](m0: Long, m1: V, m2: V, m3: V, m4: V)(implicit num: Numeric[V]): Moments = new Moments(m0, num.toDouble(m1), num.toDouble(m2), num.toDouble(m3), num.toDouble(m4)) + + /** + * This it the legacy apply when count was a Long + */ + def apply(m0: Long, m1: Double, m2: Double, m3: Double, m4: Double): Moments = + new Moments(m0, m1, m2, m3, m4) + + /** + * This it the legacy unapply when count was a Long + */ + def unapply(m: Moments): Option[(Long, Double, Double, Double, Double)] = + Some((m.m0, m.m1, m.m2, m.m3, m.m4)) + + /** + * When combining averages, if the counts sizes are too close we + * should use a different algorithm. This constant defines how + * close the ratio of the smaller to the total count can be: + */ + private[this] val STABILITY_CONSTANT = 0.1 + + /** + * Given two streams of doubles (weightN, an) and (weightK, ak) of form (weighted count, + * mean), calculates the mean of the combined stream. 
+ * + * Uses a more stable online algorithm which should be suitable for + * large numbers of records similar to: + * http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm + * + * This differs from the implementation in MomentsGroup.scala only in that here, the counts are weighted, and are + * thus doubles instead of longs + */ + def getCombinedMeanDouble(weightN: Double, an: Double, weightK: Double, ak: Double): Double = + if (weightN < weightK) getCombinedMeanDouble(weightK, ak, weightN, an) + else + (weightN + weightK) match { + case 0.0 => 0.0 + case newCount if newCount == weightN => an + case newCount => + val scaling = weightK / newCount + // a_n + (a_k - a_n)*(k/(n+k)) is only stable if n is not approximately k + if (scaling < STABILITY_CONSTANT) (an + (ak - an) * scaling) + else (weightN * an + weightK * ak) / newCount + } + } class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { @@ -94,7 +188,10 @@ class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { * Uses a more stable online algorithm which should be suitable for * large numbers of records similar to: * http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm + * + * we no longer use this, but we can't remove it due to binary compatibility */ + @deprecated("Use Moments.getCombinedMeanDouble instead", since = "0.13.8") def getCombinedMean(n: Long, an: Double, k: Long, ak: Double): Double = if (n < k) getCombinedMean(k, ak, n, an) else @@ -108,33 +205,33 @@ class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { else (n * an + k * ak) / newCount } - override val zero: Moments = Moments(0L, 0.0, 0.0, 0.0, 0.0) + override val zero: Moments = new Moments(0.0, 0.0, 0.0, 0.0, 0.0) // Combines the moment calculations from two streams. // See http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Higher-order_statistics // for more information on the formulas used to update the moments. 
override def plus(a: Moments, b: Moments): Moments = { - val countCombined = a.count + b.count - if (countCombined == 0L) zero + val countCombined = a.m0D + b.m0D + if (countCombined == 0.0) zero else { val delta = b.mean - a.mean - val meanCombined = getCombinedMean(a.count, a.mean, b.count, b.mean) + val meanCombined = Moments.getCombinedMeanDouble(a.m0D, a.mean, b.m0D, b.mean) val m2 = a.m2 + b.m2 + - math.pow(delta, 2) * a.count * b.count / countCombined + math.pow(delta, 2) * a.m0D * b.m0D / countCombined val m3 = a.m3 + b.m3 + - math.pow(delta, 3) * a.count * b.count * (a.count - b.count) / math.pow(countCombined, 2) + - 3 * delta * (a.count * b.m2 - b.count * a.m2) / countCombined + math.pow(delta, 3) * a.m0D * b.m0D * (a.m0D - b.m0D) / math.pow(countCombined, 2) + + 3 * delta * (a.m0D * b.m2 - b.m0D * a.m2) / countCombined val m4 = a.m4 + b.m4 + - math.pow(delta, 4) * a.count * b.count * (math.pow(a.count, 2) - - a.count * b.count + math.pow(b.count, 2)) / math.pow(countCombined, 3) + - 6 * math.pow(delta, 2) * (math.pow(a.count, 2) * b.m2 + - math.pow(b.count, 2) * a.m2) / math.pow(countCombined, 2) + - 4 * delta * (a.count * b.m3 - b.count * a.m3) / countCombined + math.pow(delta, 4) * a.m0D * b.m0D * (math.pow(a.m0D, 2) - + a.m0D * b.m0D + math.pow(b.m0D, 2)) / math.pow(countCombined, 3) + + 6 * math.pow(delta, 2) * (math.pow(a.m0D, 2) * b.m2 + + math.pow(b.m0D, 2) * a.m2) / math.pow(countCombined, 2) + + 4 * delta * (a.m0D * b.m3 - b.m0D * a.m3) / countCombined - Moments(countCombined, meanCombined, m2, m3, m4) + new Moments(countCombined, meanCombined, m2, m3, m4) } } @@ -145,7 +242,7 @@ class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { val init = iter.next() - var count: Long = init.count + var count: Double = init.m0D var mean: Double = init.mean var m2: Double = init.m2 var m3: Double = init.m3 @@ -161,30 +258,30 @@ class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { */ val b = iter.next() - val countCombined = count + b.count + val countCombined = count + b.m0D - if (countCombined == 0L) { + if (countCombined == 0.0) { mean = 0.0 m2 = 0.0 m3 = 0.0 m4 = 0.0 } else { val delta = b.mean - mean - val meanCombined = getCombinedMean(count, mean, b.count, b.mean) + val meanCombined = Moments.getCombinedMeanDouble(count, mean, b.m0D, b.mean) val m2Combined = m2 + b.m2 + - math.pow(delta, 2) * count * b.count / countCombined + math.pow(delta, 2) * count * b.m0D / countCombined val m3Combined = m3 + b.m3 + - math.pow(delta, 3) * count * b.count * (count - b.count) / math.pow(countCombined, 2) + - 3 * delta * (count * b.m2 - b.count * m2) / countCombined + math.pow(delta, 3) * count * b.m0D * (count - b.m0D) / math.pow(countCombined, 2) + + 3 * delta * (count * b.m2 - b.m0D * m2) / countCombined val m4Combined = m4 + b.m4 + - math.pow(delta, 4) * count * b.count * (math.pow(count, 2) - - count * b.count + math.pow(b.count, 2)) / math.pow(countCombined, 3) + + math.pow(delta, 4) * count * b.m0D * (math.pow(count, 2) - + count * b.m0D + math.pow(b.m0D, 2)) / math.pow(countCombined, 3) + 6 * math.pow(delta, 2) * (math.pow(count, 2) * b.m2 + - math.pow(b.count, 2) * m2) / math.pow(countCombined, 2) + - 4 * delta * (count * b.m3 - b.count * m3) / countCombined + math.pow(b.m0D, 2) * m2) / math.pow(countCombined, 2) + + 4 * delta * (count * b.m3 - b.m0D * m3) / countCombined mean = meanCombined m2 = m2Combined @@ -195,7 +292,7 @@ class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { count = countCombined } - 
Some(Moments(count, mean, m2, m3, m4)) + Some(new Moments(count, mean, m2, m3, m4)) } } @@ -211,7 +308,7 @@ class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { object MomentsGroup extends MomentsMonoid with Group[Moments] with CommutativeGroup[Moments] { override def negate(a: Moments): Moments = - Moments(-a.count, a.m1, -a.m2, -a.m3, -a.m4) + new Moments(-a.m0D, a.m1, -a.m2, -a.m3, -a.m4) } object MomentsAggregator extends MonoidAggregator[Double, Moments, Moments] { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala index aa0fa344f..357688658 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala @@ -3,14 +3,16 @@ package com.twitter.algebird import com.twitter.algebird.BaseProperties._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec -import org.scalacheck.{Arbitrary, Gen} +import org.scalacheck.{Arbitrary, Gen, Prop} class MomentsLaws extends CheckProperties { + import Prop.forAll + val EPS: Double = 1e-10 implicit val equiv: Equiv[Moments] = Equiv.fromFunction { (ml, mr) => - (ml.m0 == mr.m0) && + approxEq(EPS)(ml.m0D, mr.m0D) && approxEq(EPS)(ml.m1, mr.m1) && approxEq(EPS)(ml.m2, mr.m2) && approxEq(EPS)(ml.m3, mr.m3) && @@ -37,12 +39,57 @@ class MomentsLaws extends CheckProperties { groupLaws[Moments] } + private val opGen: Gen[Moments] = + opBasedGen[Double](Gen.choose(-1e10, 1e10)) + property("Moments laws tested with operational generation") { - implicit val arbMom: Arbitrary[Moments] = - Arbitrary(opBasedGen[Double](Gen.choose(-1e10, 1e10))) + implicit val arbMom: Arbitrary[Moments] = Arbitrary(opGen) monoidLaws[Moments] } + + property("scaling by 0 and 1 works as you'd expect") { + forAll(opGen) { (mom: Moments) => + (mom.scale(0.0) == Monoid.zero[Moments]) && + mom.scale(1.0) == mom + } + } + + property("scaling by a and b is the same as scaling by a*b; similarly for addition") { + // use Int here instead of doubles so that we don't have to worry about overlfowing to Infinity and having to + // fine-tune numerical precision thresholds. 
+ forAll(opGen, Gen.choose(0, Int.MaxValue), Gen.choose(0, Int.MaxValue)) { (mom, a0, b0) => + val a = a0 & Int.MaxValue + val b = b0 & Int.MaxValue + (equiv.equiv(mom.scale(a).scale(b), mom.scale(a.toDouble * b)) && + equiv.equiv(mom.scale(a.toDouble + b), Monoid.plus(mom.scale(a), mom.scale(b)))) + } + } + + property("adding together scaled moments is the same as scaling then adding") { + forAll(opGen, opGen, Gen.choose(0, Int.MaxValue)) { (mom1, mom2, z0) => + val z = z0 & Int.MaxValue + val addThenScale = Monoid.plus(mom1, mom2).scale(z) + val scaleThenAdd = Monoid.plus(mom1.scale(z), mom2.scale(z)) + equiv.equiv(addThenScale, scaleThenAdd) + } + } + + property("scaling does affect total weight, doesn't affect mean, variance, or moments") { + // def sign(x: Int): Int = if (x < 0) -1 else 1 + forAll(opGen, Gen.choose(0, Int.MaxValue)) { (mom, a0) => + val a = a0 & Int.MaxValue + val scaled = mom.scale(a.toDouble) + (a == 0) || { + approxEq(EPS)(scaled.totalWeight, mom.totalWeight * a) && + approxEq(EPS)(scaled.mean, mom.mean) && + approxEq(EPS)(scaled.variance, mom.variance) && + approxEqOrBothNaN(EPS)(scaled.skewness, mom.skewness) && + approxEqOrBothNaN(EPS)(scaled.kurtosis, mom.kurtosis) + } + } + + } } class MomentsTest extends AnyWordSpec with Matchers { @@ -114,11 +161,4 @@ class MomentsTest extends AnyWordSpec with Matchers { testApproxEq(m2.skewness, 0.84375) testApproxEq(m2.kurtosis, -0.921875) } - - "Moments should not return higher-order moments for small data sets" in { - val m1 = MomentsAggregator(List(1, 2)) - testApproxEq(m1.count, 2) - assert(m1.skewness.isNaN) - assert(m1.kurtosis.isNaN) - } } From ce8d4c0f3f273d8ab2c827dab4100404a579da72 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 23 Sep 2020 19:03:59 +0200 Subject: [PATCH 099/306] Update util-core to 20.9.0 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index d1c48253d..33ef0566a 100644 --- a/build.sbt +++ b/build.sbt @@ -12,7 +12,7 @@ val scalaTestVersion = "3.2.2" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.2.0" -val utilVersion = "20.8.1" +val utilVersion = "20.9.0" val sparkVersion = "2.4.7" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = From 0e74efa63900a1f3894ae53c94ee1b79881fc863 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lafur=20P=C3=A1ll=20Geirsson?= Date: Fri, 25 Sep 2020 16:16:03 +0200 Subject: [PATCH 100/306] Enable release drafter workflow to help write changelog (#871) This commit adds a CI workflow that updates a draft for the changelog of the next GitHub release. Here is an example release that's been created with release drafter https://github.com/scalameta/mdoc/releases/tag/v2.2.6 It's not necessary to publish the release as the "github-actions" user, you can copy-paste the draft into a new release if you prefer. The draft is helpful just to get a quick overview of what has changed since the last release. 
--- .github/release-drafter.yml | 4 ++++ .github/workflows/release-drafter.yml | 14 ++++++++++++++ 2 files changed, 18 insertions(+) create mode 100644 .github/release-drafter.yml create mode 100644 .github/workflows/release-drafter.yml diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml new file mode 100644 index 000000000..a336f9b34 --- /dev/null +++ b/.github/release-drafter.yml @@ -0,0 +1,4 @@ +template: | + ## Pull Requests + + $CHANGES diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml new file mode 100644 index 000000000..17fdb961d --- /dev/null +++ b/.github/workflows/release-drafter.yml @@ -0,0 +1,14 @@ +name: Release Drafter + +on: + push: + branches: + - master + +jobs: + update_release_draft: + runs-on: ubuntu-latest + steps: + - uses: release-drafter/release-drafter@v5 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From 0bd5b43d27a8c0ccf5cc59535ec188c3da3359b8 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 30 Sep 2020 16:40:00 +0200 Subject: [PATCH 101/306] Update scalafmt-core to 2.7.3 (#872) --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 1c443c81e..47c121c45 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=2.7.2 +version=2.7.3 maxColumn = 110 docstrings = JavaDoc newlines.alwaysBeforeMultilineDef = false From c7d2e5bed2d0e4e117e8e643aac4810588b76448 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 6 Oct 2020 21:21:07 +0200 Subject: [PATCH 102/306] Update scalafmt-core to 2.7.4 --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 47c121c45..39795acef 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=2.7.3 +version=2.7.4 maxColumn = 110 docstrings = JavaDoc newlines.alwaysBeforeMultilineDef = false From 374177437e04db4645a82c6fa029c71289fb368d Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 6 Oct 2020 22:44:07 +0200 Subject: [PATCH 103/306] Update sbt to 1.4.0 (#873) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 0837f7a13..6db984250 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.13 +sbt.version=1.4.0 From 622172f44edd7269f49dacf85c915313003be2b3 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 12 Oct 2020 03:49:40 +0200 Subject: [PATCH 104/306] Update junit to 4.13.1 (#875) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 33ef0566a..cce18364b 100644 --- a/build.sbt +++ b/build.sbt @@ -71,7 +71,7 @@ val sharedSettings = Seq( }, javacOptions ++= Seq("-target", "1.6", "-source", "1.6"), libraryDependencies ++= Seq( - "junit" % "junit" % "4.13" % Test, + "junit" % "junit" % "4.13.1" % Test, "com.novocode" % "junit-interface" % "0.11" % Test ), // Publishing options: From 015d669646880d48c480176ace2496fb6d9fe9c3 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 17 Oct 2020 14:22:33 +0200 Subject: [PATCH 105/306] Update scalafmt-core to 2.7.5 (#876) --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 
39795acef..5344e8f45 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=2.7.4 +version=2.7.5 maxColumn = 110 docstrings = JavaDoc newlines.alwaysBeforeMultilineDef = false From 12c8e1d59ced9ddf2b679c7248d8fe7ba60776f0 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 19 Oct 2020 21:02:18 +0200 Subject: [PATCH 106/306] Update sbt-mima-plugin to 0.8.1 (#877) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 586913e38..e8a0deed2 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -9,7 +9,7 @@ addSbtPlugin("com.47deg" % "sbt-microsites" % "1.2.1") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.0") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.4") From 1f216431974dea6970e5dbf8f18a783f94c3b51d Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 21 Oct 2020 01:08:45 +0200 Subject: [PATCH 107/306] Update sbt to 1.4.1 (#878) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 6db984250..08e4d7933 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.0 +sbt.version=1.4.1 From 16b91acc5b4c7573a1afa6318ac9c067e614266c Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 28 Oct 2020 07:51:49 +0100 Subject: [PATCH 108/306] Update util-core to 20.10.0 (#879) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index cce18364b..9668a5ece 100644 --- a/build.sbt +++ b/build.sbt @@ -12,7 +12,7 @@ val scalaTestVersion = "3.2.2" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.14.3" val scalaCollectionCompat = "2.2.0" -val utilVersion = "20.9.0" +val utilVersion = "20.10.0" val sparkVersion = "2.4.7" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = From 176ecb49211d2425993613657e433d4c90582e3f Mon Sep 17 00:00:00 2001 From: "P. 
Oscar Boykin" Date: Sat, 31 Oct 2020 08:31:20 -1000 Subject: [PATCH 109/306] Use Kahan summation for Float and Double (#880) --- .../scala/com/twitter/algebird/Ring.scala | 20 ++++++-- .../com/twitter/algebird/AggregatorLaws.scala | 49 ++++++++++++++++++- 2 files changed, 63 insertions(+), 6 deletions(-) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala b/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala index 27deb1fe9..0092b92ee 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala @@ -144,14 +144,20 @@ object FloatRing extends Ring[Float] { override def minus(l: Float, r: Float): Float = l - r override def times(l: Float, r: Float): Float = l * r + // see: https://en.wikipedia.org/wiki/Kahan_summation_algorithm + // for this algorithm override def sumOption(t: TraversableOnce[Float]): Option[Float] = if (t.isEmpty) None else Some { - var sum = 0.0 val iter = t.toIterator + var sum = iter.next().toDouble + var c = 0.0 while (iter.hasNext) { - sum += iter.next().toDouble + val y = iter.next().toDouble - c + val t = sum + y + c = (t - sum) - y + sum = t } sum.toFloat @@ -166,14 +172,20 @@ object DoubleRing extends Ring[Double] { override def minus(l: Double, r: Double): Double = l - r override def times(l: Double, r: Double): Double = l * r + // see: https://en.wikipedia.org/wiki/Kahan_summation_algorithm + // for this algorithm override def sumOption(t: TraversableOnce[Double]): Option[Double] = if (t.isEmpty) None else Some { - var sum = 0.0 val iter = t.toIterator + var sum = iter.next() + var c = 0.0 while (iter.hasNext) { - sum += iter.next() + val y = iter.next() - c + val t = sum + y + c = (t - sum) - y + sum = t } sum diff --git a/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala index e96b7efdb..d7acbd291 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala @@ -19,6 +19,45 @@ package com.twitter.algebird import org.scalacheck.Arbitrary import org.scalacheck.Prop._ import org.scalacheck.Prop +import org.scalatest.funsuite.AnyFunSuite + +/** + * Unit tests to highlight specific examples of the properties we guarantee. + */ +class AggregatorTests extends AnyFunSuite { + test("Kahan summation mitigates Double error accumulation") { + + val input = Stream.continually(0.01).take(1000) + + assert(9.999999999999831 == input.sum, "naive summation accumulates errors") + assert(Some(10.0) == DoubleRing.sumOption(input), "Kahan summation controls error accumulation") + assert(10.0 == Aggregator.numericSum[Double].apply(input)) + } + + test("Kahan summation mitigates Float error accumulation") { + val input = Stream.continually(0.01f).take(1000) + + assert(10.0001335f == input.sum, "naive summation accumulates errors") + assert(Some(10.0f) == FloatRing.sumOption(input), "Kahan summation controls error accumulation") + + // This version builds an aggregator directly from the FloatRing, which + // accesses the correct `sumOption` implementation. 
+ assert(10.0f == Aggregator.fromMonoid[Float].apply(input)) + } + + test("Kahan summation works with Aggregator.numericSum[Float]") { + val input = Stream.continually(0.01f).take(1000) + val sum = Aggregator.numericSum[Float].apply(input) + + assert(10.0 != sum, "numericSum[Float].apply returns a Double, with error in the higher-precision noise.") + + // In fact, it's equivalent to first turning all inputs into Doubles, and + // then using the Kahan-enabled numericSum[Double]. + assert(sum == Aggregator.numericSum[Double].apply(input.map(_.toDouble))) + + assert(10.0f == sum.toFloat, "Converting back to float removes this noise.") + } +} class AggregatorLaws extends CheckProperties { @@ -89,9 +128,15 @@ class AggregatorLaws extends CheckProperties { def checkNumericSum[T: Arbitrary](implicit num: Numeric[T]): Prop = forAll { in: List[T] => val aggregator = Aggregator.numericSum[T] - aggregator(in) == in.map(num.toDouble).sum + val ares = aggregator(in) + val sres = in.map(num.toDouble).sum + (sres == ares) || { + (sres - ares).abs / (sres.abs + ares.abs) < 1e-5 + } } - property("Aggregator.numericSum is correct for Ints")(checkNumericSum[Int]) + property("Aggregator.numericSum is correct for Ints") { + checkNumericSum[Int] + } property("Aggregator.numericSum is correct for Longs") { checkNumericSum[Long] } From d0c4595a4e0ceaf12ca81ec988daea297c759b59 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 1 Nov 2020 02:48:20 +0100 Subject: [PATCH 110/306] Update scalacheck to 1.15.0 (#882) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 9668a5ece..a21b77f75 100644 --- a/build.sbt +++ b/build.sbt @@ -10,7 +10,7 @@ val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.2" val scalaTestPlusVersion = "3.1.0.0-RC2" -val scalacheckVersion = "1.14.3" +val scalacheckVersion = "1.15.0" val scalaCollectionCompat = "2.2.0" val utilVersion = "20.10.0" val sparkVersion = "2.4.7" From 0d1939a1e64c2b99a19c4f330666276534d7f094 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 2 Nov 2020 09:29:38 +0100 Subject: [PATCH 111/306] Update sbt to 1.4.2 --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 08e4d7933..c19c768d6 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.1 +sbt.version=1.4.2 From 8cec7192ba70103759193148018abfeaa34639bb Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 5 Nov 2020 01:12:49 +0100 Subject: [PATCH 112/306] Update sbt-scalafix to 0.9.23 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index e8a0deed2..3fb493110 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -14,5 +14,5 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.4") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.21") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.23") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.3") From 996310c381f44940392654683d9e63f7549bdc92 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 5 Nov 2020 12:30:52 +0100 Subject: [PATCH 113/306] Update 
sbt-ci-release to 1.5.4 (#885) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 3fb493110..96049e5fe 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -15,4 +15,4 @@ addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.4") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.23") -addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.3") +addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.4") From 52dac67848cce771d44449ec4bec9d71965b8fc3 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Thu, 5 Nov 2020 13:25:00 +0000 Subject: [PATCH 114/306] Remove unused plugins --- project/plugins.sbt | 2 -- 1 file changed, 2 deletions(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 96049e5fe..dfeb50446 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -7,12 +7,10 @@ resolvers ++= Seq( addSbtPlugin("com.47deg" % "sbt-microsites" % "1.2.1") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") -addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.0.1") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.4") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.23") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.4") From ef83ad9573067c13a8a43f32c3233ec0ea5a1ddb Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Thu, 5 Nov 2020 16:33:24 +0000 Subject: [PATCH 115/306] Update .gitignore (#887) --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 40e7e8740..836905ce4 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ sonatype.sbt /target/ /src/jline/target/ +.bsp # target directories for ant build /build/ @@ -37,6 +38,7 @@ sonatype.sbt */.settings .bloop .metals +.vscode # bak files produced by ./cleanup-commit *.bak From 66944f990c83f89cb92bebe156a1c43241eadd87 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Thu, 5 Nov 2020 18:13:52 +0000 Subject: [PATCH 116/306] Experimental path dependent bloom filter (#840) * Add new immutable BitSet impl * Add first attempt on path dependent bloom filter * Update create/query bloom filter benchmarks * Add bloom filter fold/aggregator benchmark * Split instances * Remove toSet from BitSet * Fix format * Address Zero match, explicit result types, empty BitSet and side-effect fn * Remove implicit from aggregator * Fix npe * Add BitSet tests * move Hash object to hashToArray func * Rename BF to Hash * Optimize monoid sum * Optimize a little bit more * Fix format * Apply scalafix to tests * Relocate, tweak and clean * scalafmt / scalafix * Add BloomFilter#fromBitSet * fixup! 
Add BloomFilter#fromBitSet --- .../BloomFilterCreateBenchmark.scala | 46 +- .../benchmark/BloomFilterQueryBenchmark.scala | 9 + .../twitter/algebird/immutable/compat.scala | 15 + .../twitter/algebird/immutable/compat.scala | 15 + .../twitter/algebird/immutable/BitSet.scala | 1183 +++++++++++++++++ .../algebird/immutable/BloomFilter.scala | 432 ++++++ .../com/twitter/algebird/QTreeTest.scala | 3 +- .../algebird/immutable/BitSetTest.scala | 349 +++++ .../algebird/immutable/BloomFilterTest.scala | 459 +++++++ 9 files changed, 2507 insertions(+), 4 deletions(-) create mode 100644 algebird-core/src/main/scala-2.12-/com/twitter/algebird/immutable/compat.scala create mode 100644 algebird-core/src/main/scala-2.13+/com/twitter/algebird/immutable/compat.scala create mode 100644 algebird-core/src/main/scala/com/twitter/algebird/immutable/BitSet.scala create mode 100644 algebird-core/src/main/scala/com/twitter/algebird/immutable/BloomFilter.scala create mode 100644 algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala create mode 100644 algebird-test/src/test/scala/com/twitter/algebird/immutable/BloomFilterTest.scala diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/BloomFilterCreateBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/BloomFilterCreateBenchmark.scala index a43006484..897657b93 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/BloomFilterCreateBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/BloomFilterCreateBenchmark.scala @@ -1,4 +1,5 @@ package com.twitter.algebird + package benchmark import org.openjdk.jmh.annotations._ @@ -12,17 +13,20 @@ object BloomFilterCreateBenchmark { @State(Scope.Benchmark) class BloomFilterState { - @Param(Array("100", "1000", "10000")) + @Param(Array("10000", "50000")) var nbrOfElements: Int = 0 @Param(Array("0.001", "0.01")) var falsePositiveRate: Double = 0 var randomStrings: Seq[String] = _ + var immutableBF: immutable.BloomFilter[String] = _ @Setup(Level.Trial) - def setup(): Unit = + def setup(): Unit = { randomStrings = createRandomString(nbrOfElements, 10) + immutableBF = immutable.BloomFilter[String](nbrOfElements, falsePositiveRate) + } } } @@ -37,4 +41,42 @@ class BloomFilterCreateBenchmark { val bf = bfMonoid.create(bloomFilterState.randomStrings: _*) bf } + + @Benchmark + def createImmutableBloomFilter( + bloomFilterState: BloomFilterState + ): immutable.BloomFilter[String]#Hash = + bloomFilterState.immutableBF.create(bloomFilterState.randomStrings: _*) + + @Benchmark + def createBloomFilterUsingFold(bloomFilterState: BloomFilterState): BF[String] = { + val bfMonoid = BloomFilter[String](bloomFilterState.nbrOfElements, bloomFilterState.falsePositiveRate) + val bf = bloomFilterState.randomStrings.foldLeft(bfMonoid.zero) { case (filter, string) => + filter + string + } + bf + } + + @Benchmark + def createImmutableBloomFilterUsingFold( + bloomFilterState: BloomFilterState + ): immutable.BloomFilter[String]#Hash = + bloomFilterState.randomStrings.foldLeft(bloomFilterState.immutableBF.empty) { case (filter, string) => + filter + string + } + + @Benchmark + def createBloomFilterAggregator(bloomFilterState: BloomFilterState): BF[String] = { + val bfMonoid = BloomFilter[String](bloomFilterState.nbrOfElements, bloomFilterState.falsePositiveRate) + val bfAggregator = BloomFilterAggregator(bfMonoid) + val bf = bloomFilterState.randomStrings.aggregate(bfAggregator.monoid.zero)(_ + _, _ ++ _) + bf + } + + 
@Benchmark + def createImmutableBloomFilterAggregator( + bloomFilterState: BloomFilterState + ): immutable.BloomFilter[String]#Hash = + bloomFilterState.randomStrings + .aggregate(bloomFilterState.immutableBF.aggregator.monoid.zero)(_ + _, (a, b) => a ++ b) } diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/BloomFilterQueryBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/BloomFilterQueryBenchmark.scala index 33d07f30b..c2a2a76eb 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/BloomFilterQueryBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/BloomFilterQueryBenchmark.scala @@ -1,4 +1,5 @@ package com.twitter.algebird + package benchmark import org.openjdk.jmh.annotations._ @@ -15,6 +16,7 @@ object BloomFilterQueryBenchmark { var falsePositiveRate: Double = 0 var bf: BF[String] = _ + var immutableBF: immutable.BloomFilter[String]#Hash = _ @Setup(Level.Trial) def setup(): Unit = { @@ -22,6 +24,9 @@ object BloomFilterQueryBenchmark { BloomFilterCreateBenchmark.createRandomString(nbrOfElements, 10) bf = BloomFilter[String](nbrOfElements, falsePositiveRate) .create(randomStrings: _*) + immutableBF = immutable + .BloomFilter[String](nbrOfElements, falsePositiveRate) + .create(randomStrings: _*) } } } @@ -32,4 +37,8 @@ class BloomFilterQueryBenchmark { @Benchmark def queryBloomFilter(bloomFilterState: BloomFilterState): ApproximateBoolean = bloomFilterState.bf.contains("1") + + @Benchmark + def queryBloomFilterExperimental(bloomFilterState: BloomFilterState): ApproximateBoolean = + bloomFilterState.immutableBF.contains("1") } diff --git a/algebird-core/src/main/scala-2.12-/com/twitter/algebird/immutable/compat.scala b/algebird-core/src/main/scala-2.12-/com/twitter/algebird/immutable/compat.scala new file mode 100644 index 000000000..bf2c4a20e --- /dev/null +++ b/algebird-core/src/main/scala-2.12-/com/twitter/algebird/immutable/compat.scala @@ -0,0 +1,15 @@ +package com.twitter.algebird.immutable + +private[algebird] object compat { + class BitSetWrapperSet(bitset: BitSet) extends Set[Int] { + override def contains(i: Int): Boolean = bitset(i) + + override def iterator: Iterator[Int] = bitset.iterator + + override def +(i: Int): BitSetWrapperSet = new BitSetWrapperSet(bitset + i) + + override def -(i: Int): BitSetWrapperSet = new BitSetWrapperSet(bitset - i) + + override def empty: Set[Int] = BitSet.empty.toSet + } +} diff --git a/algebird-core/src/main/scala-2.13+/com/twitter/algebird/immutable/compat.scala b/algebird-core/src/main/scala-2.13+/com/twitter/algebird/immutable/compat.scala new file mode 100644 index 000000000..8efa8ce32 --- /dev/null +++ b/algebird-core/src/main/scala-2.13+/com/twitter/algebird/immutable/compat.scala @@ -0,0 +1,15 @@ +package com.twitter.algebird.immutable + +private[algebird] object compat { + class BitSetWrapperSet(bitset: BitSet) extends Set[Int] { + override def contains(i: Int): Boolean = bitset(i) + + override def iterator: Iterator[Int] = bitset.iterator + + override def incl(i: Int): BitSetWrapperSet = new BitSetWrapperSet(bitset + i) + + override def excl(i: Int): BitSetWrapperSet = new BitSetWrapperSet(bitset - i) + + override def empty: Set[Int] = BitSet.empty.toSet + } +} diff --git a/algebird-core/src/main/scala/com/twitter/algebird/immutable/BitSet.scala b/algebird-core/src/main/scala/com/twitter/algebird/immutable/BitSet.scala new file mode 100644 index 000000000..df25bb628 --- /dev/null +++ 
b/algebird-core/src/main/scala/com/twitter/algebird/immutable/BitSet.scala @@ -0,0 +1,1183 @@ +/* +Copyright 2020 Twitter, Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + */ +package com.twitter.algebird.immutable + +import java.lang.Long.bitCount +import scala.annotation.tailrec + +import BitSet.{Branch, Empty, Leaf} + +// This implementation uses a lot of tricks for performance, which +// scalastyle is not always happy with. So we disable it. +// +// scalastyle:off + +/** + * A fast, immutable BitSet. + * + * This implementation is taken from cats-collections. + * https://github.com/typelevel/cats-collections/blob/0336992942aba9aba4a322b629447fcabe251920/core/src/main/scala/cats/collections/BitSet.scala + * + * A Bitset is a specialized type of set that tracks the `Int` values + * it contains: for each integer value, a BitSet uses a single bit to + * track whether the value is present (1) or absent (0). Bitsets are + * often sparse, since "missing" bits can be assumed to be zero. + * + * Unlike scala's default immutable this BitSet does not do a full + * copy on each added value. + * + * Internally the implementation is a tree. Each leaf uses an + * Array[Long] value to hold up to 2048 bits, and each branch uses an + * Array[BitSet] to hold up to 32 subtrees (null subtrees are treated + * as empty). + * + * Bitset treats the values it stores as 32-bit unsigned values, which + * is relevant to the internal addressing methods as well as the order + * used by `iterator`. + * + * The benchmarks suggest this bitset is MUCH faster than Scala's + * built-in bitset for cases where you may need many modifications and + * merges, (for example in a BloomFilter). + */ +sealed abstract class BitSet { lhs => + + /** + * Offset is the first value that this subtree contains. + * + * Offset will always be a multiple of 2048 (2^11). + * + * The `offset` is interpreted as a 32-bit unsigned integer. In + * other words, `(offset & 0xffffffffL)` will return the equivalent + * value as a signed 64-bit integer (between 0 and 4294967295). + */ + private[algebird] def offset: Int + + /** + * Limit is the first value beyond the range this subtree + * supports. + * + * In other words, the last value in the subtree's range is `limit - 1`. + * Like `offset`, `limit` will always be a multiple of 2048. + * + * Offset, limit, and height are related: + * + * limit = offset + (32^height) * 2048 + * limit > offset (assuming both values are unsigned) + * + * Like `offset`, `limit` is interpreted as a 32-bit unsigned + * integer. + */ + private[algebird] def limit: Long + + /** + * Height represents the number of "levels" this subtree contains. + * + * For leaves, height is zero. For branches, height will always be + * between 1 and 5. This is because a branch with offset=0 and + * height=5 will have limit=68719476736, which exceeds the largest + * unsigned 32-bit value we might want to store (4294967295). + * + * The calculation `(32^height) * 2048` tells you how many values a + * subtree contains (i.e. how many bits it holds). 
+ */ + private[algebird] def height: Int + + /** + * Look for a particular value in the bitset. + * + * Returns whether this value's bit is set. + */ + def apply(n: Int): Boolean + + /** + * Return a bitset that contains `n` and whose other values are + * identical to this one's. If this bitset already contains `n` then this + * method does nothing. + */ + def +(n: Int): BitSet + + /** + * Return a bitset that does not contain `n` and whose other values + * are identical to this one's. If this bitset does not contain `n` + * then this method does nothing. + */ + def -(n: Int): BitSet + + /** + * Return the union of two bitsets as a new immutable bitset. + * + * If either bitset contains a given value, the resulting bitset + * will also contain it. + */ + def |(rhs: BitSet): BitSet + + /** + * Return the intersection of two bitsets as a new immutable bitset. + * + * The resulting bitset will only contain a value if that value is + * present in both input bitsets. + */ + def &(rhs: BitSet): BitSet + + /** + * Returns whether the two bitsets intersect or not. + * + * Equivalent to (x & y).nonEmpty but faster. + */ + def intersects(rhs: BitSet): Boolean + + /** + * Return the exclusive-or of two bitsets as a new immutable bitset. + */ + def ^(rhs: BitSet): BitSet + + /** + * Return this bitset minus the bits contained in the other bitset + * as a new immutable bitset. + * + * The resulting bitset will contain exactly those values which do + * appear in the left-hand side but do not appear in the right-hand + * side. + * + * If the bitsets do not intersect, the left-hand side will be + * returned. + */ + def --(rhs: BitSet): BitSet + + // Internal mutability + // + // The following three methods (`+=`, `-=`, and `mutableAdd`) all + // potentially mutate `this`. + // + // These methods are used internally by BitSet's public methods to + // mutate newly-constructed trees before returning them to the + // caller. This allows us to avoid unnecessary allocations when we + // are doing a high-level operation which may result in many + // separate modifications. + + /** + * Add a single value `n` to this bitset. + * + * This method modifies this bitset. We require that the value `n` + * is in this node's range (i.e. `offset <= n < limit`). + */ + private[algebird] def +=(n: Int): Unit + + /** + * Add all values from `rhs` to this bitset. + * + * This method modifies this bitset. We require that `this` and + * `rhs` are aligned (i.e. they both must have the same `offset` and + * `height`). + */ + private[algebird] def |=(rhs: BitSet): Unit + + /** + * Add a single value `n` to this bitset to this bitset or to the + * smallest valid bitset that could contain it. + * + * Unlike `+=` this method can be called with `n` outside of this + * node's range. If the value is in range, the method is equivalent + * to `+=` (and returns `this`). Otherwise, it wraps `this` in new + * branches until the node's range is large enough to contain `n`, + * then adds the value to that node, and returns it. + */ + private[algebird] def mutableAdd(n: Int): BitSet + + private[algebird] def mutableAdd(ns: Array[Int]): BitSet = { + var bs = this + var i = 0 + while (i < ns.length) { + bs = bs.mutableAdd(ns(i)) + i += 1 + } + bs + } + + /** + * Return a compacted bitset containing the same values as this one. + * + * This method is used to prune out "empty" branches that don't + * contain values. 
By default, bitset does not try to remove empty + * leaves when removing values (since repeatedly checking for this + * across many deletions would be expensive). + * + * The bitset returned will have the same values as the current + * bitset, but is guaranteed not to contain any empty branches. + * Empty branches are not usually observable but would result in + * increased memory usage. + */ + def compact: BitSet = { + def recur(x: BitSet): BitSet = + x match { + case leaf @ Leaf(_, _) => + if (leaf.isEmpty) null else leaf + case Branch(o, h, cs0) => + var i = 0 + var found: BitSet = null + while (i < 32 && found == null) { + val c = cs0(i) + if (c != null) found = recur(c) + i += 1 + } + if (found == null) { + null + } else { + val cs1 = new Array[BitSet](32) + cs1(i - 1) = found + while (i < 32) { + val c = cs0(i) + if (c != null) cs1(i) = recur(c) + i += 1 + } + Branch(o, h, cs1) + } + } + val res = recur(this) + if (res == null) Empty else res + } + + /** + * Returns the number of distinct values in this bitset. + * + * For branches, this method will return the sum of the sizes of all + * its subtrees. For leaves it returns the number of bits set in the + * leaf (i.e. the number of values the leaf contains). + */ + def size: Long + + /** + * Iterate across all values in the bitset. + * + * Values in the iterator will be seen in "unsigned order" (e.g. if + * present, -1 will always come last). Here's an abbreviated view of + * this order in practice: + * + * 0, 1, 2, ... 2147483646, 2147483647, -2147483648, -2147483647, ... -1 + * + * (This "unsigned order" is identical to the tree's internal order.) + */ + def iterator: Iterator[Int] + + /** + * Iterate across all values in the bitset in reverse order. + * + * The order here is exactly the reverse of `.iterator`. + */ + def reverseIterator: Iterator[Int] + + /** + * Present a view of this bitset as a `scala.Set[Int]`. + * + * This is provided for compatibility with Scala collections. Many + * of the set operations are implemented in terms of `BitSet`, but + * other operations (for example `map`) may copy these values into a + * different `Set` implementation. + */ + def toSet: Set[Int] = + new compat.BitSetWrapperSet(this) + + /** + * Returns false if this bitset contains values, true otherwise. + */ + def isEmpty: Boolean + + /** + * Returns true if this bitset contains values, false otherwise. + */ + def nonEmpty: Boolean = !isEmpty + + /** + * Produce a string representation of this BitSet. + * + * This representation will contain all the values in the bitset. + * For large bitsets, this operation may be very expensive. + */ + override def toString: String = + iterator.map(_.toString).mkString("BitSet(", ", ", ")") + + /** + * Produce a structured representation of this BitSet. + * + * This representation is for internal-use only. It gives a view of + * how the bitset is encoded in a tree, showing leaves and branches. + */ + private[algebird] def structure: String = + // This is for debugging, we don't care about coverage here + // $COVERAGE-OFF$ + this match { + case Branch(o, h, cs) => + val s = cs.iterator.zipWithIndex + .filter { case (c, _) => c != null } + .map { case (c, i) => s"$i -> ${c.structure}" } + .mkString("Array(", ", ", ")") + s"Branch($o, $h, $s)" + case Leaf(o, vs) => + val s = vs.zipWithIndex + .collect { + case (n, i) if n != 0 => s"$i -> $n" + } + .mkString("{", ", ", "}") + s"Leaf($o, $s)" + } + // $COVERAGE-ON$ + + /** + * Universal equality. 
+ * + * This method will only return true if the right argument is also a + * `BitSet`. It does not attempt to coerce either argument in any + * way (unlike Scala collections, for example). + * + * Two bitsets can be equal even if they have different underlying + * tree structure. (For example, one bitset's tree may have empty + * branches that the other lacks.) + */ + override def equals(that: Any): Boolean = + that match { + case t: BitSet => + val it0 = this.iterator + val it1 = t.iterator + while (it0.hasNext && it1.hasNext) { + if (it0.next() != it1.next()) return false + } + it0.hasNext == it1.hasNext + case _ => + false + } + + /** + * Universal hash code. + * + * Bitsets that are the equal will hash to the same value. As in + * `equals`, the values present determine the hash code, as opposed + * to the tree structure. + */ + override def hashCode: Int = { + var hash: Int = 1500450271 // prime number + val it = iterator + while (it.hasNext) { + hash = (hash * 1023465798) + it.next() // prime number + } + hash + } + +} + +object BitSet { + + /** + * Returns an empty immutable bitset. + */ + final def empty: BitSet = Empty + + /** + * Singleton value representing an empty bitset. + */ + final val Empty: BitSet = + newEmpty(0) + + /** + * Returns an empty leaf. + * + * This is used internally with the assumption that it will be + * mutated to "add" values to it. In cases where no values need to + * be added, `empty` should be used instead. + */ + @inline private[algebird] def newEmpty(offset: Int): BitSet = + Leaf(offset, new Array[Long](32)) + + /** + * Construct an immutable bitset from the given integer values. + */ + final def apply(xs: Int*): BitSet = + if (xs.isEmpty) Empty + else { + var bs = newEmpty(0) + val iter = xs.iterator + while (iter.hasNext) { + bs = bs.mutableAdd(iter.next()) + } + bs + } + + final def apply(xs: Array[Int]): BitSet = + if (xs.length == 0) Empty + else { + var bs = newEmpty(0) + var idx = 0 + while (idx < xs.length) { + bs = bs.mutableAdd(xs(idx)) + idx += 1 + } + bs + } + + /** + * Given a value (`n`), and offset (`o`) and a height (`h`), compute + * the array index used to store the given value's bit. + */ + @inline private[algebird] def index(n: Int, o: Int, h: Int): Int = + (n - o) >>> (h * 5 + 6) + + case class InternalError(msg: String) extends Exception(msg) + + /** + * Construct a parent for the given bitset. + * + * The parent is guaranteed to be correctly aligned, and to have a + * height one greater than the given bitset. + */ + private[algebird] def parentFor(b: BitSet): BitSet = { + val h = b.height + 1 + val o = b.offset & -(1 << (h * 5 + 11)) + val cs = new Array[BitSet](32) + val i = (b.offset - o) >>> (h * 5 + 6) + cs(i) = b + Branch(o, h, cs) + } + + /** + * Return a branch containing the given bitset `b` and value `n`. + * + * This method assumes that `n` is outside of the range of `b`. It + * will return the smallest branch that contains both `b` and `n`. 
+ */ + @tailrec + private def adoptedPlus(b: BitSet, n: Int): Branch = { + val h = b.height + 1 + val o = b.offset & -(1 << (h * 5 + 11)) + val cs = new Array[BitSet](32) + val parent = Branch(o, h, cs) + val i = (b.offset - o) >>> (h * 5 + 6) + // this looks unsafe since we are going to mutate parent which points + // to b, but critically we never mutate the Array containing b + cs(i) = b + val j = BitSet.index(n, o, h) + if (j < 0 || 32 <= j) { + adoptedPlus(parent, n) + } else { + parent += n + parent + } + } + + /** + * Return a branch containing the given bitsets `b` and `rhs`. + * + * This method assumes that `rhs` is at least partially-outside of + * the range of `b`. It will return the smallest branch that + * contains both `b` and `rhs`. + */ + @tailrec + private def adoptedUnion(b: BitSet, rhs: BitSet): BitSet = { + val h = b.height + 1 + val o = b.offset & -(1 << (h * 5 + 11)) + val cs = new Array[BitSet](32) + val parent = Branch(o, h, cs) + val i = (b.offset - o) >>> (h * 5 + 6) + cs(i) = b + val j = BitSet.index(rhs.offset, o, h) + if (j < 0 || 32 <= j || rhs.height > parent.height) { + adoptedUnion(parent, rhs) + } else { + // we don't own parent, because it points to b + // so we can't use mutating union here: + // If we can be sure that b and rhs don't share structure that will be mutated + // then we could mutate: + // parent |= rhs + // parent + parent | rhs + } + } + + private case class Branch(offset: Int, height: Int, children: Array[BitSet]) extends BitSet { + + @inline private[algebird] def limit: Long = offset + (1L << (height * 5 + 11)) + + @inline private[algebird] def index(n: Int): Int = (n - offset) >>> (height * 5 + 6) + @inline private[algebird] def valid(i: Int): Boolean = 0 <= i && i < 32 + @inline private[algebird] def invalid(i: Int): Boolean = i < 0 || 32 <= i + + def apply(n: Int): Boolean = { + val i = index(n) + valid(i) && { + val c = children(i) + c != null && c(n) + } + } + + def isEmpty: Boolean = { + var idx = 0 + while (idx < children.length) { + val c = children(idx) + val empty = (c == null) || c.isEmpty + if (!empty) return false + idx += 1 + } + true + } + + def newChild(i: Int): BitSet = { + val o = offset + i * (1 << height * 5 + 6) + if (height == 1) BitSet.newEmpty(o) + else Branch(o, height - 1, new Array[BitSet](32)) + } + + def +(n: Int): BitSet = { + val i = index(n) + if (invalid(i)) { + BitSet.adoptedPlus(this, n) + } else { + val c0 = children(i) + val c1 = + if (c0 != null) c0 + n + else { + val cc = newChild(i) + cc += n + cc + } + // we already had this item + if (c0 eq c1) this + else replace(i, c1) + } + } + + def replace(i: Int, child: BitSet): Branch = { + val cs = new Array[BitSet](32) + System.arraycopy(children, 0, cs, 0, 32) + cs(i) = child + copy(children = cs) + } + + def -(n: Int): BitSet = { + val i = index(n) + if (invalid(i)) this + else { + val c = children(i) + if (c == null) this + else { + val c1 = c - n + if (c1 eq c) this // we don't contain n + else replace(i, c - n) + } + } + } + + def |(rhs: BitSet): BitSet = + if (this eq rhs) { + this + } else if (height > rhs.height) { + if (rhs.offset < offset || limit <= rhs.offset) { + // this branch doesn't contain rhs + BitSet.adoptedUnion(this, rhs) + } else { + // this branch contains rhs, so find its index + val i = index(rhs.offset) + val c0 = children(i) + val c1 = + if (c0 != null) c0 | rhs + else if (height == 1) rhs + else { + val cc = newChild(i) + cc |= rhs + cc + } + replace(i, c1) + } + } else if (height < rhs.height) { + // use commuativity to handle 
this in previous case + rhs | this + } else if (offset != rhs.offset) { + // same height, but non-overlapping + BitSet.adoptedUnion(this, rhs) + } else { + // height == rhs.height, so we know rhs is a Branch. + val Branch(_, _, rcs) = rhs + val cs = new Array[BitSet](32) + var i = 0 + while (i < 32) { + val x = children(i) + val y = rcs(i) + cs(i) = if (x == null) y else if (y == null) x else x | y + i += 1 + } + Branch(offset, height, cs) + } + + def &(rhs: BitSet): BitSet = + if (this eq rhs) { + this + } else if (height > rhs.height) { + if (rhs.offset < offset || limit <= rhs.offset) { + Empty + } else { + // this branch contains rhs, so find its index + val i = index(rhs.offset) + val c0 = children(i) + if (c0 != null) c0 & rhs else Empty + } + } else if (height < rhs.height) { + // use commuativity to handle this in previous case + rhs & this + } else if (offset != rhs.offset) { + // same height, but non-overlapping + Empty + } else { + // height == rhs.height, so we know rhs is a Branch. + val Branch(_, _, rcs) = rhs + val cs = new Array[BitSet](32) + var i = 0 + var nonEmpty = false + while (i < 32) { + val x = children(i) + val y = rcs(i) + if (x != null && y != null) { + val xy = x & y + if (!(xy eq Empty)) { + nonEmpty = true + cs(i) = xy + } + } + i += 1 + } + if (nonEmpty) Branch(offset, height, cs) + else Empty + } + + def intersects(rhs: BitSet): Boolean = + if (height > rhs.height) { + if (rhs.offset < offset || limit <= rhs.offset) { + false + } else { + // this branch contains rhs, so find its index + val i = index(rhs.offset) + val c0 = children(i) + if (c0 != null) c0.intersects(rhs) else false + } + } else if (height < rhs.height) { + // use commuativity to handle this in previous case + rhs.intersects(this) + } else if (offset != rhs.offset) { + // same height, but non-overlapping + false + } else { + // height == rhs.height, so we know rhs is a Branch. + val Branch(_, _, rcs) = rhs + var i = 0 + while (i < 32) { + val x = children(i) + val y = rcs(i) + if (x != null && y != null && (x.intersects(y))) return true + i += 1 + } + false + } + + def ^(rhs: BitSet): BitSet = + if (this eq rhs) { + // TODO: it is unclear why BitSet.Empty isn't okay here. + // Tests pass if we do it, but it seems a pretty minor optimization + // If we need some invariant, we should have a test for it. + // newEmpty(offset) + // + // a motivation to use Empty here is to avoid always returning + // aligned offsets, which make some of the branches below unreachable + BitSet.Empty + } else if (height > rhs.height) { + if (rhs.offset < offset || limit <= rhs.offset) { + this | rhs + } else { + // this branch contains rhs, so find its index + val i = index(rhs.offset) + val c0 = children(i) + if (c0 != null) { + val cc = c0 ^ rhs + if (c0 eq cc) this else replace(i, cc) + } else { + var cc = rhs + while (cc.height < height - 1) cc = BitSet.parentFor(cc) + replace(i, cc) + } + } + } else if (height < rhs.height) { + // use commuativity to handle this in previous case + rhs ^ this + } else if (offset != rhs.offset) { + // same height, but non-overlapping + this | rhs + } else { + // height == rhs.height, so we know rhs is a Branch. 
+ val Branch(_, _, rcs) = rhs + val cs = new Array[BitSet](32) + var i = 0 + while (i < 32) { + val c0 = children(i) + val c1 = rcs(i) + cs(i) = if (c1 == null) c0 else if (c0 == null) c1 else c0 ^ c1 + i += 1 + } + Branch(offset, height, cs) + } + + def --(rhs: BitSet): BitSet = + rhs match { + case _ if this eq rhs => + Empty + case b @ Branch(_, _, _) if height < b.height => + if (offset < b.offset || b.limit <= offset) this + else { + val c = b.children(b.index(offset)) + if (c == null) this else this -- c + } + case b @ Branch(_, _, _) if height == b.height => + if (offset != b.offset) { + this + } else { + var newChildren: Array[BitSet] = null + var i = 0 + while (i < 32) { + val c0 = children(i) + val c1 = b.children(i) + val cc = if (c0 == null || c1 == null) c0 else c0 -- c1 + if (!(c0 eq cc)) { + if (newChildren == null) { + newChildren = new Array[BitSet](32) + var j = 0 + while (j < i) { + newChildren(j) = children(j) + j += 1 + } + } + newChildren(i) = cc + } else if (newChildren != null) { + newChildren(i) = c0 + } + i += 1 + } + if (newChildren == null) this + else Branch(offset, height, newChildren) + } + case _ /* height > rhs.height */ => + if (rhs.offset < offset || limit <= rhs.offset) { + this + } else { + // this branch contains rhs, so find its index + val i = index(rhs.offset) + val c = children(i) + if (c == null) { + this + } else { + val cc = c -- rhs + if (c eq cc) this else replace(i, cc) + } + } + } + + private[algebird] def +=(n: Int): Unit = { + val i = index(n) + val c0 = children(i) + if (c0 == null) { + val c = newChild(i) + children(i) = c + c += n + } else { + c0 += n + } + } + + private[algebird] def mutableAdd(n: Int): BitSet = { + val i = index(n) + if (valid(i)) { + val c0 = children(i) + if (c0 == null) { + val c = newChild(i) + children(i) = c + c += n + } else { + c0 += n + } + this + } else { + BitSet.adoptedPlus(this, n) + } + } + + private[algebird] def |=(rhs: BitSet): Unit = + if (height > rhs.height) { + if (rhs.offset < offset || limit <= rhs.offset) { + throw InternalError("union outside of branch jurisdiction") + } else { + // this branch contains rhs, so find its index + val i = index(rhs.offset) + val c0 = children(i) + if (c0 == null) { + val c1 = newChild(i) + c1 |= rhs + children(i) = c1 + } else { + c0 |= rhs + } + } + } else if (height < rhs.height) { + throw InternalError("branch too short for union") + } else if (offset != rhs.offset) { + throw InternalError("branch misaligned") + } else { + // height == rhs.height, so we know rhs is a Branch. 
+ val Branch(_, _, rcs) = rhs + var i = 0 + while (i < 32) { + val x = children(i) + val y = rcs(i) + if (x == null) children(i) = y + else if (y != null) x |= rcs(i) + i += 1 + } + } + + // TODO: optimize + def iterator: Iterator[Int] = + children.iterator.flatMap { + case null => Iterator.empty + case c => c.iterator + } + + def reverseIterator: Iterator[Int] = + children.reverseIterator.flatMap { + case null => Iterator.empty + case c => c.reverseIterator + } + + def size: Long = { + var i = 0 + var n = 0L + while (i < 32) { + val c = children(i) + if (c != null) n += c.size + i += 1 + } + n + } + } + + private case class Leaf(offset: Int, private val values: Array[Long]) extends BitSet { + + @inline private[algebird] def limit: Long = offset + 2048L + + @inline private[algebird] def index(n: Int): Int = (n - offset) >>> 6 + @inline private[algebird] def bit(n: Int): Int = (n - offset) & 63 + + def height: Int = 0 + + def apply(n: Int): Boolean = { + val i = index(n) + (0 <= i && i < 32) && (((values(i) >>> bit(n)) & 1) == 1) + } + + def arrayCopy: Array[Long] = { + val vs = new Array[Long](32) + System.arraycopy(values, 0, vs, 0, 32) + vs + } + + def +(n: Int): BitSet = { + val i = index(n) + if (0 <= i && i < 32) { + val mask = 1L << bit(n) + val vsi = values(i) + if ((vsi & mask) == 1L) this + else { + val vs = arrayCopy + vs(i) = vsi | mask + Leaf(offset, vs) + } + } else { + BitSet.adoptedPlus(this, n) + } + } + + def -(n: Int): BitSet = { + val i = index(n) + if (i < 0 || 32 <= i) { + this + } else { + val mask = 1L << bit(n) + val vsi = values(i) + if ((vsi & mask) == 0L) this + else { + val vs = arrayCopy + vs(i) = vsi & (~mask) + Leaf(offset, vs) + } + } + } + + def isEmpty: Boolean = { + var idx = 0 + while (idx < values.length) { + val empty = values(idx) == 0L + if (!empty) return false + idx += 1 + } + true + } + + def size: Long = { + var c = 0L + var i = 0 + while (i < 32) { + c += bitCount(values(i)) + i += 1 + } + c + } + + def |(rhs: BitSet): BitSet = + rhs match { + case Leaf(`offset`, values2) => + val vs = new Array[Long](32) + var i = 0 + while (i < 32) { + vs(i) = values(i) | values2(i) + i += 1 + } + Leaf(offset, vs) + case _ => + // TODO: this is the only branch where + // we could have overlapping positions. + // if we could be more careful we could + // allow some mutations in adoptedUnion + // since we know we never mutate the + // overlapping part. + BitSet.adoptedUnion(this, rhs) + } + + def &(rhs: BitSet): BitSet = + rhs match { + case Leaf(o, values2) => + if (this eq rhs) { + this + } else if (o != offset) { + Empty + } else { + val vs = new Array[Long](32) + var i = 0 + while (i < 32) { + vs(i) = values(i) & values2(i) + i += 1 + } + Leaf(offset, vs) + } + case Branch(_, _, _) => + rhs & this + } + + def intersects(rhs: BitSet): Boolean = + rhs match { + case Leaf(o, values2) => + if (o != offset) { + false + } else { + var i = 0 + while (i < 32) { + if ((values(i) & values2(i)) != 0L) return true + i += 1 + } + false + } + case Branch(_, _, _) => + rhs.intersects(this) + } + + def ^(rhs: BitSet): BitSet = + rhs match { + case Leaf(o, values2) => + if (this eq rhs) { + // TODO: it is unclear why BitSet.Empty isn't okay here. + // Tests pass if we do it, but it seems a pretty minor optimization + // If we need some invariant, we should have a test for it. 
+ // newEmpty(offset) + // + // a motivation to use Empty here is to avoid always returning + // aligned offsets, which make some of the branches below unreachable + BitSet.Empty + } else if (o != offset) { + this | rhs + } else { + val vs = new Array[Long](32) + var i = 0 + while (i < 32) { + vs(i) = values(i) ^ values2(i) + i += 1 + } + Leaf(offset, vs) + } + case Branch(_, _, _) => + rhs ^ this + } + + def --(rhs: BitSet): BitSet = + rhs match { + case Leaf(o, values2) => + if (o != offset) { + this + } else { + val vs = new Array[Long](32) + var i = 0 + while (i < 32) { + vs(i) = values(i) & (~values2(i)) + i += 1 + } + Leaf(offset, vs) + } + case b @ Branch(_, _, _) => + val j = b.index(offset) + if (0 <= j && j < 32) { + val c = b.children(j) + if (c == null) this else this -- c + } else { + this + } + } + + private[algebird] def +=(n: Int): Unit = { + val i = index(n) + val j = bit(n) + values(i) |= (1L << j) + } + + private[algebird] def mutableAdd(n: Int): BitSet = { + val i = index(n) + if (0 <= i && i < 32) { + values(i) |= (1L << bit(n)) + this + } else { + BitSet.adoptedPlus(this, n) + } + } + + private[algebird] def |=(rhs: BitSet): Unit = + rhs match { + case Leaf(`offset`, values2) => + var i = 0 + while (i < 32) { + values(i) |= values2(i) + i += 1 + } + case _ => + throw InternalError("illegal leaf union") + } + + def iterator: Iterator[Int] = + new LeafIterator(offset, values) + + def reverseIterator: Iterator[Int] = + new LeafReverseIterator(offset, values) + } + + implicit val orderingForBitSet: Ordering[BitSet] = new Ordering[BitSet] { + override def compare(x: BitSet, y: BitSet): Int = { + val itx = x.iterator + val ity = y.iterator + while (itx.hasNext && ity.hasNext) { + val c = Integer.compare(itx.next(), ity.next()) + if (c != 0) return c + } + if (itx.hasNext) 1 + else if (ity.hasNext) -1 + else 0 + } + } + + /** + * Efficient, low-level iterator for BitSet.Leaf values. + * + * As mentioned in `BitSet.iterator`, this method will return values + * in unsigned order (e.g. Int.MaxValue comes before Int.MinValue). + */ + private class LeafIterator(offset: Int, values: Array[Long]) extends Iterator[Int] { + var i: Int = 0 + var x: Long = values(0) + var n: Int = offset + + @tailrec private def search(): Unit = + if (x == 0 && i < 31) { + i += 1 + n = offset + i * 64 + x = values(i) + search() + } else () + + private def advance(): Unit = { + x = x >>> 1 + n += 1 + search() + } + + search() + + def hasNext: Boolean = x != 0 + + def next(): Int = { + while (x != 0 && (x & 1) == 0) advance() + if (x == 0) throw new NoSuchElementException("next on empty iterator") + val res = n + advance() + res + } + } + + /** + * Efficient, low-level reversed iterator for BitSet.Leaf values. + * + * This class is very similar to LeafIterator but returns values in + * the reverse order. 
+ */ + private class LeafReverseIterator(offset: Int, values: Array[Long]) extends Iterator[Int] { + var i: Int = 31 + var x: Long = values(31) + var n: Int = offset + (i + 1) * 64 - 1 + + @tailrec private def search(): Unit = + if (x == 0 && i > 0) { + i -= 1 + n = offset + (i + 1) * 64 - 1 + x = values(i) + search() + } else () + + private def advance(): Unit = { + x = x << 1 + n -= 1 + search() + } + + search() + + def hasNext: Boolean = x != 0 + + def next(): Int = { + while (x > 0) advance() + if (x == 0) throw new NoSuchElementException("next on empty iterator") + val res = n + advance() + res + } + } +} + +// scalastyle:on diff --git a/algebird-core/src/main/scala/com/twitter/algebird/immutable/BloomFilter.scala b/algebird-core/src/main/scala/com/twitter/algebird/immutable/BloomFilter.scala new file mode 100644 index 000000000..af1c1fa57 --- /dev/null +++ b/algebird-core/src/main/scala/com/twitter/algebird/immutable/BloomFilter.scala @@ -0,0 +1,432 @@ +/* +Copyright 2020 Twitter, Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + */ + +package com.twitter.algebird.immutable + +import algebra.BoundedSemilattice +import com.twitter.algebird.{Approximate, ApproximateBoolean, Hash128, Monoid, MonoidAggregator} + +import scala.collection.compat._ +import scala.util._ + +object BloomFilter { + + final def apply[A](numEntries: Int, fpProb: Double)(implicit hash: Hash128[A]): BloomFilter[A] = + BloomFilter.optimalWidth(numEntries, fpProb) match { + case None => + throw new java.lang.IllegalArgumentException( + s"BloomFilter cannot guarantee the specified false positive probability for the number of entries! (numEntries: $numEntries, fpProb: $fpProb)" + ) + case Some(width) => + val numHashes = BloomFilter.optimalNumHashes(numEntries, width) + new BloomFilter(numHashes, width) + } + + // Compute optimal number of hashes: k = m/n ln(2) + def optimalNumHashes(numEntries: Int, width: Int): Int = + math.ceil(width / numEntries * math.log(2)).toInt + + // Compute optimal width: m = - n ln(p) / (ln(2))^2 + // return None if we can't meet this false positive probability + def optimalWidth(numEntries: Int, fpProb: Double): Option[Int] = { + val widthEstimate = math + .ceil(-1 * numEntries * math.log(fpProb) / math.log(2) / math.log(2)) + .toInt + + if (widthEstimate == Int.MaxValue) None + else Some(widthEstimate) + } + + /** + * Cardinality estimates are taken from Theorem 1 on page 15 of + * "Cardinality estimation and dynamic length adaptation for Bloom filters" + * by Papapetrou, Siberski, and Nejdl: + * http://www.softnet.tuc.gr/~papapetrou/publications/Bloomfilters-DAPD.pdf + * + * Roughly, by using bounds on the expected number of true bits after n elements + * have been inserted into the Bloom filter, we can go from the actual number of + * true bits (which is known) to an estimate of the cardinality. + * + * approximationWidth defines an interval around the maximum-likelihood cardinality + * estimate. 
Namely, the approximation returned is of the form + * (min, estimate, max) = + * ((1 - approxWidth) * estimate, estimate, (1 + approxWidth) * estimate) + */ + def sizeEstimate( + numBits: Int, + numHashes: Int, + width: Int, + approximationWidth: Double = 0.05 + ): Approximate[Long] = { + assert(0 <= approximationWidth && approximationWidth < 1, "approximationWidth must lie in [0, 1)") + + /** + * s(n) is the expected number of bits that have been set to true after + * n elements have been inserted into the Bloom filter. + * This is \hat{S}(n) in the cardinality estimation paper used above. + */ + def s(n: Int): Double = + width * (1 - scala.math.pow(1 - 1.0 / width, numHashes * n)) + + /** + * sInverse(t) is the maximum likelihood value for the number of elements + * that have been inserted into the Bloom filter when it has t bits set to true. + * This is \hat{S}^{-1}(t) in the cardinality estimation paper used above. + */ + def sInverse(t: Int): Double = + scala.math.log1p(-t.toDouble / width) / (numHashes * scala.math.log1p(-1.0 / width)) + + // Variable names correspond to those used in the paper. + val t = numBits + val n = sInverse(t).round.toInt + // Take the min and max because the probability formula assumes + // nl <= sInverse(t - 1) and sInverse(t + 1) <= nr + val nl = + scala.math.min(sInverse(t - 1).floor, (1 - approximationWidth) * n).toInt + val nr = + scala.math.max(sInverse(t + 1).ceil, (1 + approximationWidth) * n).toInt + val prob = + 1 - + scala.math.exp(t - 1 - s(nl)) * + scala.math.pow(s(nl) / (t - 1), t - 1) - + scala.math.exp(-scala.math.pow(t + 1 - s(nr), 2) / (2 * s(nr))) + + Approximate[Long](nl, n, nr, scala.math.max(0, prob)) + } +} + +/** + * Bloom Filter - a probabilistic data structure to test presence of an element. + * + * Operations + * 1) insert: hash the value k times, updating the bitfield at the index equal to each hashed value + * 2) query: hash the value k times. If there are k collisions, then return true; otherwise false. + * + * http://en.wikipedia.org/wiki/Bloom_filter + */ +final case class BloomFilter[A](numHashes: Int, width: Int)(implicit val hash: Hash128[A]) { self => + + /** + * Hash the value `numHashes` times and return an array of indices of true bits in the [[BitSet]]. + */ + def hashToArray(s: A): Array[Int] = { + val target = new Array[Int](numHashes) + hashToArray(s, target) + target + } + + private def hashToArray(s: A, target: Array[Int]): Unit = + nextHash(s, 0, new Array[Int](4), 4, target) + + private def splitLong(x: Long, buffer: Array[Int], idx: Int): Unit = { + // unfortunately, this is the function we committed to some time ago, and we have tests + // locking it down. 
x.toInt & 0x7fffffff should work, but this gives a few different values + def toNonNegativeInt(x: Long): Int = + (math + .abs(x) + .toInt) & 0x7fffffff // no change for positive numbers, converts Integer.MIN_VALUE to positive number + + val upper = toNonNegativeInt(x >> 32) + val lower = toNonNegativeInt((x << 32) >> 32) + buffer(idx) = upper + buffer(idx + 1) = lower + } + + @annotation.tailrec + private def nextHash( + valueToHash: A, + hashIndex: Int, + buffer: Array[Int], + bidx: Int, + target: Array[Int] + ): Unit = + if (hashIndex != numHashes) { + val thisBidx = if (bidx > 3) { + val (a, b) = hash.hashWithSeed((numHashes - hashIndex).toLong, valueToHash) + splitLong(a, buffer, 0) + splitLong(b, buffer, 2) + 0 + } else bidx + + target(hashIndex) = buffer(thisBidx) % width + nextHash(valueToHash, hashIndex + 1, buffer, thisBidx + 1, target) + } + + /** + * Bloom Filter data structure + */ + sealed abstract class Hash extends Serializable { + def numHashes: Int = self.numHashes + + def width: Int = self.width + + /** + * The number of bits set to true in the bloom filter + */ + def numBits: Int + + /** + * Proportion of bits that are set to true. + */ + def density: Double = numBits.toDouble / width + + def ++(other: Hash): Hash + + def +(other: A): Hash + + def |(other: Hash): Hash = this ++ other + + def checkAndAdd(item: A): (Hash, ApproximateBoolean) + + def contains(item: A): ApproximateBoolean = + if (maybeContains(item)) { + // The false positive probability (the probability that the Bloom filter erroneously + // claims that an element x is in the set when x is not) is roughly + // p = (1 - e^(-numHashes * setCardinality / width))^numHashes + // See: http://en.wikipedia.org/wiki/Bloom_filter#Probability_of_false_positives + // + // However, the true set cardinality may not be known. From empirical evidence, though, + // it is upper bounded with high probability by 1.1 * estimatedCardinality (as long as the + // Bloom filter is not too full), so we plug this into the formula instead. + // TODO: investigate this upper bound and density more closely (or derive a better formula). + val fpProb = + if (density > 0.95) + 1.0 // No confidence in the upper bound on cardinality. + else + scala.math.pow(1 - scala.math.exp(-numHashes * size.estimate * 1.1 / width), numHashes) + + ApproximateBoolean(true, 1 - fpProb) + } else { + // False negatives are not possible. + ApproximateBoolean.exactFalse + } + + /** + * This may be faster if you don't care about evaluating + * the false positive probability. + */ + def maybeContains(item: A): Boolean + + // Estimates the cardinality of the set of elements that have been + // inserted into the Bloom Filter. + def size: Approximate[Long] + + def toBitSet: BitSet + + /** + * Compute the Hamming distance between this Bloom filter and `that` filter. + * The distance is defined as the number of bits that + * need to change in order to transform one filter into the other.
+ */ + def hammingDistance(that: Hash): Int = + (this, that) match { + // Comparing with empty filter should give number + // of bits in other set + case (Empty, Empty) => 0 + case (Empty, y: Hash) => y.numBits + case (x: Hash, Empty) => x.numBits + + // Otherwise compare as bit sets + case _ => (this.toBitSet ^ that.toBitSet).size.toInt + } + + } + + case object Empty extends Hash { + override def toBitSet: BitSet = BitSet.empty + + override val numBits: Int = 0 + + override def ++(other: Hash): Hash = other + + override def +(other: A): Item = Item(other) + + override def checkAndAdd(other: A): (Hash, ApproximateBoolean) = + (this + other, ApproximateBoolean.exactFalse) + + override def contains(item: A): ApproximateBoolean = ApproximateBoolean.exactFalse + + override def maybeContains(item: A): Boolean = false + + override def size: Approximate[Long] = Approximate.exact[Long](0) + } + + case class Item(item: A) extends Hash { + override val numBits: Int = numHashes + + override def toBitSet: BitSet = BitSet(hashToArray(item)) + + override def ++(other: Hash): Hash = + other match { + case Empty => this + case Item(otherItem) => this + otherItem + case _ => other + item + } + + override def +(other: A): Hash = { + val bs = BitSet.newEmpty(0) + val hash = new Array[Int](numHashes) + + hashToArray(item, hash) + bs.mutableAdd(hash) + hashToArray(other, hash) + bs.mutableAdd(hash) + + Instance(bs) + } + + override def checkAndAdd(other: A): (Hash, ApproximateBoolean) = + if (other == item) { + (this, ApproximateBoolean.exactTrue) + } else { + (this + other, ApproximateBoolean.exactFalse) + } + + override def contains(x: A): ApproximateBoolean = ApproximateBoolean.exact(item == x) + + override def maybeContains(x: A): Boolean = item == x + + override def size: Approximate[Long] = Approximate.exact[Long](1) + } + + /* + * Bloom filter with 1 or more [[BitSet]]. + */ + case class Instance(bits: BitSet) extends Hash { + + /** + * The number of bits set to true + */ + override def numBits: Int = bits.size.toInt + + override val toBitSet: BitSet = bits + + override def ++(other: Hash): Hash = + other match { + case Empty => this + case Item(item) => this + item + case Instance(otherBits) => Instance(bits | otherBits) + } + + override def +(item: A): Hash = Instance(bits | BitSet(hashToArray(item))) + + override def checkAndAdd(other: A): (Hash, ApproximateBoolean) = + (this + other, contains(other)) + + override def maybeContains(item: A): Boolean = { + val il = hashToArray(item) + var idx = 0 + var found = true + while (idx < il.length && found) { + val i = il(idx) + if (!bits(i)) { + found = false + } + idx += 1 + } + found + } + + // use an approximation width of 0.05 + override def size: Approximate[Long] = + BloomFilter.sizeEstimate(numBits, numHashes, width, 0.05) + } + + implicit val monoid: Monoid[Hash] with BoundedSemilattice[Hash] = + new Monoid[Hash] with BoundedSemilattice[Hash] { + override val zero: Hash = Empty + + /** + * Assume the bloom filters are compatible (same width and same hashing functions). This + * is the union of the 2 bloom filters. 
+ */ + override def plus(left: Hash, right: Hash): Hash = left ++ right + + override def sum(t: TraversableOnce[Hash]): Hash = + if (t.iterator.isEmpty) empty + else { + val iter = t.iterator + var bs = BitSet.newEmpty(0) + val hash = new Array[Int](numHashes) + + while (iter.hasNext) { + iter.next() match { + case Empty => () + case Item(item) => + hashToArray(item, hash) + bs = bs.mutableAdd(hash) + case Instance(bitset) => + val iter = bitset.iterator + while (iter.hasNext) { + bs = bs.mutableAdd(iter.next()) + } + } + } + if (bs.isEmpty) Empty else Instance(bs) + } + + override def sumOption(t: TraversableOnce[Hash]): Option[Hash] = + if (t.iterator.isEmpty) None else Some(sum(t)) + } + + val aggregator: MonoidAggregator[A, Hash, Hash] = new MonoidAggregator[A, Hash, Hash] { + override val monoid: Monoid[Hash] = self.monoid + + override def prepare(value: A): Hash = Item(value) + + override def present(bf: Hash): Hash = bf + } + + implicit val equiv: Equiv[Hash] = new Equiv[Hash] { + override def equiv(a: Hash, b: Hash): Boolean = + (a eq b) || a.toBitSet.equals(b.toBitSet) + } + + /** + * Create a bloom filter with one item. + */ + def create(item: A): Hash = Item(item) + + /** + * Create a bloom filter with multiple items. + */ + def create(data: A*): Hash = create(data.iterator) + + /** + * Create a bloom filter with multiple items from an iterator. + */ + def create(data: Iterator[A]): Hash = monoid.sum(data.map(Item)) + + val empty: Hash = Empty + + /** + * Attempts to create a new BloomFilter instance from a [[BitSet]]. Failure might occur + * if the BitSet has a maximum entry beyond the expected width of the BloomFilter. + * + * This method is helpful for BloomFilter deserialization. Serialization is achieved + * through the serialization of the underlying [[BitSet]].
+ */ + def fromBitSet(bitSet: BitSet): Try[Hash] = + if (bitSet.isEmpty) { + Success(empty) + } else { + if (bitSet.reverseIterator.next() > width) + Failure(new IllegalArgumentException("BitSet beyond BloomFilter expected size")) + else + Success(Instance(bitSet)) + } + +} diff --git a/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala index 60aef7eff..2e7351954 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala @@ -20,7 +20,6 @@ import org.scalacheck.Arbitrary import org.scalacheck.Gen.choose import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec -import scala.collection.immutable class QTreeLaws extends CheckProperties { import BaseProperties._ @@ -36,7 +35,7 @@ class QTreeLaws extends CheckProperties { } class QTreeTest extends AnyWordSpec with Matchers { - def randomList(n: Long): immutable.IndexedSeq[Double] = + def randomList(n: Long): scala.collection.immutable.IndexedSeq[Double] = (1L to n).map(_ => math.random) def buildQTree(k: Int, list: Seq[Double]): QTree[Double] = { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala new file mode 100644 index 000000000..bef08a706 --- /dev/null +++ b/algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala @@ -0,0 +1,349 @@ +package com.twitter.algebird.immutable + +import org.scalacheck.Prop.{forAll, propBoolean} +import org.scalacheck.{Arbitrary, Gen, Prop, Properties} +import Arbitrary.{arbitrary => arb} + +object BitSetTest extends Properties("BitSet") { + + implicit val arbBitSet: Arbitrary[BitSet] = + Arbitrary { + val recur = Gen.lzy(arbBitSet.arbitrary) + + def onPair(fn: (BitSet, BitSet) => BitSet): Gen[BitSet] = + for { + a <- recur + b <- recur + } yield fn(a, b) + + def onItem(fn: (BitSet, Int) => BitSet): Gen[BitSet] = + for { + a <- recur + b <- arb[Int] + } yield fn(a, b) + + val genOffset: Gen[Int] = + Gen.choose(0, 1 << 16).map { multiple => + 2048 * multiple + } + + // intentionally create top level items without 0 offset + // to exercise more code paths that won't actually be hit in + // real code but are hard to statically prove won't be hit + val emptyOffset: Gen[BitSet] = genOffset.map(BitSet.newEmpty(_)) + + Gen.frequency( + (10, arb[List[Int]].map(xs => BitSet(xs: _*))), + // create a consecutive run: + (10, Gen.sized(max => arb[Int].map(init => BitSet((init until (init + max)): _*)))), + (1, BitSet.empty), + (1, emptyOffset), + (1, onPair(_ | _)), + (1, onPair(_ & _)), + (1, onPair(_ ^ _)), + (1, onPair(_ -- _)), + (1, onItem(_ + _)), + (1, onItem(_ - _)), + (1, recur.map(_.compact)) + ) + } + + property("limit/height consistency") = forAll { (x: BitSet) => + import x.{height, limit, offset} + (limit == (offset + (1L << (5 * height + 11)))) && (limit > offset) + } + + property("(x = y) = (x.toSet = y.toSet)") = forAll { (x: BitSet, y: BitSet) => + val xs = x.toSet + val ys = y.toSet + ((x == y) == (xs == ys)) :| s"($x == $y) == ($xs == $ys)" + } + + property("x.toSet == x.iterator.toSet") = forAll { (x: BitSet) => + (x.toSet == x.iterator.toSet) :| s"$x toSet == iterator.toSet" + } + + property("BitSet(set: _*).toSet = set") = forAll { (ns: Set[Int]) => + val x = BitSet(ns.toList: _*) + x.toSet == ns && ns.forall(x(_)) + } + + property("BitSet(x.toSet: _*) = x") = forAll 
{ (x: BitSet) => + val y = BitSet(x.iterator.toList: _*) + x == y + } + + property("x.iterator.size = x.size") = forAll { (x: BitSet) => + x.iterator.size == x.size + } + + property("(x = y) = (x.## = y.##)") = forAll(Gen.listOfN(100, arb[(BitSet, BitSet)])) { pairs => + // This is only approximately true, but failures are very rare, + // and without something like this its easy to end up with real + // hashing bugs. + def good(x: BitSet, y: BitSet) = (x == y) == (x.## == y.##) + + // collisions should happen less than 5% of the time + pairs.count { case (a, b) => good(a, b) } > 95 + } + + property("x.compact = x") = forAll { (x: BitSet) => + x.compact == x + } + property("x.isEmpty == (x.compact eq BitSet.Empty)") = forAll { (x: BitSet) => + (x.isEmpty == (x.compact eq BitSet.Empty)) :| s"$x isEmpty but not compact to Empty" + } + + property("x.isEmpty = (x.size = 0)") = forAll { (x: BitSet) => + x.isEmpty == (x.size == 0) + } + + property("!x.isEmpty == x.nonEmpty") = forAll { (x: BitSet) => + x.nonEmpty == (!x.isEmpty) + } + + property("BitSet.empty contains nothing") = forAll { (x: Int) => + !BitSet.empty(x) + } + + property("x.iterator.forall(x(_))") = forAll { (x: BitSet) => + x.iterator.forall(x(_)) + } + + property("(x + a)(a)") = forAll { (x: BitSet, a: Int) => + val y = x + a + y(a) :| s"$y(${a})" + } + + property("!(x - a)(a)") = forAll { (x: BitSet, a: Int) => + !(x - a)(a) + } + + property("x + a - a == x - a") = forAll { (x: BitSet, a: Int) => + ((x + a) - a) == (x - a) + } + + property("x + a + a = x + a") = forAll { (x: BitSet, a: Int) => + val once = x + a + (once + a) == once + } + + property("x - a - a = x - a") = forAll { (x: BitSet, a: Int) => + val once = x - a + (once - a) == once + } + + property("x.toSet + a == (x + a).toSet") = forAll { (x: BitSet, a: Int) => + x.toSet + a == (x + a).toSet + } + + property("x.toSet - a == (x - a).toSet") = forAll { (x: BitSet, a: Int) => + x.toSet - a == (x - a).toSet + } + + property("+ is commutative") = forAll { (ns: List[Int]) => + BitSet(ns: _*) == BitSet(ns.reverse: _*) + } + + property("- is commutative") = forAll { (x: BitSet, ns: List[Int]) => + ns.foldLeft(x)(_ - _) == ns.reverse.foldLeft(x)(_ - _) + } + + property("x | x = x") = forAll { (x: BitSet) => + (x | x) == x + } + + property("x | Empty = x") = forAll { (x: BitSet) => + val y = x | BitSet.empty + (y == x) :| s"$y ==\n$x" + } + + property("x | y = y | x") = forAll { (x: BitSet, y: BitSet) => + try { + val lhs = x | y + val rhs = y | x + (lhs == rhs) :| s"$lhs == $rhs" + } catch { case (e: Throwable) => e.printStackTrace; throw e } + } + + property("(x | y) | z = x | (y | z)") = forAll { (x: BitSet, y: BitSet, z: BitSet) => + try { + val lhs = ((x | y) | z).compact + val rhs = (x | (y | z)).compact + (lhs == rhs) :| s"$lhs == $rhs" + } catch { case (e: Throwable) => e.printStackTrace; throw e } + } + + property("(x | y)(z) == x(z) || y(z)") = forAll { (x: BitSet, y: BitSet, z: Int) => + // do apply first in case we mutate erroneously + def law(z: Int): Boolean = + (x(z) || y(z)) == (x | y)(z) + + law(z) && x.iterator.forall(law) && y.iterator.forall(law) + } + + property("x & x = x") = forAll { (x: BitSet) => + val y = x & x + (y == x) :| s"$y ==\n$x" + } + + property("x & Empty = Empty") = forAll { (x: BitSet) => + (x & BitSet.empty) == BitSet.empty + } + + property("x & y = y & x") = forAll { (x: BitSet, y: BitSet) => + (x & y) == (y & x) + } + + property("(x & y) & z = x & (y & z)") = forAll { (x: BitSet, y: BitSet, z: BitSet) => + ((x & y) & z) == (x & (y & 
z)) + } + + property("(x & y)(z) == x(z) && y(z)") = forAll { (x: BitSet, y: BitSet, z: Int) => + // do apply first in case we mutate erroneously + def law(z: Int): Boolean = + (x(z) && y(z)) == (x & y)(z) + + law(z) && x.iterator.forall(law) && y.iterator.forall(law) + } + + property("(x & (y | z) = (x & y) | (x & z)") = forAll { (x: BitSet, y: BitSet, z: BitSet) => + val lhs = x & (y | z) + val rhs = (x & y) | (x & z) + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("x.iterator.toList.reverse = x.reverseIterator.toList") = forAll { (x: BitSet) => + val lhs = x.iterator.toList.reverse + val rhs = x.reverseIterator.toList + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("((x ^ y) ^ z) = (x ^ (y ^ z))") = forAll { (x: BitSet, y: BitSet, z: BitSet) => + val lhs = (x ^ y) ^ z + val rhs = x ^ (y ^ z) + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("(x ^ y) = (y ^ x)") = forAll { (x: BitSet, y: BitSet) => + val lhs = x ^ y + val rhs = y ^ x + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("(x ^ x) = 0") = forAll { (x: BitSet) => + val lhs = x ^ x + val rhs = BitSet.empty + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("(x ^ 0) = x") = forAll { (x: BitSet) => + val lhs = x ^ BitSet.empty + val rhs = x + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("(x ^ y)(n) = x(n) ^ y(n)") = forAll { (x: BitSet, y: BitSet, n: Int) => + // do apply first in case we mutate erroneously + val rhs = x(n) ^ y(n) + val lhs = (x ^ y)(n) + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("(x ^ y) = ((x -- (x & y)) | (y -- (x & y)))") = forAll { (x: BitSet, y: BitSet) => + val xy = x & y + val lhs = x ^ y + val rhs = (x -- xy) | (y -- xy) + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("(x -- y)(n) = x(n) && (!y(n))") = forAll { (x: BitSet, y: BitSet, n: Int) => + // do apply first in case we mutate erroneously + val rhs = x(n) && (!y(n)) + val lhs = (x -- y)(n) + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("(x -- y).toSet = (x.toSet -- y.toSet)") = forAll { (x: BitSet, y: BitSet) => + val lhs = (x -- y).toSet + val rhs = x.toSet -- y.toSet + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("x -- x = 0") = forAll { (x: BitSet) => + val lhs = x -- x + val rhs = BitSet.empty + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("0 -- x = 0") = forAll { (x: BitSet) => + val lhs = BitSet.empty -- x + val rhs = BitSet.empty + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("x -- BitSet(n) = x - n") = forAll { (x: BitSet, n: Int) => + val lhs = x -- BitSet(n) + val rhs = x - n + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("x -- 0 = x") = forAll { (x: BitSet) => + val lhs = x -- BitSet.empty + (lhs == x) :| s"$lhs == $x" + } + + property("x -- y -- y = x -- y") = forAll { (x: BitSet, y: BitSet) => + val lhs = x -- y -- y + val rhs = x -- y + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("test ordering") = forAll { (x: BitSet, y: BitSet) => + val lhs = BitSet.orderingForBitSet.compare(x, y) + val rhs = Ordering.Iterable[Int].compare(x.iterator.toIterable, y.iterator.toIterable) + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("(x intersects y) = (y intersects x)") = forAll { (x: BitSet, y: BitSet) => + val lhs = x.intersects(y) + val rhs = y.intersects(x) + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("(x intersects y) = (x & y).nonEmpty") = forAll { (x: BitSet, y: BitSet) => + val lhs = x.intersects(y) + val rhs = (x & y).nonEmpty + (lhs == rhs) :| s"$lhs == $rhs" + } + + property("we never mutate the original item on +/-") = forAll { (x: BitSet, y: Int) => + def law(x: BitSet, ys: Set[Int], 
op: String)(fn: (BitSet, Int) => BitSet): Prop = + ys.map { y => + val init = x.iterator.toSet + fn(x, y) + val fin = x.iterator.toSet + (init == fin) :| s"$op for $y caused mutation: init: $init final: $fin" + }.reduce(_ && _) + + // try adding items close to x so they collide on the same lines + law(x, x.iterator.map(_ + 1).toSet + y, "+")(_ + _) && + law(x, x.iterator.map(_ + 1).toSet + y, "-")(_ - _) + } + + property("we never mutate the original item on |, &, ^, --") = forAll { (x: BitSet, y: BitSet) => + def law(a: BitSet, b: BitSet, nm: String)(op: (BitSet, BitSet) => BitSet): Prop = { + val inita = a.iterator.toSet + val initb = b.iterator.toSet + val _ = op(a, b) + ((a.iterator.toSet == inita) :| s"$a was initially $inita before $nm") && + ((b.iterator.toSet == initb) :| s"$b was initially $initb before $nm") + } + + law(x, y, "|")(_ | _) && + law(x, y, "&")(_ & _) && + law(x, y, "^")(_ ^ _) && + law(x, y, "--")(_ -- _) + } + +} diff --git a/algebird-test/src/test/scala/com/twitter/algebird/immutable/BloomFilterTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/immutable/BloomFilterTest.scala new file mode 100644 index 000000000..bbc0a74f7 --- /dev/null +++ b/algebird-test/src/test/scala/com/twitter/algebird/immutable/BloomFilterTest.scala @@ -0,0 +1,459 @@ +package com.twitter.algebird.immutable + +import java.io.{ByteArrayOutputStream, ObjectOutputStream} + +import org.scalacheck.{Arbitrary, Gen} +import org.scalacheck.Prop._ +import org.scalatest.matchers.should.Matchers +import org.scalatest.wordspec.AnyWordSpec +import com.twitter.algebird.{ + ApproximateProperties, + ApproximateProperty, + Bytes, + CheckProperties, + Hash128, + Monoid, + MurmurHash128 +} + +object BloomFilterTestUtils { + def toDense[A](bloomFilter: BloomFilter[A])(bf: bloomFilter.Hash): bloomFilter.Hash = bf match { + case bloomFilter.Item(item) => + val bs = bloomFilter.hashToArray(item).foldLeft(BitSet.empty)(_ + _) + bloomFilter.Instance(bs) + case bfi => bfi + } +} + +class ImmutableBloomFilterLaws extends CheckProperties { + + import com.twitter.algebird.BaseProperties._ + import BloomFilterTestUtils._ + + val bf: BloomFilter[String] = BloomFilter[String](6, 12) + import bf._ + + implicit val bfGen: Arbitrary[bf.Hash] = + Arbitrary { + val item = Gen.choose(0, 10000).map(v => bf.create(v.toString)) + val zero = Gen.const(Monoid.zero) + val sparse = Gen.listOf(item).map { its => + Monoid.sum(its) + } + val dense = Gen.listOf(item).map { its => + toDense(bf)(Monoid.sum(its)) + } + Gen.frequency((1, zero), (5, item), (10, sparse), (10, dense)) + } + + property("BloomFilter is a Monoid") { + commutativeMonoidLaws[bf.Hash] + } + + property("++ is the same as plus") { + forAll((a: bf.Hash, b: bf.Hash) => Equiv[bf.Hash].equiv(a ++ b, Monoid.plus(a, b))) + } + + property("the distance between a filter and itself should be 0") { + forAll((a: bf.Hash) => a.hammingDistance(a) == 0) + } + + property( + "the distance between a filter and an empty filter should be the number of bits" + + "set in the existing filter" + ) { + forAll((a: bf.Hash) => a.hammingDistance(Monoid.zero) == a.numBits) + } + + property("all equivalent filters should have 0 Hamming distance") { + forAll { (a: bf.Hash, b: bf.Hash) => + if (Equiv[bf.Hash].equiv(a, b)) + a.hammingDistance(b) == 0 + else { + val dist = a.hammingDistance(b) + (dist > 0) && (dist <= a.width) + } + } + } + + property("distance between filters should be symmetrical") { + forAll((a: bf.Hash, b: bf.Hash) => a.hammingDistance(b) == b.hammingDistance(a)) + } + + 
property("+ is the same as adding with create") { + forAll { (a: bf.Hash, b: String) => + Equiv[bf.Hash].equiv(a + b, Monoid.plus(a, bf.create(b))) + } + } + + property("maybeContains is consistent with contains") { + forAll((a: bf.Hash, b: String) => a.maybeContains(b) == a.contains(b).isTrue) + } + + property("after + maybeContains is true") { + forAll((a: bf.Hash, b: String) => (a + b).maybeContains(b)) + } + + property("checkAndAdd works like check the add") { + forAll { (a: bf.Hash, b: String) => + val (next, check) = a.checkAndAdd(b) + val next1 = a + b + + Equiv[bf.Hash].equiv(next, next1) && + (check == a.contains(b)) + } + } + + property("a ++ a = a for BF") { + forAll((a: bf.Hash) => Equiv[bf.Hash].equiv(a ++ a, a)) + } + + property("BF Instance has 1 or more BitSet") { + forAll { (a: bf.Hash) => + a match { + case bf.Instance(bs) => bs.size >= 1 + case _ => true + } + } + } + +} + +class ImmutableBloomFilterHashIndices extends CheckProperties { + + implicit val bf: Arbitrary[BloomFilter[String]] = + Arbitrary { + for { + hashes <- Gen.choose(1, 10) + width <- Gen.choose(100, 5000000) + } yield BloomFilter[String](hashes, width) + } + + property("Indices are non negative") { + forAll((bf: BloomFilter[String], v: Long) => bf.hashToArray(v.toString).forall(e => e >= 0)) + } + + /** + * This is the version of the Hash as of before the "negative values fix" + */ + case class NegativeHash(numHashes: Int, width: Int) { + val size = numHashes + + def apply(s: String): Stream[Int] = nextHash(s.getBytes, numHashes) + + private def splitLong(x: Long) = { + val upper = math.abs(x >> 32).toInt + val lower = math.abs((x << 32) >> 32).toInt + (upper, lower) + } + + private def nextHash(bytes: Array[Byte], k: Int, digested: Seq[Int] = Seq.empty): Stream[Int] = + if (k == 0) + Stream.empty + else { + val d = if (digested.isEmpty) { + val (a, b) = MurmurHash128(k)(bytes) + val (x1, x2) = splitLong(a) + val (x3, x4) = splitLong(b) + Seq(x1, x2, x3, x4) + } else + digested + + Stream.cons(d(0) % width, nextHash(bytes, k - 1, d.drop(1))) + } + } + + implicit val pairOfHashes: Arbitrary[(BloomFilter[String], NegativeHash)] = + Arbitrary { + for { + hashes <- Gen.choose(1, 10) + width <- Gen.choose(100, 5000000) + } yield (BloomFilter[String](hashes, width), NegativeHash(hashes, width)) + } + + property( + "Indices of the two versions of Hashes are the same, unless the first one contains negative index" + ) { + forAll { (pair: (BloomFilter[String], NegativeHash), v: Long) => + val s = v.toString + val (bf, negativeHash) = pair + val indices = negativeHash.apply(s) + (indices == (bf.hashToArray(s).toStream)) || indices.exists(_ < 0) + } + } +} + +class BloomFilterFalsePositives[T: Gen: Hash128](falsePositiveRate: Double) extends ApproximateProperty { + + type Exact = Set[T] + type Approx = BloomFilter[T]#Hash + + type Input = T + type Result = Boolean + + val maxNumEntries = 1000 + + def exactGenerator = + for { + numEntries <- Gen.choose(1, maxNumEntries) + set <- Gen.containerOfN[Set, T](numEntries, implicitly[Gen[T]]) + } yield set + + def makeApproximate(set: Set[T]) = { + val bfMonoid = BloomFilter[T](set.size, falsePositiveRate) + + val values = set.toSeq + bfMonoid.create(values: _*) + } + + def inputGenerator(set: Set[T]) = + for { + randomValues <- Gen.listOfN[T](set.size, implicitly[Gen[T]]) + x <- Gen.oneOf((set ++ randomValues).toSeq) + } yield x + + def exactResult(s: Set[T], t: T) = s.contains(t) + + def approximateResult(bf: BloomFilter[T]#Hash, t: T) = bf.contains(t) +} + +class 
BloomFilterCardinality[T: Gen: Hash128] extends ApproximateProperty { + + type Exact = Set[T] + type Approx = BloomFilter[T]#Hash + + type Input = Unit + type Result = Long + + val maxNumEntries = 10000 + val falsePositiveRate = 0.01 + + def exactGenerator = + for { + numEntries <- Gen.choose(1, maxNumEntries) + set <- Gen.containerOfN[Set, T](numEntries, implicitly[Gen[T]]) + } yield set + + def makeApproximate(set: Set[T]) = { + val bfMonoid = BloomFilter[T](set.size, falsePositiveRate) + + val values = set.toSeq + bfMonoid.create(values: _*) + } + + def inputGenerator(set: Set[T]) = Gen.const(()) + + def exactResult(s: Set[T], u: Unit) = s.size + def approximateResult(bf: BloomFilter[T]#Hash, u: Unit) = bf.size +} + +class ImmutableBloomFilterProperties extends ApproximateProperties("BloomFilter") { + import ApproximateProperty.toProp + + for (falsePositiveRate <- List(0.1, 0.01, 0.001)) { + property(s"has small false positive rate with false positive rate = $falsePositiveRate") = { + implicit val intGen = Gen.choose(1, 1000) + toProp(new BloomFilterFalsePositives[Int](falsePositiveRate), 50, 50, 0.01) + } + } + + property("approximate cardinality") = { + implicit val intGen = Gen.choose(1, 1000) + toProp(new BloomFilterCardinality[Int], 50, 1, 0.01) + } +} + +class ImmutableBloomFilterTest extends AnyWordSpec with Matchers { + + val RAND = new scala.util.Random + + "BloomFilter" should { + + "be possible to create from an iterator" in { + val bloomFilter = BloomFilter[String](RAND.nextInt(5) + 1, RAND.nextInt(64) + 32) + val entries = (0 until 100).map(_ => RAND.nextInt.toString) + val bf = bloomFilter.create(entries.iterator) + assert(bf.isInstanceOf[bloomFilter.Hash]) + } + + "be possible to create from a sequence" in { + val bloomFilter = BloomFilter[String](RAND.nextInt(5) + 1, RAND.nextInt(64) + 32) + val entries = (0 until 100).map(_ => RAND.nextInt.toString) + val bf = bloomFilter.create(entries: _*) + assert(bf.isInstanceOf[bloomFilter.Hash]) + } + + "be possible to create from a BitSet" in { + val bloomFilter = BloomFilter[String](RAND.nextInt(5) + 1, RAND.nextInt(64) + 32) + val entries = (0 until 100).map(_ => RAND.nextInt.toString) + val bf = bloomFilter.create(entries: _*) + + val instance = bloomFilter.fromBitSet(bf.toBitSet) + assert(instance.isSuccess) + } + + "be possible to create from a empty BitSet" in { + val bloomFilter = BloomFilter[String](RAND.nextInt(5) + 1, RAND.nextInt(64) + 32) + val instance = bloomFilter.fromBitSet(BitSet.empty) + assert(instance.isSuccess) + } + + "fail to create from a larger BitSet" in { + val bloomFilter = BloomFilter[String](6, 0.01) + val entries = (0 until 6).map(_ => RAND.nextInt.toString) + val bf = bloomFilter.create(entries: _*) + + val instance = BloomFilter[String](6, 0.1).fromBitSet(bf.toBitSet) + assert(instance.isFailure) + } + + "identify all true positives" in { + (0 to 100).foreach { _ => + val bloomFilter = BloomFilter[String](RAND.nextInt(5) + 1, RAND.nextInt(64) + 32) + val numEntries = 5 + val entries = (0 until numEntries).map(_ => RAND.nextInt.toString) + val bf = bloomFilter.create(entries: _*) + + entries.foreach { i => + assert(bf.contains(i).isTrue) + } + } + } + + "have small false positive rate" in { + val iter = 10000 + + Seq(0.1, 0.01, 0.001).foreach { fpProb => + val fps = (0 until iter).map { _ => + val numEntries = RAND.nextInt(10) + 1 + + val bfMonoid = BloomFilter[String](numEntries, fpProb) + + val entries = RAND + .shuffle((0 until 1000).toList) + .take(numEntries + 1) + .map(_.toString) + val bf = 
bfMonoid.create(entries.drop(1): _*) + + if (bf.contains(entries(0)).isTrue) 1.0 else 0.0 + } + + val observedFpProb = fps.sum / fps.size + + // the 2.5 is a fudge factor to make the probability of it low + // in tests + assert(observedFpProb <= 2.5 * fpProb) + } + } + + "approximate cardinality" in { + val bloomFilter = BloomFilter[String](10, 100000) + + Seq(10, 100, 1000, 10000).foreach { exactCardinality => + val items = (1 until exactCardinality).map(_.toString) + val bf = bloomFilter.create(items: _*) + val size = bf.size + + assert(size ~ exactCardinality) + assert(size.min <= size.estimate) + assert(size.max >= size.estimate) + } + } + + "work as an Aggregator" in { + (0 to 10).foreach { _ => + val bloomFilter = BloomFilter[String](RAND.nextInt(5) + 1, RAND.nextInt(64) + 32) + import bloomFilter.aggregator + + val numEntries = 5 + val entries = (0 until numEntries).map(_ => RAND.nextInt.toString) + val bf = aggregator(entries) + + entries.foreach(i => assert(bf.contains(i.toString).isTrue)) + } + } + + "not serialize @transient dense Instance" in { + val bloomFilter = BloomFilter[String](10, 0.1) + + def serialize(bf: bloomFilter.Hash): Array[Byte] = { + val stream = new ByteArrayOutputStream() + val out = new ObjectOutputStream(stream) + out.writeObject(bf) + out.close() + stream.close() + stream.toByteArray + } + + val bf = bloomFilter.create((1 until 10).map(_.toString): _*) + val bytesBeforeSizeCalled = Bytes(serialize(bf)) + val beforeSize = bf.size + assert(bf.contains("1").isTrue) + val bytesAfterSizeCalled = Bytes(serialize(bf)) + assert(bytesBeforeSizeCalled.size == bytesAfterSizeCalled.size) + assert(beforeSize == bf.size) + } + + /** + * this test failed before the fix for https://github.com/twitter/algebird/issues/229 + */ + "not have negative hash values" in { + val bf = BloomFilter[String](2, 4752800) + val s = "7024497610539761509" + val index = bf.hashToArray(s).head + + assert(index >= 0) + } + } + + "BloomFilter method `checkAndAdd`" should { + + "be identical to method `+`" in { + (0 to 100).foreach { _ => + val bloomFilter = BloomFilter[String](RAND.nextInt(5) + 1, RAND.nextInt(64) + 32) + import bloomFilter._ + + val numEntries = 5 + val entries = (0 until numEntries).map(_ => RAND.nextInt.toString) + val bf = bloomFilter.create(entries: _*) + entries + .map(entry => (entry, bloomFilter.create(entry))) + .foldLeft((Monoid.zero, Monoid.zero)) { case ((left, leftAlt), (entry, _)) => + val (newLeftAlt, contained) = leftAlt.checkAndAdd(entry) + left.contains(entry) shouldBe contained + (left + entry, newLeftAlt) + } + + entries.foreach(i => assert(bf.contains(i.toString).isTrue)) + } + } + } + + "BloomFilters" should { + "be able to compute Hamming distance to each other" in { + import BloomFilterTestUtils._ + + val bf = BloomFilter[String](3, 64) + + val firstBloomFilter = bf.create(Seq("A").iterator) + val secondBloomFilter = bf.create(Seq("C").iterator) + + val distance1 = firstBloomFilter.hammingDistance(secondBloomFilter) + assert(distance1 === 4) + + val thirdBloomFilter = bf.create(Seq("A", "B", "C").iterator) + // Make it dense to make sure that that case is also covered + // even though these examples are small and thus sparse. 
+ val forthBloomFilter = toDense(bf)(bf.create(Seq("C", "D", "E").iterator)) + + val distance2 = thirdBloomFilter.hammingDistance(forthBloomFilter) + assert(distance2 === 8) + + val emptyBloomFilter = bf.create(Iterator.empty) + val distanceToEmpty = thirdBloomFilter.hammingDistance(emptyBloomFilter) + assert(distanceToEmpty === thirdBloomFilter.numBits) + + } + } + +} From 04abc9ac724e72c6c9c6585904c90ed7f9926f29 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 6 Nov 2020 19:15:38 +0100 Subject: [PATCH 117/306] Update scalacheck to 1.15.1 (#889) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index a21b77f75..705d687d5 100644 --- a/build.sbt +++ b/build.sbt @@ -10,7 +10,7 @@ val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.2" val scalaTestPlusVersion = "3.1.0.0-RC2" -val scalacheckVersion = "1.15.0" +val scalacheckVersion = "1.15.1" val scalaCollectionCompat = "2.2.0" val utilVersion = "20.10.0" val sparkVersion = "2.4.7" From 4abd0f958137879fad410e8592af6d50915f6219 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 9 Nov 2020 10:56:03 +0100 Subject: [PATCH 118/306] Update scalatest to 3.2.3 (#890) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 705d687d5..9ace01fa0 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val javaEwahVersion = "1.1.7" val kindProjectorVersion = "0.11.0" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.2" +val scalaTestVersion = "3.2.3" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.1" val scalaCollectionCompat = "2.2.0" From 33c935e407a42174c68873312c9584cca1295e14 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 20 Nov 2020 13:43:41 +0100 Subject: [PATCH 119/306] Update sbt to 1.4.3 (#891) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index c19c768d6..947bdd302 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.2 +sbt.version=1.4.3 From 286d55fc0e7461e2d11c1bea43ba4861439e3091 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sat, 21 Nov 2020 14:13:52 +0000 Subject: [PATCH 120/306] Add dependabot config (#893) --- .github/dependabot.yml | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..c436f0a6e --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,8 @@ +version: 2 +updates: + - package-ecosystem: github-actions + directory: "/" + schedule: + interval: daily + time: "04:00" + open-pull-requests-limit: 10 From 8b82efc7db5cef89091bd6775a46e91bcc5ff5af Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 21 Nov 2020 09:18:03 -1000 Subject: [PATCH 121/306] Bump olafurpg/setup-scala from v7 to v10 (#895) Bumps [olafurpg/setup-scala](https://github.com/olafurpg/setup-scala) from v7 to v10. 
- [Release notes](https://github.com/olafurpg/setup-scala/releases) - [Commits](https://github.com/olafurpg/setup-scala/compare/v7...a2db88fb5c57396fbc1ff29a4bb0420072a3d6d8) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f39ddeca7..bf11c42d7 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 - - uses: olafurpg/setup-scala@v7 + - uses: olafurpg/setup-scala@v10 - uses: olafurpg/setup-gpg@v2 - name: Publish ${{ github.ref }} run: sbt ci-release From 678325f6c76d9e961737829091120236a84adbe3 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sun, 22 Nov 2020 19:28:26 +0000 Subject: [PATCH 122/306] Add Github Actions (#898) --- .github/workflows/ci.yml | 79 +++++++++++++++++++++++++++++++++++++++ .travis.yml | 81 ---------------------------------------- 2 files changed, 79 insertions(+), 81 deletions(-) create mode 100644 .github/workflows/ci.yml delete mode 100644 .travis.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 000000000..7d8a36620 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,79 @@ +name: ci +on: [push, pull_request] + +jobs: + checks: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2.3.4 + - name: cache SBT + uses: coursier/cache-action@v5 + - name: Java 11 setup + uses: olafurpg/setup-scala@v10 + - run: sbt "; scalafmtCheckAll; scalafmtSbtCheck" "; scalafixEnable; scalafixAll --check" + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2.3.4 + - name: cache SBT + uses: coursier/cache-action@v5 + - name: java ${{matrix.java}} setup + uses: olafurpg/setup-scala@v10 + with: + java-version: ${{matrix.java}} + - run: sbt "++${{matrix.scala}} test" + strategy: + matrix: + java: + - 8 + - 11 + scala: + - 2.11.12 + - 2.12.12 + - 2.13.3 + test-coverage: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2.3.4 + - name: cache SBT + uses: coursier/cache-action@v5 + - name: java ${{matrix.java}} setup + uses: olafurpg/setup-scala@v10 + - run: | + sbt coverage test coverageReport + bash <(curl -s https://codecov.io/bash) + mimaReport: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2.3.4 + - name: cache SBT + uses: coursier/cache-action@v5 + - name: java ${{matrix.java}} setup + uses: olafurpg/setup-scala@v10 + with: + java-version: ${{matrix.java}} + - run: sbt "++${{matrix.scala}} mimaReportBinaryIssues" + strategy: + matrix: + java: + - 8 + - 11 + scala: + - 2.11.12 + - 2.12.12 + microsite: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2.3.4 + - name: Ruby setup + uses: actions/setup-ruby@v1.1.2 + with: + ruby-version: 2.6 + - run: | + gem install sass + gem install jekyll -v 3.2.1 + - name: cache SBT + uses: coursier/cache-action@v5 + - name: java ${{matrix.java}} setup + uses: olafurpg/setup-scala@v10 + - run: sbt docs/makeMicrosite diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 1230f0266..000000000 --- a/.travis.yml +++ /dev/null @@ -1,81 +0,0 @@ -language: scala -sudo: false -matrix: - include: - - name: checks - scala: 2.11.12 - jdk: openjdk8 - script: sbt \ - "; scalafmtCheckAll; scalafmtSbtCheck" \ - "; scalafixEnable; scalafix --check; test:scalafix --check" - - - scala: 2.11.12 - jdk: openjdk8 
- script: sbt \ - coverage \ - "++$TRAVIS_SCALA_VERSION clean" \ - "++$TRAVIS_SCALA_VERSION test" \ - "++$TRAVIS_SCALA_VERSION coverageReport" \ - "++$TRAVIS_SCALA_VERSION mimaReportBinaryIssues" - after_success: - - bash <(curl -s https://codecov.io/bash) - - - scala: 2.11.12 - jdk: openjdk11 - script: sbt \ - "++$TRAVIS_SCALA_VERSION clean" \ - "++$TRAVIS_SCALA_VERSION test" - - - scala: 2.12.12 - jdk: openjdk8 - before_install: - - export PATH=${PATH}:./vendor/bundle - install: - - rvm use 2.6.1 --install --fuzzy - - gem install sass - - gem install jekyll -v 3.2.1 - script: sbt \ - "++$TRAVIS_SCALA_VERSION clean" \ - "++$TRAVIS_SCALA_VERSION test" \ - "++$TRAVIS_SCALA_VERSION docs/makeMicrosite" \ - "++$TRAVIS_SCALA_VERSION mimaReportBinaryIssues" - - - scala: 2.12.12 - jdk: openjdk11 - script: sbt \ - "++$TRAVIS_SCALA_VERSION clean" \ - "++$TRAVIS_SCALA_VERSION test" - - - scala: 2.13.3 - jdk: openjdk8 - script: sbt \ - "++$TRAVIS_SCALA_VERSION clean" \ - "++$TRAVIS_SCALA_VERSION test" - - - scala: 2.13.3 - jdk: openjdk11 - script: sbt \ - "++$TRAVIS_SCALA_VERSION clean" \ - "++$TRAVIS_SCALA_VERSION test" - -cache: - directories: - - $HOME/.cache - - $HOME/.sbt/boot/scala* - - $HOME/.sbt/launchers - - $HOME/.ivy2/cache - - $HOME/.nvm - -before_cache: - - du -h -d 1 $HOME/.ivy2/cache - - du -h -d 2 $HOME/.sbt/ - - find $HOME/.sbt -name "*.lock" -type f -delete - - find $HOME/.ivy2/cache -name "ivydata-*.properties" -type f -delete - -notifications: - webhooks: - urls: - - https://webhooks.gitter.im/e/83f5f34730d7a004992f - on_success: change - on_failure: always - on_start: never From 1842f735d46771d9b9b67c80d28fbc3acb84fb07 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 22 Nov 2020 09:36:02 -1000 Subject: [PATCH 123/306] Bump olafurpg/setup-gpg from v2 to v3 (#896) Bumps [olafurpg/setup-gpg](https://github.com/olafurpg/setup-gpg) from v2 to v3. 
- [Release notes](https://github.com/olafurpg/setup-gpg/releases) - [Commits](https://github.com/olafurpg/setup-gpg/compare/v2...a62c5903e98d692e520bc2f46103b919a61dd386) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index bf11c42d7..9cfdc559d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -9,7 +9,7 @@ jobs: steps: - uses: actions/checkout@v1 - uses: olafurpg/setup-scala@v10 - - uses: olafurpg/setup-gpg@v2 + - uses: olafurpg/setup-gpg@v3 - name: Publish ${{ github.ref }} run: sbt ci-release env: From cf6d8a14e58567fcb66ebb8712a5678fcc9e0573 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 23 Nov 2020 17:37:20 +0100 Subject: [PATCH 124/306] Update sbt to 1.4.4 (#899) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 947bdd302..7de0a9382 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.3 +sbt.version=1.4.4 From 7cea22239da80fa0a45036906204cde8aae85544 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 23 Nov 2020 17:59:49 +0100 Subject: [PATCH 125/306] Update kind-projector to 0.11.1 (#892) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 9ace01fa0..49b85c46e 100644 --- a/build.sbt +++ b/build.sbt @@ -5,7 +5,7 @@ import pl.project13.scala.sbt.JmhPlugin val algebraVersion = "2.0.0" val bijectionVersion = "0.9.7" val javaEwahVersion = "1.1.7" -val kindProjectorVersion = "0.11.0" +val kindProjectorVersion = "0.11.1" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.3" From ec9598ee31fb96e4afe14e7898f64822d2f2ae08 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 24 Nov 2020 13:40:15 +0100 Subject: [PATCH 126/306] Update scala-collection-compat to 2.3.0 (#894) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 49b85c46e..2990c7e4d 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.3" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.1" -val scalaCollectionCompat = "2.2.0" +val scalaCollectionCompat = "2.3.0" val utilVersion = "20.10.0" val sparkVersion = "2.4.7" From 45c60ae4f0d44c0d940dd8f5dfc9545d2f78fc23 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 25 Nov 2020 11:39:13 +0100 Subject: [PATCH 127/306] Update scala-collection-compat to 2.3.1 (#900) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 2990c7e4d..14662b094 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.3" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.1" -val scalaCollectionCompat = "2.3.0" +val scalaCollectionCompat = "2.3.1" val utilVersion = "20.10.0" val sparkVersion = "2.4.7" From 1ff31dc328a43da8caf170128280520ffbc14e21 Mon Sep 17 00:00:00 2001 From: Scala Steward 
<43047562+scala-steward@users.noreply.github.com> Date: Mon, 30 Nov 2020 22:34:54 +0100 Subject: [PATCH 128/306] Update sbt-scalafix to 0.9.24 (#901) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index dfeb50446..4be093388 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,5 +12,5 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.23") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.24") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.4") From 33a2aef81c92ff2970b42d96df0e8c803e6bd5cd Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 3 Dec 2020 10:35:19 +0100 Subject: [PATCH 129/306] Update kind-projector to 0.11.2 (#902) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 14662b094..b6e57fed4 100644 --- a/build.sbt +++ b/build.sbt @@ -5,7 +5,7 @@ import pl.project13.scala.sbt.JmhPlugin val algebraVersion = "2.0.0" val bijectionVersion = "0.9.7" val javaEwahVersion = "1.1.7" -val kindProjectorVersion = "0.11.1" +val kindProjectorVersion = "0.11.2" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.3" From 1640dd50cbeed5c1399fe86c1ebe71244a904325 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 5 Dec 2020 09:24:19 +0100 Subject: [PATCH 130/306] Update sbt-ci-release to 1.5.5 (#903) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 4be093388..550dad77d 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -13,4 +13,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.24") -addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.4") +addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.5") From b7f4d77f8514afed631ff63d652bd6b7a1bcb0b7 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 11 Dec 2020 07:13:27 +0100 Subject: [PATCH 131/306] Update util-core to 20.12.0 (#904) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index b6e57fed4..1991bb908 100644 --- a/build.sbt +++ b/build.sbt @@ -12,7 +12,7 @@ val scalaTestVersion = "3.2.3" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.1" val scalaCollectionCompat = "2.3.1" -val utilVersion = "20.10.0" +val utilVersion = "20.12.0" val sparkVersion = "2.4.7" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = From aba7e6ded5306e90780b69437bc27cbfd104fd91 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 14 Dec 2020 12:38:40 +0100 Subject: [PATCH 132/306] Update sbt to 1.4.5 (#905) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 7de0a9382..c06db1bb2 100644 --- a/project/build.properties +++ b/project/build.properties 
@@ -1 +1 @@ -sbt.version=1.4.4 +sbt.version=1.4.5 From 99432ee1b6f790969f4543c75041072cdb1e389f Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 21 Dec 2020 12:00:22 +0100 Subject: [PATCH 133/306] Update scala-collection-compat to 2.3.2 (#907) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 1991bb908..20bf31ff4 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.3" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.1" -val scalaCollectionCompat = "2.3.1" +val scalaCollectionCompat = "2.3.2" val utilVersion = "20.12.0" val sparkVersion = "2.4.7" From a0c9976bc45fe5fcd7b79e1f2f25f650abdaa96d Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 23 Dec 2020 10:43:05 +0100 Subject: [PATCH 134/306] Update scalacheck to 1.15.2 (#906) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 20bf31ff4..81633c031 100644 --- a/build.sbt +++ b/build.sbt @@ -10,7 +10,7 @@ val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.3" val scalaTestPlusVersion = "3.1.0.0-RC2" -val scalacheckVersion = "1.15.1" +val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.3.2" val utilVersion = "20.12.0" val sparkVersion = "2.4.7" From aab8e047a1b13a8831d628413bc0a0ec9df3af10 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 28 Dec 2020 22:03:46 +0100 Subject: [PATCH 135/306] Update sbt to 1.4.6 (#909) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index c06db1bb2..d91c272d4 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.5 +sbt.version=1.4.6 From 279a441079f81c6c2e76e33d0dac03bcbf806886 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 15 Jan 2021 19:42:50 +0100 Subject: [PATCH 136/306] Update sbt-scalafix to 0.9.25 (#911) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 550dad77d..ad50a18e4 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,5 +12,5 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.24") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.25") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.5") From 0856e87b091bf6123c6aa10ec3e9d1a897272afd Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 16 Jan 2021 01:00:55 +0100 Subject: [PATCH 137/306] Update util-core to 21.1.0 (#912) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 81633c031..a5d55a512 100644 --- a/build.sbt +++ b/build.sbt @@ -12,7 +12,7 @@ val scalaTestVersion = "3.2.3" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.3.2" -val utilVersion = "20.12.0" +val utilVersion = "21.1.0" val sparkVersion = "2.4.7" def 
scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = From 52926aaa20df109f109315bfe95daa7291df585c Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sat, 16 Jan 2021 10:53:48 +0000 Subject: [PATCH 138/306] Update kind projector to 0.11.3 (#914) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index a5d55a512..798ecdab6 100644 --- a/build.sbt +++ b/build.sbt @@ -5,7 +5,7 @@ import pl.project13.scala.sbt.JmhPlugin val algebraVersion = "2.0.0" val bijectionVersion = "0.9.7" val javaEwahVersion = "1.1.7" -val kindProjectorVersion = "0.11.2" +val kindProjectorVersion = "0.11.3" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.3" From 54b7b0ae0cd9e41c38d41b9cf3a5c773b317ef39 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 27 Jan 2021 04:20:10 +0100 Subject: [PATCH 139/306] Update scala-collection-compat to 2.4.0 (#916) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 798ecdab6..3f787d6c0 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.3" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" -val scalaCollectionCompat = "2.3.2" +val scalaCollectionCompat = "2.4.0" val utilVersion = "21.1.0" val sparkVersion = "2.4.7" From 49f66ca62af8743d6ce20d8049155c877df1c366 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 31 Jan 2021 11:07:36 +0100 Subject: [PATCH 140/306] Update sbt to 1.4.7 (#919) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index d91c272d4..0b2e09c5a 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.6 +sbt.version=1.4.7 From ff8b0e682da8a07cdfd7f8074fe625bfe6dc6155 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 31 Jan 2021 11:08:00 +0100 Subject: [PATCH 141/306] Update scala-collection-compat to 2.4.1 (#918) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 3f787d6c0..f7306d55b 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.3" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" -val scalaCollectionCompat = "2.4.0" +val scalaCollectionCompat = "2.4.1" val utilVersion = "21.1.0" val sparkVersion = "2.4.7" From 47c37a9cb9e877e181b1c7aa6e106df5d9a30a66 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Mon, 1 Feb 2021 19:34:06 +0000 Subject: [PATCH 142/306] Update docs site (#920) --- build.sbt | 6 ++---- docs/src/main/{tut => mdoc}/cookbook.md | 0 .../main/{tut => mdoc}/cookbook/cookbook.md | 0 docs/src/main/{tut => mdoc}/datatypes.md | 0 .../datatypes/adaptive_matrix.md | 0 .../datatypes/adaptive_vector.md | 0 .../datatypes/affine_function.md | 0 .../main/{tut => mdoc}/datatypes/approx.md | 0 .../datatypes/approx/approximate.md | 0 .../datatypes/approx/bloom_filter.md | 2 +- .../datatypes/approx/countminsketch.md | 8 ++++---- .../datatypes/approx/exponential_histogram.md | 6 +++--- .../datatypes/approx/hyperloglog.md | 16 +++++++-------- .../datatypes/approx/hyperloglog_series.md | 0 .../datatypes/approx/min_hasher.md | 0 .../{tut => 
mdoc}/datatypes/approx/q_tree.md | 2 +- .../datatypes/approx/space_saver.md | 2 +- .../{tut => mdoc}/datatypes/averaged_value.md | 18 ++++++++--------- .../src/main/{tut => mdoc}/datatypes/bytes.md | 0 .../{tut => mdoc}/datatypes/combinator.md | 0 .../datatypes/combinator/caseclass.md | 0 .../datatypes/combinator/collections.md | 2 +- .../datatypes/combinator/cuber.md | 0 .../datatypes/combinator/eventually.md | 0 .../datatypes/combinator/java_collections.md | 0 .../datatypes/combinator/monoid_statistics.md | 0 .../datatypes/combinator/option_monoid.md | 0 .../datatypes/combinator/priority.md | 0 .../datatypes/combinator/product_algebra.md | 0 .../datatypes/combinator/roller.md | 0 .../{tut => mdoc}/datatypes/decayed_value.md | 4 ++-- .../{tut => mdoc}/datatypes/decayed_vector.md | 0 .../{tut => mdoc}/datatypes/first_and_last.md | 14 ++++++------- .../{tut => mdoc}/datatypes/five_moments.md | 0 .../datatypes/gaussian_distribution.md | 0 .../main/{tut => mdoc}/datatypes/interval.md | 0 .../{tut => mdoc}/datatypes/min_and_max.md | 20 +++++++++---------- .../datatypes/min_plus_algebra.md | 0 .../{tut => mdoc}/datatypes/reset_state.md | 0 .../{tut => mdoc}/datatypes/right_folded.md | 0 .../main/{tut => mdoc}/datatypes/set_diff.md | 0 docs/src/main/{tut => mdoc}/datatypes/sgd.md | 0 .../main/{tut => mdoc}/datatypes/summer.md | 0 .../datatypes/summer/adaptive_cache.md | 0 .../{tut => mdoc}/datatypes/summer/batched.md | 0 .../{tut => mdoc}/datatypes/summer/sum_all.md | 0 .../datatypes/summer/summingcache.md | 0 .../datatypes/summer/summingiterator.md | 0 .../datatypes/summer/summingqueue.md | 0 docs/src/main/{tut => mdoc}/datatypes/topk.md | 0 docs/src/main/{tut => mdoc}/faq.md | 0 docs/src/main/{tut => mdoc}/index.md | 2 +- .../{tut => mdoc}/resources_for_learners.md | 0 docs/src/main/{tut => mdoc}/typeclasses.md | 0 .../typeclasses/abstract_algebra.md | 0 .../{tut => mdoc}/typeclasses/aggregator.md | 0 .../{tut => mdoc}/typeclasses/applicative.md | 0 .../{tut => mdoc}/typeclasses/buffered.md | 0 .../main/{tut => mdoc}/typeclasses/fold.md | 0 .../main/{tut => mdoc}/typeclasses/functor.md | 0 .../main/{tut => mdoc}/typeclasses/group.md | 0 .../main/{tut => mdoc}/typeclasses/hash128.md | 0 .../main/{tut => mdoc}/typeclasses/metric.md | 0 .../main/{tut => mdoc}/typeclasses/monad.md | 0 .../main/{tut => mdoc}/typeclasses/monoid.md | 0 .../typeclasses/predecessible.md | 0 .../{tut => mdoc}/typeclasses/preparer.md | 0 .../main/{tut => mdoc}/typeclasses/ring.md | 0 .../{tut => mdoc}/typeclasses/semigroup.md | 0 .../{tut => mdoc}/typeclasses/successible.md | 0 .../{tut => mdoc}/typeclasses/typeclasses.md | 0 project/plugins.sbt | 2 +- 72 files changed, 51 insertions(+), 53 deletions(-) rename docs/src/main/{tut => mdoc}/cookbook.md (100%) rename docs/src/main/{tut => mdoc}/cookbook/cookbook.md (100%) rename docs/src/main/{tut => mdoc}/datatypes.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/adaptive_matrix.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/adaptive_vector.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/affine_function.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/approx.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/approx/approximate.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/approx/bloom_filter.md (99%) rename docs/src/main/{tut => mdoc}/datatypes/approx/countminsketch.md (94%) rename docs/src/main/{tut => mdoc}/datatypes/approx/exponential_histogram.md (99%) rename docs/src/main/{tut => mdoc}/datatypes/approx/hyperloglog.md (97%) rename 
docs/src/main/{tut => mdoc}/datatypes/approx/hyperloglog_series.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/approx/min_hasher.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/approx/q_tree.md (99%) rename docs/src/main/{tut => mdoc}/datatypes/approx/space_saver.md (98%) rename docs/src/main/{tut => mdoc}/datatypes/averaged_value.md (96%) rename docs/src/main/{tut => mdoc}/datatypes/bytes.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/combinator.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/combinator/caseclass.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/combinator/collections.md (98%) rename docs/src/main/{tut => mdoc}/datatypes/combinator/cuber.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/combinator/eventually.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/combinator/java_collections.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/combinator/monoid_statistics.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/combinator/option_monoid.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/combinator/priority.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/combinator/product_algebra.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/combinator/roller.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/decayed_value.md (99%) rename docs/src/main/{tut => mdoc}/datatypes/decayed_vector.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/first_and_last.md (97%) rename docs/src/main/{tut => mdoc}/datatypes/five_moments.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/gaussian_distribution.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/interval.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/min_and_max.md (97%) rename docs/src/main/{tut => mdoc}/datatypes/min_plus_algebra.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/reset_state.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/right_folded.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/set_diff.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/sgd.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/summer.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/summer/adaptive_cache.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/summer/batched.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/summer/sum_all.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/summer/summingcache.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/summer/summingiterator.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/summer/summingqueue.md (100%) rename docs/src/main/{tut => mdoc}/datatypes/topk.md (100%) rename docs/src/main/{tut => mdoc}/faq.md (100%) rename docs/src/main/{tut => mdoc}/index.md (99%) rename docs/src/main/{tut => mdoc}/resources_for_learners.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/abstract_algebra.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/aggregator.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/applicative.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/buffered.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/fold.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/functor.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/group.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/hash128.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/metric.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/monad.md (100%) rename docs/src/main/{tut => 
mdoc}/typeclasses/monoid.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/predecessible.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/preparer.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/ring.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/semigroup.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/successible.md (100%) rename docs/src/main/{tut => mdoc}/typeclasses/typeclasses.md (100%) diff --git a/build.sbt b/build.sbt index f7306d55b..e834fb2c1 100644 --- a/build.sbt +++ b/build.sbt @@ -334,12 +334,10 @@ lazy val docSettings = Seq( "gray-lighter" -> "#F4F3F4", "white-color" -> "#FFFFFF" ), - micrositeCompilingDocsTool := WithTut, autoAPIMappings := true, docsMappingsAPIDir := "api", addMappingsToSiteDir(mappings in (ScalaUnidoc, packageDoc), docsMappingsAPIDir), ghpagesNoJekyll := false, - fork in tut := true, fork in (ScalaUnidoc, unidoc) := true, scalacOptions in (ScalaUnidoc, unidoc) ++= Seq( "-doc-source-url", @@ -355,14 +353,14 @@ lazy val docSettings = Seq( // Documentation is generated for projects defined in // `docsSourcesAndProjects`. lazy val docs = project - .enablePlugins(MicrositesPlugin, TutPlugin, ScalaUnidocPlugin, GhpagesPlugin) + .enablePlugins(MicrositesPlugin, MdocPlugin, ScalaUnidocPlugin, GhpagesPlugin) .settings(moduleName := "algebird-docs") .settings(sharedSettings) .settings(noPublishSettings) .settings(docSettings) .settings( addCompilerPlugin(("org.typelevel" % "kind-projector" % kindProjectorVersion).cross(CrossVersion.full)), - scalacOptions in Tut ~= (_.filterNot(Set("-Ywarn-unused-import", "-Ywarn-dead-code"))), + mdocIn := sourceDirectory.value / "main" / "mdoc", sources in (ScalaUnidoc, unidoc) ~= (_.filterNot(_.absolutePath.contains("javaapi"))) ) .dependsOn(algebirdCore) diff --git a/docs/src/main/tut/cookbook.md b/docs/src/main/mdoc/cookbook.md similarity index 100% rename from docs/src/main/tut/cookbook.md rename to docs/src/main/mdoc/cookbook.md diff --git a/docs/src/main/tut/cookbook/cookbook.md b/docs/src/main/mdoc/cookbook/cookbook.md similarity index 100% rename from docs/src/main/tut/cookbook/cookbook.md rename to docs/src/main/mdoc/cookbook/cookbook.md diff --git a/docs/src/main/tut/datatypes.md b/docs/src/main/mdoc/datatypes.md similarity index 100% rename from docs/src/main/tut/datatypes.md rename to docs/src/main/mdoc/datatypes.md diff --git a/docs/src/main/tut/datatypes/adaptive_matrix.md b/docs/src/main/mdoc/datatypes/adaptive_matrix.md similarity index 100% rename from docs/src/main/tut/datatypes/adaptive_matrix.md rename to docs/src/main/mdoc/datatypes/adaptive_matrix.md diff --git a/docs/src/main/tut/datatypes/adaptive_vector.md b/docs/src/main/mdoc/datatypes/adaptive_vector.md similarity index 100% rename from docs/src/main/tut/datatypes/adaptive_vector.md rename to docs/src/main/mdoc/datatypes/adaptive_vector.md diff --git a/docs/src/main/tut/datatypes/affine_function.md b/docs/src/main/mdoc/datatypes/affine_function.md similarity index 100% rename from docs/src/main/tut/datatypes/affine_function.md rename to docs/src/main/mdoc/datatypes/affine_function.md diff --git a/docs/src/main/tut/datatypes/approx.md b/docs/src/main/mdoc/datatypes/approx.md similarity index 100% rename from docs/src/main/tut/datatypes/approx.md rename to docs/src/main/mdoc/datatypes/approx.md diff --git a/docs/src/main/tut/datatypes/approx/approximate.md b/docs/src/main/mdoc/datatypes/approx/approximate.md similarity index 100% rename from docs/src/main/tut/datatypes/approx/approximate.md rename to 
docs/src/main/mdoc/datatypes/approx/approximate.md diff --git a/docs/src/main/tut/datatypes/approx/bloom_filter.md b/docs/src/main/mdoc/datatypes/approx/bloom_filter.md similarity index 99% rename from docs/src/main/tut/datatypes/approx/bloom_filter.md rename to docs/src/main/mdoc/datatypes/approx/bloom_filter.md index 877c49fb8..cb4fbb3c8 100644 --- a/docs/src/main/tut/datatypes/approx/bloom_filter.md +++ b/docs/src/main/mdoc/datatypes/approx/bloom_filter.md @@ -27,7 +27,7 @@ To read more about Bloom filters see wikipedia: https://en.wikipedia.org/wiki/Bl example usage: -```tut:book +```scala mdoc import com.twitter.algebird._ // It's possible to create a Bloom filter with a set number of diff --git a/docs/src/main/tut/datatypes/approx/countminsketch.md b/docs/src/main/mdoc/datatypes/approx/countminsketch.md similarity index 94% rename from docs/src/main/tut/datatypes/approx/countminsketch.md rename to docs/src/main/mdoc/datatypes/approx/countminsketch.md index c9b862db6..68fa42555 100644 --- a/docs/src/main/tut/datatypes/approx/countminsketch.md +++ b/docs/src/main/mdoc/datatypes/approx/countminsketch.md @@ -1,6 +1,6 @@ --- layout: docs -title: "Count Min Sketch" +title: "Count Min Sketch" section: "data" source: "algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala" scaladoc: "#com.twitter.algebird.CountMinSketch" @@ -12,7 +12,7 @@ Count-min sketch is a probabilistic data structure that estimates the frequencie In Algebird, count-min sketches are represented as the abstract class `CMS`, along with the `CMSMonoid` class. Here's an example usage: -```tut:book +```scala mdoc import com.twitter.algebird._ import CMSHasherImplicits._ val DELTA = 1E-10 @@ -36,7 +36,7 @@ The Count-Min sketch uses `d` (aka `depth`) pair-wise independent hash functions h(x) = [a * x + b (mod p)] (mod m) ``` -As a requirement for using `CMS` you must provide an implicit `CMSHasher[K]` for the type `K` of the items you want to count. Algebird ships with several such implicits for commonly used types `K` such as `Long` and `scala.BigInt`. +As a requirement for using `CMS` you must provide an implicit `CMSHasher[K]` for the type `K` of the items you want to count. Algebird ships with several such implicits for commonly used types `K` such as `Long` and `scala.BigInt`. If your type `K` is not supported out of the box, you have two options: @@ -47,7 +47,7 @@ If your type `K` is not supported out of the box, you have two options: A Sketch Map is a generalized version of the Count-Min Sketch that is an approximation of Map[K, V] that stores reference to top heavy hitters. The Sketch Map can approximate the sums of any summable value that has a monoid. -```tut:book +```scala mdoc:nest val DELTA = 1E-8 val EPS = 0.001 val SEED = 1 diff --git a/docs/src/main/tut/datatypes/approx/exponential_histogram.md b/docs/src/main/mdoc/datatypes/approx/exponential_histogram.md similarity index 99% rename from docs/src/main/tut/datatypes/approx/exponential_histogram.md rename to docs/src/main/mdoc/datatypes/approx/exponential_histogram.md index 629f254b4..b60f55958 100644 --- a/docs/src/main/tut/datatypes/approx/exponential_histogram.md +++ b/docs/src/main/mdoc/datatypes/approx/exponential_histogram.md @@ -23,7 +23,7 @@ The approximate count is guaranteed to be within `conf.epsilon` relative error o Let's set up a bunch of buckets to add into our exponential histogram. Each bucket tracks a delta and a timestamp. This example uses the same number for both, for simplicity. 
-```tut:book +```scala mdoc import com.twitter.algebird.ExpHist import ExpHist.{ Bucket, Config, Timestamp } @@ -37,7 +37,7 @@ val actualSum = inputs.map(_.size).sum Now we'll configure an instance of `ExpHist` to track the count and add each of our buckets in. -```tut:book +```scala mdoc val epsilon = 0.01 val windowSize = maxTimestamp val eh = ExpHist.empty(Config(epsilon, windowSize)) @@ -48,7 +48,7 @@ val full = inputs.foldLeft(eh) { Now we can query the full exponential histogram and compare the guess to the actual sum: -```tut:book +```scala mdoc val approximateSum = full.guess full.relativeError val maxError = actualSum * full.relativeError diff --git a/docs/src/main/tut/datatypes/approx/hyperloglog.md b/docs/src/main/mdoc/datatypes/approx/hyperloglog.md similarity index 97% rename from docs/src/main/tut/datatypes/approx/hyperloglog.md rename to docs/src/main/mdoc/datatypes/approx/hyperloglog.md index 3d6411fea..91f274295 100644 --- a/docs/src/main/tut/datatypes/approx/hyperloglog.md +++ b/docs/src/main/mdoc/datatypes/approx/hyperloglog.md @@ -1,6 +1,6 @@ --- layout: docs -title: "HyperLogLog" +title: "HyperLogLog" section: "data" source: "algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala" scaladoc: "#com.twitter.algebird.HyperLogLog" @@ -26,7 +26,7 @@ The `HyperLogLogMonoid` class is the simplest way to create HLLs. `HyperLogLogMo The HyperLogLogMonoid constructor takes an Int `bits`, which represents the number of bits of the hash function that the HLL uses. The more bits you use, the more space the HLLs will take up, and the more precise your estimates will be. For a better understanding of the space-to-accuracy trade-off, see [this table](https://github.com/twitter/algebird/blob/develop/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala#L197) or use one of the other strategies mentioned below, which allow you to specify the desired error. -```tut:book +```scala mdoc import com.twitter.algebird._ val hllMonoid = new HyperLogLogMonoid(bits = 4) ``` @@ -37,7 +37,7 @@ HyperLogLogMonoid has a `create` method which takes a hashed element (as a `Arra We can create an HLL containing a list of elements by creating HLLs for each element using the `create` method, and combining the elements using the HyperLogLogMonoid's `sum` method. -```tut:book +```scala mdoc import com.twitter.algebird.HyperLogLog.int2Bytes val data = List(1, 1, 2, 2, 3, 3, 4, 4, 5, 5) val hlls = data.map { hllMonoid.create(_) } @@ -50,7 +50,7 @@ Note that we were able to call `hllMonoid.create` on an `Int` because we importe We can use the `sizeOf` method to estimate the approximate number of distinct elements in the multiset. -```tut:book +```scala mdoc val approxSizeOf = hllMonoid.sizeOf(combinedHLL) ``` @@ -67,7 +67,7 @@ To learn more about the `Array[Byte]` aggregators, see [the source code of Hyper This is an aggregator of type `Aggregator[K, HLL, HLL]`, which means that it builds a `HLL` from a `TraversableOnce` of `K`s. It takes an `error`, which must be a Double in the range (0,1). -```tut:book +```scala mdoc:nest val agg = HyperLogLogAggregator.withErrorGeneric[Int](0.01) val data = List(1, 1, 2, 2, 3, 3, 4, 4, 5, 5) val combinedHll: HLL = agg(data) @@ -77,7 +77,7 @@ val combinedHll: HLL = agg(data) Similar to `withErrorGeneric`, but takes the number of bits as an Int. 
-```tut:book +```scala mdoc:nest val agg = HyperLogLogAggregator.withBits[Int](9) val data = List(1, 1, 2, 2, 3, 3, 4, 4, 5, 5) val combinedHll: HLL = agg(data) @@ -87,7 +87,7 @@ val combinedHll: HLL = agg(data) This is an aggregator of type `Aggregator[K, HLL, Long]`, which means that it presents a Long value. The Long that it returns is the estimated size of the combined HLL. -```tut:book +```scala mdoc:nest val agg = HyperLogLogAggregator.sizeWithErrorGeneric[Int](0.01) val data = List(1, 1, 2, 2, 3, 3, 4, 4, 5, 5) val approximateSize: Long = agg(data) @@ -95,7 +95,7 @@ val approximateSize: Long = agg(data) ### REPL Tour -```tut:book +```scala mdoc:nest import HyperLogLog._ val hll = new HyperLogLogMonoid(4) val data = List(1, 1, 2, 2, 3, 3, 4, 4, 5, 5) diff --git a/docs/src/main/tut/datatypes/approx/hyperloglog_series.md b/docs/src/main/mdoc/datatypes/approx/hyperloglog_series.md similarity index 100% rename from docs/src/main/tut/datatypes/approx/hyperloglog_series.md rename to docs/src/main/mdoc/datatypes/approx/hyperloglog_series.md diff --git a/docs/src/main/tut/datatypes/approx/min_hasher.md b/docs/src/main/mdoc/datatypes/approx/min_hasher.md similarity index 100% rename from docs/src/main/tut/datatypes/approx/min_hasher.md rename to docs/src/main/mdoc/datatypes/approx/min_hasher.md diff --git a/docs/src/main/tut/datatypes/approx/q_tree.md b/docs/src/main/mdoc/datatypes/approx/q_tree.md similarity index 99% rename from docs/src/main/tut/datatypes/approx/q_tree.md rename to docs/src/main/mdoc/datatypes/approx/q_tree.md index 6847711a0..43aad8932 100644 --- a/docs/src/main/tut/datatypes/approx/q_tree.md +++ b/docs/src/main/mdoc/datatypes/approx/q_tree.md @@ -65,7 +65,7 @@ For example, if we call `qtree.rangeSumBounds(0.1,0.2)` on the first `QTree` des ## REPL Tour -```tut:book +```scala mdoc import com.twitter.algebird._ val data = List(1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8) val seqQTree = data.map { QTree(_) } diff --git a/docs/src/main/tut/datatypes/approx/space_saver.md b/docs/src/main/mdoc/datatypes/approx/space_saver.md similarity index 98% rename from docs/src/main/tut/datatypes/approx/space_saver.md rename to docs/src/main/mdoc/datatypes/approx/space_saver.md index 5215b7ca0..b79ce0adf 100644 --- a/docs/src/main/tut/datatypes/approx/space_saver.md +++ b/docs/src/main/mdoc/datatypes/approx/space_saver.md @@ -13,7 +13,7 @@ also called StreamSummary. 
## Serialization You can serialize `SpaceSaver` instances using `fromBytes` and `toBytes`: -```tut:book +```scala mdoc import com.twitter.algebird._ import scala.util.Try diff --git a/docs/src/main/tut/datatypes/averaged_value.md b/docs/src/main/mdoc/datatypes/averaged_value.md similarity index 96% rename from docs/src/main/tut/datatypes/averaged_value.md rename to docs/src/main/mdoc/datatypes/averaged_value.md index 2914ba098..54c91e75f 100644 --- a/docs/src/main/tut/datatypes/averaged_value.md +++ b/docs/src/main/mdoc/datatypes/averaged_value.md @@ -1,6 +1,6 @@ --- layout: docs -title: "Averaged Value" +title: "Averaged Value" section: "data" source: "algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala" scaladoc: "#com.twitter.algebird.AveragedValue" @@ -12,7 +12,7 @@ The `AveragedValue` data structure keeps track of the `count` and `mean` of a st You can build instances of `AveragedValue` from any numeric type: -```tut:book +```scala mdoc import com.twitter.algebird._ val longVal = AveragedValue(3L) @@ -24,33 +24,33 @@ val intVal = AveragedValue(15) Combining instances with `+` generates a new instance by adding the `count`s and averaging the `value`s: -```tut:book +```scala mdoc longVal + doubleVal longVal + doubleVal + intVal ``` You can also add numbers directly to an `AveragedValue` instance: -```tut:book +```scala mdoc longVal + 12 ``` `AveragedValue` is a commutative group. This means you can add instances in any order: -```tut:book +```scala mdoc longVal + doubleVal == doubleVal + doubleVal ``` An `AveragedValue` with a count and value of `0` act as `Monoid.zero`: -```tut:book +```scala mdoc Monoid.zero[AveragedValue] longVal + Monoid.zero[AveragedValue] == longVal ``` Subtracting `AveragedValue`s is the opposite of addition: -```tut:book +```scala mdoc intVal - longVal intVal + doubleVal - doubleVal ``` @@ -81,14 +81,14 @@ val newCount = big.count + small.count `AveragedValue.aggregator` returns an `Aggregator` that uses `AveragedValue` to calculate the mean of all `Double` values in a stream. 
For example: -```tut:book +```scala mdoc val items = List[Double](1.0, 2.2, 3.3, 4.4, 5.5) AveragedValue.aggregator(items) ``` `AveragedValue.numericAggregator` works the same way for any numeric type: -```tut:book +```scala mdoc:nest val items = List[Int](1, 3, 5, 7) AveragedValue.numericAggregator[Int].apply(items) ``` diff --git a/docs/src/main/tut/datatypes/bytes.md b/docs/src/main/mdoc/datatypes/bytes.md similarity index 100% rename from docs/src/main/tut/datatypes/bytes.md rename to docs/src/main/mdoc/datatypes/bytes.md diff --git a/docs/src/main/tut/datatypes/combinator.md b/docs/src/main/mdoc/datatypes/combinator.md similarity index 100% rename from docs/src/main/tut/datatypes/combinator.md rename to docs/src/main/mdoc/datatypes/combinator.md diff --git a/docs/src/main/tut/datatypes/combinator/caseclass.md b/docs/src/main/mdoc/datatypes/combinator/caseclass.md similarity index 100% rename from docs/src/main/tut/datatypes/combinator/caseclass.md rename to docs/src/main/mdoc/datatypes/combinator/caseclass.md diff --git a/docs/src/main/tut/datatypes/combinator/collections.md b/docs/src/main/mdoc/datatypes/combinator/collections.md similarity index 98% rename from docs/src/main/tut/datatypes/combinator/collections.md rename to docs/src/main/mdoc/datatypes/combinator/collections.md index 49e294eac..4fc22f74a 100644 --- a/docs/src/main/tut/datatypes/combinator/collections.md +++ b/docs/src/main/mdoc/datatypes/combinator/collections.md @@ -8,7 +8,7 @@ section: "data" ## Adding and Multiplication -```tut:book +```scala mdoc val data2 = Map(1 -> 1, 2 -> 1) val data1 = Map(1 -> 3, 2 -> 5, 3 -> 7, 5 -> 1) import com.twitter.algebird.Operators._ diff --git a/docs/src/main/tut/datatypes/combinator/cuber.md b/docs/src/main/mdoc/datatypes/combinator/cuber.md similarity index 100% rename from docs/src/main/tut/datatypes/combinator/cuber.md rename to docs/src/main/mdoc/datatypes/combinator/cuber.md diff --git a/docs/src/main/tut/datatypes/combinator/eventually.md b/docs/src/main/mdoc/datatypes/combinator/eventually.md similarity index 100% rename from docs/src/main/tut/datatypes/combinator/eventually.md rename to docs/src/main/mdoc/datatypes/combinator/eventually.md diff --git a/docs/src/main/tut/datatypes/combinator/java_collections.md b/docs/src/main/mdoc/datatypes/combinator/java_collections.md similarity index 100% rename from docs/src/main/tut/datatypes/combinator/java_collections.md rename to docs/src/main/mdoc/datatypes/combinator/java_collections.md diff --git a/docs/src/main/tut/datatypes/combinator/monoid_statistics.md b/docs/src/main/mdoc/datatypes/combinator/monoid_statistics.md similarity index 100% rename from docs/src/main/tut/datatypes/combinator/monoid_statistics.md rename to docs/src/main/mdoc/datatypes/combinator/monoid_statistics.md diff --git a/docs/src/main/tut/datatypes/combinator/option_monoid.md b/docs/src/main/mdoc/datatypes/combinator/option_monoid.md similarity index 100% rename from docs/src/main/tut/datatypes/combinator/option_monoid.md rename to docs/src/main/mdoc/datatypes/combinator/option_monoid.md diff --git a/docs/src/main/tut/datatypes/combinator/priority.md b/docs/src/main/mdoc/datatypes/combinator/priority.md similarity index 100% rename from docs/src/main/tut/datatypes/combinator/priority.md rename to docs/src/main/mdoc/datatypes/combinator/priority.md diff --git a/docs/src/main/tut/datatypes/combinator/product_algebra.md b/docs/src/main/mdoc/datatypes/combinator/product_algebra.md similarity index 100% rename from 
docs/src/main/tut/datatypes/combinator/product_algebra.md rename to docs/src/main/mdoc/datatypes/combinator/product_algebra.md diff --git a/docs/src/main/tut/datatypes/combinator/roller.md b/docs/src/main/mdoc/datatypes/combinator/roller.md similarity index 100% rename from docs/src/main/tut/datatypes/combinator/roller.md rename to docs/src/main/mdoc/datatypes/combinator/roller.md diff --git a/docs/src/main/tut/datatypes/decayed_value.md b/docs/src/main/mdoc/datatypes/decayed_value.md similarity index 99% rename from docs/src/main/tut/datatypes/decayed_value.md rename to docs/src/main/mdoc/datatypes/decayed_value.md index 42ff51cf1..bc25b54e1 100644 --- a/docs/src/main/tut/datatypes/decayed_value.md +++ b/docs/src/main/mdoc/datatypes/decayed_value.md @@ -18,7 +18,7 @@ See the related issue: https://github.com/twitter/algebird/issues/235 Here is the code example for computing a `DecayedValue` average: -```tut:book +```scala mdoc import com.twitter.algebird._ val data = { @@ -41,7 +41,7 @@ data.zipWithIndex.scanLeft(Monoid.zero[DecayedValue]) { (previous, data) => Running the above code in comparison with a simple decayed average: -```tut:book +```scala mdoc data.zipWithIndex.scanLeft(0.0) { (previous, data) => val (value, time) = data val avg = (value + previous * (HalfLife - 1.0)) / HalfLife diff --git a/docs/src/main/tut/datatypes/decayed_vector.md b/docs/src/main/mdoc/datatypes/decayed_vector.md similarity index 100% rename from docs/src/main/tut/datatypes/decayed_vector.md rename to docs/src/main/mdoc/datatypes/decayed_vector.md diff --git a/docs/src/main/tut/datatypes/first_and_last.md b/docs/src/main/mdoc/datatypes/first_and_last.md similarity index 97% rename from docs/src/main/tut/datatypes/first_and_last.md rename to docs/src/main/mdoc/datatypes/first_and_last.md index 20119d9f2..171a849fa 100644 --- a/docs/src/main/tut/datatypes/first_and_last.md +++ b/docs/src/main/mdoc/datatypes/first_and_last.md @@ -8,7 +8,7 @@ section: "data" `First[T]` and `Last[T]` are data structures that keep track of, respectively, the earliest and latest instances of `T` that you've seen. `First[T]` works for any type `T`: -```tut:book +```scala mdoc import com.twitter.algebird.{ First, Last } First(3) + First(2) + First(1) First("a") + First("b") + First("c") @@ -16,7 +16,7 @@ First("a") + First("b") + First("c") As does `Last[T]`: -```tut:book +```scala mdoc Last(3) + Last(2) + Last(1) Last("a") + Last("b") + Last("c") ``` @@ -25,7 +25,7 @@ Last("a") + Last("b") + Last("c") `First[T]` and `Last[T]` are both non-commutative semigroups. For `First[T]`, the `+` function keeps the left input, while `Last[T]`'s `+` implementation keeps the right input. For example, for `First[T]`: -```tut:book +```scala mdoc val first1 = First(1) + First(3) == First(1) val first3 = First(3) + First(1) == First(3) assert(first1 && first3) @@ -33,7 +33,7 @@ assert(first1 && first3) And for `Last[T]`: -```tut:book +```scala mdoc val last3 = Last(1) + Last(3) == Last(3) val last1 = Last(3) + Last(1) == Last(1) assert(last3 && last1) @@ -43,20 +43,20 @@ assert(last3 && last1) Let's use `First[T]` and `Last[T]` to keep track of the first and last username that a Twitter user has followed over the lifetime of their account. First let's define a type alias for `Username`: -```tut:book +```scala mdoc type Username = String ``` To track `First` and `Last` simultaneously we'll use a combinator. 
As discussed on the [Product Algebra docs page](combinator/product_algebra.html), the `Tuple2[A, B]` semigroup works by separately combining its left and right elements. This means that we can use a pair - a `(First[Username], Last[Username])` - to track both the oldest and most recent twitter username that we've seen. -```tut:book +```scala mdoc def follow(user: Username): (First[Username], Last[Username]) = (First(user), Last(user)) ``` Now let's "add" up a few of these pairs, using the semigroup. First, we'll import Algebird's `Operators._`, which will enrich any semigroup with a `+` method. -```tut:book +```scala mdoc import com.twitter.algebird.Operators._ val follows = follow("sam") + follow("erik") + follow("oscar") + follow("kelley") diff --git a/docs/src/main/tut/datatypes/five_moments.md b/docs/src/main/mdoc/datatypes/five_moments.md similarity index 100% rename from docs/src/main/tut/datatypes/five_moments.md rename to docs/src/main/mdoc/datatypes/five_moments.md diff --git a/docs/src/main/tut/datatypes/gaussian_distribution.md b/docs/src/main/mdoc/datatypes/gaussian_distribution.md similarity index 100% rename from docs/src/main/tut/datatypes/gaussian_distribution.md rename to docs/src/main/mdoc/datatypes/gaussian_distribution.md diff --git a/docs/src/main/tut/datatypes/interval.md b/docs/src/main/mdoc/datatypes/interval.md similarity index 100% rename from docs/src/main/tut/datatypes/interval.md rename to docs/src/main/mdoc/datatypes/interval.md diff --git a/docs/src/main/tut/datatypes/min_and_max.md b/docs/src/main/mdoc/datatypes/min_and_max.md similarity index 97% rename from docs/src/main/tut/datatypes/min_and_max.md rename to docs/src/main/mdoc/datatypes/min_and_max.md index f081a0302..70a499ad5 100644 --- a/docs/src/main/tut/datatypes/min_and_max.md +++ b/docs/src/main/mdoc/datatypes/min_and_max.md @@ -8,7 +8,7 @@ section: "data" `Min[T]` and `Max[T]` are data structures that keep track of, respectively, the minimum and maximum instances of `T` that you've seen. `First[T]` works for any type `T` with an `Ordering[T]` instance: -```tut:book +```scala mdoc import com.twitter.algebird._ Min(3) + Min(2) + Min(1) Min("a") + Min("aaa") + Min("ccccc") // by length @@ -16,7 +16,7 @@ Min("a") + Min("aaa") + Min("ccccc") // by length As does `Max[T]`: -```tut:book +```scala mdoc Max(3) + Max(2) + Max(1) Max("a") + Max("aaa") + Max("ccccc") // by length ``` @@ -25,7 +25,7 @@ Max("a") + Max("aaa") + Max("ccccc") // by length `Min[T]` and `Max[T]` are both commutative semigroups. For `Min[T]`, the `+` function keeps the input with the minimum wrapped instance of `T`, while `Max[T]`'s `+` implementation keeps the maximum input. For example, for `Min[T]`: -```tut:book +```scala mdoc val min1 = Min(1) + Min(100) == Min(1) val min2 = Min(100) + Min(1) == Min(1) assert(min1 && min2) @@ -33,7 +33,7 @@ assert(min1 && min2) And for `Max[T]`: -```tut:book +```scala mdoc val max1 = Max(1) + Max(100) == Max(100) val max2 = Max(100) + Max(1) == Max(100) assert(max1 && max2) @@ -41,7 +41,7 @@ assert(max1 && max2) `Min[T]` forms a monoid on numeric types with an upper bound, like `Int` and `Float`: -```tut:book +```scala mdoc Monoid.zero[Min[Int]] Monoid.zero[Min[Float]] ``` @@ -50,7 +50,7 @@ Since all instances of `T` will be less than or equal to the upper bound. `Max[T]` forms a monoid on types with a *lower* bound. This includes the numeric types as well as collections like `List[T]` and `String`. 
The monoid instance for these containers compares each `T` element-wise, with the additional notion that "shorter" sequences are smaller. This allows us to use the empty collection as a lower bound. -```tut:book +```scala mdoc Monoid.zero[Max[Int]] Monoid.zero[Max[Float]] Monoid.zero[String] @@ -60,7 +60,7 @@ Monoid.zero[String] Let's have a popularity contest on Twitter. The user with the most followers wins! (We've borrowed this example with thanks from [Michael Noll](https://twitter.com/miguno)'s excellent algebird tutorial, [Of Algebirds, Monoids, Monads, and Other Bestiary for Large-Scale Data Analytics](http://www.michael-noll.com/blog/2013/12/02/twitter-algebird-monoid-monad-for-large-scala-data-analytics)). First, let's write a data structure to represent a pair of username and the user's number of followers: -```tut:book +```scala mdoc case class TwitterUser(val name: String, val numFollowers: Int) extends Ordered[TwitterUser] { def compare(that: TwitterUser): Int = { val c = this.numFollowers - that.numFollowers @@ -71,7 +71,7 @@ case class TwitterUser(val name: String, val numFollowers: Int) extends Ordered[ Now let's create a bunch of `TwitterUser` instances. -```tut:book +```scala mdoc val barackobama = TwitterUser("BarackObama", 40267391) val katyperry = TwitterUser("katyperry", 48013573) val ladygaga = TwitterUser("ladygaga", 40756470) @@ -82,14 +82,14 @@ val taylorswift = TwitterUser("taylorswift13", 37125055) Who's the winner? Since `TwitterUser` defines an `Ordering` by extending `Ordered`, we can find the winner by wrapping each user in `Max` and combining all of the `Max[TwitterUser]` instances with `+`: -```tut:book +```scala mdoc val winner: Max[TwitterUser] = Max(barackobama) + Max(katyperry) + Max(ladygaga) + Max(miguno) + Max(taylorswift) assert(katyperry == winner.get) ``` A similar trick with `Min[TwitterUser]` gives us the loser: -```tut:book +```scala mdoc val loser: Min[TwitterUser] = Min(barackobama) + Min(katyperry) + Min(ladygaga) + Min(miguno) + Min(taylorswift) assert(miguno == loser.get) ``` diff --git a/docs/src/main/tut/datatypes/min_plus_algebra.md b/docs/src/main/mdoc/datatypes/min_plus_algebra.md similarity index 100% rename from docs/src/main/tut/datatypes/min_plus_algebra.md rename to docs/src/main/mdoc/datatypes/min_plus_algebra.md diff --git a/docs/src/main/tut/datatypes/reset_state.md b/docs/src/main/mdoc/datatypes/reset_state.md similarity index 100% rename from docs/src/main/tut/datatypes/reset_state.md rename to docs/src/main/mdoc/datatypes/reset_state.md diff --git a/docs/src/main/tut/datatypes/right_folded.md b/docs/src/main/mdoc/datatypes/right_folded.md similarity index 100% rename from docs/src/main/tut/datatypes/right_folded.md rename to docs/src/main/mdoc/datatypes/right_folded.md diff --git a/docs/src/main/tut/datatypes/set_diff.md b/docs/src/main/mdoc/datatypes/set_diff.md similarity index 100% rename from docs/src/main/tut/datatypes/set_diff.md rename to docs/src/main/mdoc/datatypes/set_diff.md diff --git a/docs/src/main/tut/datatypes/sgd.md b/docs/src/main/mdoc/datatypes/sgd.md similarity index 100% rename from docs/src/main/tut/datatypes/sgd.md rename to docs/src/main/mdoc/datatypes/sgd.md diff --git a/docs/src/main/tut/datatypes/summer.md b/docs/src/main/mdoc/datatypes/summer.md similarity index 100% rename from docs/src/main/tut/datatypes/summer.md rename to docs/src/main/mdoc/datatypes/summer.md diff --git a/docs/src/main/tut/datatypes/summer/adaptive_cache.md b/docs/src/main/mdoc/datatypes/summer/adaptive_cache.md similarity 
index 100% rename from docs/src/main/tut/datatypes/summer/adaptive_cache.md rename to docs/src/main/mdoc/datatypes/summer/adaptive_cache.md diff --git a/docs/src/main/tut/datatypes/summer/batched.md b/docs/src/main/mdoc/datatypes/summer/batched.md similarity index 100% rename from docs/src/main/tut/datatypes/summer/batched.md rename to docs/src/main/mdoc/datatypes/summer/batched.md diff --git a/docs/src/main/tut/datatypes/summer/sum_all.md b/docs/src/main/mdoc/datatypes/summer/sum_all.md similarity index 100% rename from docs/src/main/tut/datatypes/summer/sum_all.md rename to docs/src/main/mdoc/datatypes/summer/sum_all.md diff --git a/docs/src/main/tut/datatypes/summer/summingcache.md b/docs/src/main/mdoc/datatypes/summer/summingcache.md similarity index 100% rename from docs/src/main/tut/datatypes/summer/summingcache.md rename to docs/src/main/mdoc/datatypes/summer/summingcache.md diff --git a/docs/src/main/tut/datatypes/summer/summingiterator.md b/docs/src/main/mdoc/datatypes/summer/summingiterator.md similarity index 100% rename from docs/src/main/tut/datatypes/summer/summingiterator.md rename to docs/src/main/mdoc/datatypes/summer/summingiterator.md diff --git a/docs/src/main/tut/datatypes/summer/summingqueue.md b/docs/src/main/mdoc/datatypes/summer/summingqueue.md similarity index 100% rename from docs/src/main/tut/datatypes/summer/summingqueue.md rename to docs/src/main/mdoc/datatypes/summer/summingqueue.md diff --git a/docs/src/main/tut/datatypes/topk.md b/docs/src/main/mdoc/datatypes/topk.md similarity index 100% rename from docs/src/main/tut/datatypes/topk.md rename to docs/src/main/mdoc/datatypes/topk.md diff --git a/docs/src/main/tut/faq.md b/docs/src/main/mdoc/faq.md similarity index 100% rename from docs/src/main/tut/faq.md rename to docs/src/main/mdoc/faq.md diff --git a/docs/src/main/tut/index.md b/docs/src/main/mdoc/index.md similarity index 99% rename from docs/src/main/tut/index.md rename to docs/src/main/mdoc/index.md index ac152d2d4..3ecba85c7 100644 --- a/docs/src/main/tut/index.md +++ b/docs/src/main/mdoc/index.md @@ -10,7 +10,7 @@ This code is targeted at building aggregation systems (via [Scalding](https://gi ### What can you do with this code? 
-```tut:book +```scala mdoc import com.twitter.algebird._ import com.twitter.algebird.Operators._ Map(1 -> Max(2)) + Map(1 -> Max(3)) + Map(2 -> Max(4)) diff --git a/docs/src/main/tut/resources_for_learners.md b/docs/src/main/mdoc/resources_for_learners.md similarity index 100% rename from docs/src/main/tut/resources_for_learners.md rename to docs/src/main/mdoc/resources_for_learners.md diff --git a/docs/src/main/tut/typeclasses.md b/docs/src/main/mdoc/typeclasses.md similarity index 100% rename from docs/src/main/tut/typeclasses.md rename to docs/src/main/mdoc/typeclasses.md diff --git a/docs/src/main/tut/typeclasses/abstract_algebra.md b/docs/src/main/mdoc/typeclasses/abstract_algebra.md similarity index 100% rename from docs/src/main/tut/typeclasses/abstract_algebra.md rename to docs/src/main/mdoc/typeclasses/abstract_algebra.md diff --git a/docs/src/main/tut/typeclasses/aggregator.md b/docs/src/main/mdoc/typeclasses/aggregator.md similarity index 100% rename from docs/src/main/tut/typeclasses/aggregator.md rename to docs/src/main/mdoc/typeclasses/aggregator.md diff --git a/docs/src/main/tut/typeclasses/applicative.md b/docs/src/main/mdoc/typeclasses/applicative.md similarity index 100% rename from docs/src/main/tut/typeclasses/applicative.md rename to docs/src/main/mdoc/typeclasses/applicative.md diff --git a/docs/src/main/tut/typeclasses/buffered.md b/docs/src/main/mdoc/typeclasses/buffered.md similarity index 100% rename from docs/src/main/tut/typeclasses/buffered.md rename to docs/src/main/mdoc/typeclasses/buffered.md diff --git a/docs/src/main/tut/typeclasses/fold.md b/docs/src/main/mdoc/typeclasses/fold.md similarity index 100% rename from docs/src/main/tut/typeclasses/fold.md rename to docs/src/main/mdoc/typeclasses/fold.md diff --git a/docs/src/main/tut/typeclasses/functor.md b/docs/src/main/mdoc/typeclasses/functor.md similarity index 100% rename from docs/src/main/tut/typeclasses/functor.md rename to docs/src/main/mdoc/typeclasses/functor.md diff --git a/docs/src/main/tut/typeclasses/group.md b/docs/src/main/mdoc/typeclasses/group.md similarity index 100% rename from docs/src/main/tut/typeclasses/group.md rename to docs/src/main/mdoc/typeclasses/group.md diff --git a/docs/src/main/tut/typeclasses/hash128.md b/docs/src/main/mdoc/typeclasses/hash128.md similarity index 100% rename from docs/src/main/tut/typeclasses/hash128.md rename to docs/src/main/mdoc/typeclasses/hash128.md diff --git a/docs/src/main/tut/typeclasses/metric.md b/docs/src/main/mdoc/typeclasses/metric.md similarity index 100% rename from docs/src/main/tut/typeclasses/metric.md rename to docs/src/main/mdoc/typeclasses/metric.md diff --git a/docs/src/main/tut/typeclasses/monad.md b/docs/src/main/mdoc/typeclasses/monad.md similarity index 100% rename from docs/src/main/tut/typeclasses/monad.md rename to docs/src/main/mdoc/typeclasses/monad.md diff --git a/docs/src/main/tut/typeclasses/monoid.md b/docs/src/main/mdoc/typeclasses/monoid.md similarity index 100% rename from docs/src/main/tut/typeclasses/monoid.md rename to docs/src/main/mdoc/typeclasses/monoid.md diff --git a/docs/src/main/tut/typeclasses/predecessible.md b/docs/src/main/mdoc/typeclasses/predecessible.md similarity index 100% rename from docs/src/main/tut/typeclasses/predecessible.md rename to docs/src/main/mdoc/typeclasses/predecessible.md diff --git a/docs/src/main/tut/typeclasses/preparer.md b/docs/src/main/mdoc/typeclasses/preparer.md similarity index 100% rename from docs/src/main/tut/typeclasses/preparer.md rename to 
docs/src/main/mdoc/typeclasses/preparer.md diff --git a/docs/src/main/tut/typeclasses/ring.md b/docs/src/main/mdoc/typeclasses/ring.md similarity index 100% rename from docs/src/main/tut/typeclasses/ring.md rename to docs/src/main/mdoc/typeclasses/ring.md diff --git a/docs/src/main/tut/typeclasses/semigroup.md b/docs/src/main/mdoc/typeclasses/semigroup.md similarity index 100% rename from docs/src/main/tut/typeclasses/semigroup.md rename to docs/src/main/mdoc/typeclasses/semigroup.md diff --git a/docs/src/main/tut/typeclasses/successible.md b/docs/src/main/mdoc/typeclasses/successible.md similarity index 100% rename from docs/src/main/tut/typeclasses/successible.md rename to docs/src/main/mdoc/typeclasses/successible.md diff --git a/docs/src/main/tut/typeclasses/typeclasses.md b/docs/src/main/mdoc/typeclasses/typeclasses.md similarity index 100% rename from docs/src/main/tut/typeclasses/typeclasses.md rename to docs/src/main/mdoc/typeclasses/typeclasses.md diff --git a/project/plugins.sbt b/project/plugins.sbt index ad50a18e4..8b43685e8 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,7 +5,7 @@ resolvers ++= Seq( ) ) -addSbtPlugin("com.47deg" % "sbt-microsites" % "1.2.1") +addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.1") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") From 360b3df5f7f2fe0710cd6a76de80709a13e65d7d Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 3 Feb 2021 00:17:39 +0100 Subject: [PATCH 143/306] Update sbt-microsites to 1.3.2 (#921) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 8b43685e8..2a92852dd 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,7 +5,7 @@ resolvers ++= Seq( ) ) -addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.1") +addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.2") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") From ba3872d0b1552f60948dfae5b7890fc841ef4756 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sat, 6 Feb 2021 17:05:22 +0000 Subject: [PATCH 144/306] Update scala 2.12 and 2.13 (#913) --- .github/workflows/ci.yml | 9 +++++---- README.md | 2 +- build.sbt | 12 ++++++------ 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7d8a36620..5d5250121 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,8 +29,8 @@ jobs: - 11 scala: - 2.11.12 - - 2.12.12 - - 2.13.3 + - 2.12.13 + - 2.13.4 test-coverage: runs-on: ubuntu-latest steps: @@ -40,7 +40,7 @@ jobs: - name: java ${{matrix.java}} setup uses: olafurpg/setup-scala@v10 - run: | - sbt coverage test coverageReport + sbt ++2.12.12 coverage test coverageReport bash <(curl -s https://codecov.io/bash) mimaReport: runs-on: ubuntu-latest @@ -60,7 +60,8 @@ jobs: - 11 scala: - 2.11.12 - - 2.12.12 + - 2.12.13 + - 2.13.4 microsite: runs-on: ubuntu-latest steps: diff --git a/README.md b/README.md index 2b509b1be..7091008de 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ See the [Algebird website](https://twitter.github.io/algebird) for more informat ```scala > sbt algebird-core/console -Welcome to Scala 2.12.12 (OpenJDK 64-Bit Server VM, Java 11.0.1). 
+Welcome to Scala 2.12.13 (OpenJDK 64-Bit Server VM, Java 11.0.1). Type in expressions for evaluation. Or try :help. scala> import com.twitter.algebird._ diff --git a/build.sbt b/build.sbt index e834fb2c1..4c5e7b29b 100644 --- a/build.sbt +++ b/build.sbt @@ -39,7 +39,7 @@ crossScalaVersions := Nil val sharedSettings = Seq( organization := "com.twitter", - scalaVersion := "2.12.12", + scalaVersion := "2.12.13", crossScalaVersions := Seq("2.11.12", scalaVersion.value), resolvers ++= Seq( Opts.resolver.sonatypeSnapshots, @@ -210,7 +210,7 @@ def module(name: String) = { } lazy val algebirdCore = module("core").settings( - crossScalaVersions += "2.13.3", + crossScalaVersions += "2.13.4", initialCommands := """ import com.twitter.algebird._ """.stripMargin('|'), @@ -240,7 +240,7 @@ lazy val algebirdCore = module("core").settings( lazy val algebirdTest = module("test") .settings( testOptions in Test ++= Seq(Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "4")), - crossScalaVersions += "2.13.3", + crossScalaVersions += "2.13.4", libraryDependencies ++= Seq( "org.scalacheck" %% "scalacheck" % scalacheckVersion, @@ -271,14 +271,14 @@ lazy val algebirdBenchmark = module("benchmark") lazy val algebirdUtil = module("util") .settings( - crossScalaVersions += "2.13.3", + crossScalaVersions += "2.13.4", libraryDependencies ++= Seq("com.twitter" %% "util-core" % utilVersion) ) .dependsOn(algebirdCore, algebirdTest % "test->test") lazy val algebirdBijection = module("bijection") .settings( - crossScalaVersions += "2.13.3", + crossScalaVersions += "2.13.4", libraryDependencies += "com.twitter" %% "bijection-core" % bijectionVersion ) .dependsOn(algebirdCore, algebirdTest % "test->test") @@ -295,7 +295,7 @@ lazy val algebirdSpark = module("spark") lazy val algebirdGeneric = module("generic") .settings( - crossScalaVersions += "2.13.3", + crossScalaVersions += "2.13.4", libraryDependencies ++= Seq( "com.chuusai" %% "shapeless" % "2.3.3", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" From 8b1d46efd80e7e996d8b40f7a916d4e14a630d91 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Feb 2021 12:54:50 +0000 Subject: [PATCH 145/306] Bump actions/setup-ruby from v1.1.2 to v1.1.3 (#922) Bumps [actions/setup-ruby](https://github.com/actions/setup-ruby) from v1.1.2 to v1.1.3. 
- [Release notes](https://github.com/actions/setup-ruby/releases) - [Commits](https://github.com/actions/setup-ruby/compare/v1.1.2...e932e7af67fc4a8fc77bd86b744acd4e42fe3543) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5d5250121..354aed233 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -67,7 +67,7 @@ jobs: steps: - uses: actions/checkout@v2.3.4 - name: Ruby setup - uses: actions/setup-ruby@v1.1.2 + uses: actions/setup-ruby@v1.1.3 with: ruby-version: 2.6 - run: | From 49bd4cb79ace37e68026c20964d1a727ac777017 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 11 Feb 2021 17:57:59 +0100 Subject: [PATCH 146/306] Update util-core to 21.2.0 (#924) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 4c5e7b29b..0199c82b5 100644 --- a/build.sbt +++ b/build.sbt @@ -12,7 +12,7 @@ val scalaTestVersion = "3.2.3" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.4.1" -val utilVersion = "21.1.0" +val utilVersion = "21.2.0" val sparkVersion = "2.4.7" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = From 3781b98cd95fd2f4cfa2827ee6f996e413de14bd Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 13 Feb 2021 20:55:49 +0100 Subject: [PATCH 147/306] Update junit to 4.13.2 (#925) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 0199c82b5..62aedfd8f 100644 --- a/build.sbt +++ b/build.sbt @@ -71,7 +71,7 @@ val sharedSettings = Seq( }, javacOptions ++= Seq("-target", "1.6", "-source", "1.6"), libraryDependencies ++= Seq( - "junit" % "junit" % "4.13.1" % Test, + "junit" % "junit" % "4.13.2" % Test, "com.novocode" % "junit-interface" % "0.11" % Test ), // Publishing options: From b1c79679d401dddf8f9100508f5880ad9b61d974 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 18 Feb 2021 09:56:45 +0100 Subject: [PATCH 148/306] Update scalatest to 3.2.4 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 62aedfd8f..bb8fea758 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val javaEwahVersion = "1.1.7" val kindProjectorVersion = "0.11.3" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.3" +val scalaTestVersion = "3.2.4" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.4.1" From 34f187b60a6663fb89ef62fbda616bad9b658732 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 17 Feb 2021 03:39:08 +0100 Subject: [PATCH 149/306] Update scala-collection-compat to 2.4.2 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index bb8fea758..8f2a399c6 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.4" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" -val scalaCollectionCompat = "2.4.1" +val scalaCollectionCompat = "2.4.2" val utilVersion = "21.2.0" val sparkVersion = "2.4.7" From 93f37fad94ee9272d8194eba3ac653b31b87c243 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 19 
Feb 2021 18:09:42 +0100 Subject: [PATCH 150/306] Update scalatest to 3.2.5 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 8f2a399c6..156cac3e0 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val javaEwahVersion = "1.1.7" val kindProjectorVersion = "0.11.3" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.4" +val scalaTestVersion = "3.2.5" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.4.2" From b69930783ee9daa361921e6fc3a9aecfd263e13a Mon Sep 17 00:00:00 2001 From: enricoap Date: Wed, 24 Feb 2021 22:21:52 +0100 Subject: [PATCH 151/306] Issue #326 - Some HyperLogLog tests are not testing what we think. (#923) * Changed test for jRhoWMatchTest, variable cardinality for input byte arrays * Changed test for jRhoWMatchTest, variable cardinality for input byte arrays * Corrected formatting --- .../com/twitter/algebird/HyperLogLogTest.scala | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala index f2bd75dc5..afd61692e 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala @@ -91,17 +91,16 @@ class HyperLogLogLaws extends CheckProperties { class jRhoWMatchTest extends AnyPropSpec with ScalaCheckPropertyChecks with Matchers { import HyperLogLog._ - implicit val hashGen: Arbitrary[Array[Byte]] = Arbitrary { - Gen.containerOfN[Array, Byte](16, Arbitrary.arbitrary[Byte]) - } - /* For some reason choose in this version of scalacheck - is bugged so I need the suchThat clause */ - implicit val bitsGen: Arbitrary[Int] = Arbitrary { - Gen.choose(4, 31).suchThat(x => x >= 4 && x <= 31) - } + /* Generate input arrays whose size is proportional to the bits (n) */ + val bitsGen: Gen[(Array[Byte], Int)] = for { + bits <- Gen.choose(4, 31) + in <- Gen.containerOfN[Array, Byte](4 * bits, Arbitrary.arbitrary[Byte]) + } yield (in, bits) property("jRhoW matches referenceJRhoW") { - forAll((in: Array[Byte], bits: Int) => assert(jRhoW(in, bits) == ReferenceHyperLogLog.jRhoW(in, bits))) + forAll(bitsGen) { case (in: Array[Byte], bits: Int) => + assert(jRhoW(in, bits) == ReferenceHyperLogLog.jRhoW(in, bits)) + } } } From 052417585e7134f81f06bc2d878e7460767b552e Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 27 Feb 2021 21:13:01 +0100 Subject: [PATCH 152/306] Update sbt-scalafix to 0.9.26 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 2a92852dd..350ab88b0 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,5 +12,5 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.25") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.26") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.5") From cb41ce4b15cb58dd0220b256c515b625d48f9a0e Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 8 Mar 2021 01:31:57 +0100 Subject: [PATCH 153/306] Update sbt to 1.4.8 --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/project/build.properties b/project/build.properties index 0b2e09c5a..b5ef6fff3 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.7 +sbt.version=1.4.8 From dae324936f648c857f7ac458b4d43386b553f195 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 8 Mar 2021 09:15:57 +0100 Subject: [PATCH 154/306] Update scalatest to 3.2.6 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 156cac3e0..e28d10fd4 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val javaEwahVersion = "1.1.7" val kindProjectorVersion = "0.11.3" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.5" +val scalaTestVersion = "3.2.6" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.4.2" From 6b94a1436cc9cdc84549b746749e9c38f487db24 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 11 Mar 2021 13:25:47 +0100 Subject: [PATCH 155/306] Update sbt-ci-release to 1.5.6 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 350ab88b0..6251f5014 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -13,4 +13,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.26") -addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.5") +addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.6") From 0f445bcf9c472ab34cd52b409c36b4d566e48566 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 10 Mar 2021 08:51:06 +0100 Subject: [PATCH 156/306] Update sbt to 1.4.9 --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index b5ef6fff3..dbae93bcf 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.8 +sbt.version=1.4.9 From d84d8139671e84f685233b5a8d47b970281ac844 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Mar 2021 04:03:30 +0000 Subject: [PATCH 157/306] Bump coursier/cache-action from v5 to v6 Bumps [coursier/cache-action](https://github.com/coursier/cache-action) from v5 to v6. 
- [Release notes](https://github.com/coursier/cache-action/releases) - [Commits](https://github.com/coursier/cache-action/compare/v5...730a6a454f386fff4be026f3e304ee7fe68912ac) Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 354aed233..ec3575f05 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,7 +7,7 @@ jobs: steps: - uses: actions/checkout@v2.3.4 - name: cache SBT - uses: coursier/cache-action@v5 + uses: coursier/cache-action@v6 - name: Java 11 setup uses: olafurpg/setup-scala@v10 - run: sbt "; scalafmtCheckAll; scalafmtSbtCheck" "; scalafixEnable; scalafixAll --check" @@ -16,7 +16,7 @@ jobs: steps: - uses: actions/checkout@v2.3.4 - name: cache SBT - uses: coursier/cache-action@v5 + uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup uses: olafurpg/setup-scala@v10 with: @@ -36,7 +36,7 @@ jobs: steps: - uses: actions/checkout@v2.3.4 - name: cache SBT - uses: coursier/cache-action@v5 + uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup uses: olafurpg/setup-scala@v10 - run: | @@ -47,7 +47,7 @@ jobs: steps: - uses: actions/checkout@v2.3.4 - name: cache SBT - uses: coursier/cache-action@v5 + uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup uses: olafurpg/setup-scala@v10 with: @@ -74,7 +74,7 @@ jobs: gem install sass gem install jekyll -v 3.2.1 - name: cache SBT - uses: coursier/cache-action@v5 + uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup uses: olafurpg/setup-scala@v10 - run: sbt docs/makeMicrosite From fce81e55ae539bc85c9b10f158766ab755e9e112 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 23 Mar 2021 11:25:23 +0100 Subject: [PATCH 158/306] Update sbt-ci-release to 1.5.7 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 6251f5014..19be3d268 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -13,4 +13,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.26") -addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.6") +addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") From 1d08c7e02726cefa038385a43fae12dc45abd012 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 1 Apr 2021 03:06:09 +0200 Subject: [PATCH 159/306] Update sbt-microsites to 1.3.3 (#939) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 19be3d268..f8c4b212f 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,7 +5,7 @@ resolvers ++= Seq( ) ) -addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.2") +addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.3") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") From c8364c720ccdadf7007c3312c5d545b9ef269f5e Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 1 Apr 2021 03:06:20 +0200 Subject: [PATCH 160/306] Update scala-collection-compat to 2.4.3 (#938) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt 
b/build.sbt index e28d10fd4..e7e23687b 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.6" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" -val scalaCollectionCompat = "2.4.2" +val scalaCollectionCompat = "2.4.3" val utilVersion = "21.2.0" val sparkVersion = "2.4.7" From 5dcf3fb389e67ad34b98c5e9f0f08c48e3eb6e27 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 1 Apr 2021 03:06:30 +0200 Subject: [PATCH 161/306] Update sbt-scalafix to 0.9.27 (#937) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index f8c4b212f..8181df18e 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,5 +12,5 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.26") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.27") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") From 0fad815d5900f6e6b6e27a0cb367d162f057d59b Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 5 Apr 2021 04:30:01 +0200 Subject: [PATCH 162/306] Update sbt to 1.5.0 (#941) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index dbae93bcf..e67343ae7 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.4.9 +sbt.version=1.5.0 From 4b9446080c5714db6c88c0f173ac760162c556a7 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 9 Apr 2021 20:06:34 +0200 Subject: [PATCH 163/306] Update shapeless to 2.3.4 (#944) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index e7e23687b..5699e20d0 100644 --- a/build.sbt +++ b/build.sbt @@ -297,7 +297,7 @@ lazy val algebirdGeneric = module("generic") .settings( crossScalaVersions += "2.13.4", libraryDependencies ++= Seq( - "com.chuusai" %% "shapeless" % "2.3.3", + "com.chuusai" %% "shapeless" % "2.3.4", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" ) ++ { if (isScala213x(scalaVersion.value)) { From c539bec5828fd5c3d3cd96797dc0cb7b2dbe283b Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 1 Apr 2021 09:40:44 +0200 Subject: [PATCH 164/306] Update scalatest to 3.2.7 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 5699e20d0..25e3874da 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val javaEwahVersion = "1.1.7" val kindProjectorVersion = "0.11.3" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.6" +val scalaTestVersion = "3.2.7" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.4.3" From c094d45bfb608fdd129a4bcab1376dd6827cab41 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 21 Apr 2021 17:32:21 +0200 Subject: [PATCH 165/306] Update scalatest to 3.2.8 (#945) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 25e3874da..002293cf5 100644 --- 
a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val javaEwahVersion = "1.1.7" val kindProjectorVersion = "0.11.3" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.7" +val scalaTestVersion = "3.2.8" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.4.3" From 8dabbe99b4b3d3880f6f2a6a0614775cde1561b8 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 26 Apr 2021 17:22:22 +0200 Subject: [PATCH 166/306] Update sbt to 1.5.1 --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index e67343ae7..f0be67b9f 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.0 +sbt.version=1.5.1 From 344dadd68c8e79eb8aeaa7099cfa78b45f2935e6 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 27 Apr 2021 17:25:31 +0200 Subject: [PATCH 167/306] Update sbt-microsites to 1.3.4 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 8181df18e..7d3f752ec 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,7 +5,7 @@ resolvers ++= Seq( ) ) -addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.3") +addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") From c480e74fbe70682ee09080f09f5d629c6061471c Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 3 May 2021 19:32:49 +0200 Subject: [PATCH 168/306] Update sbt-scoverage to 1.7.2 (#950) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 7d3f752ec..8fd8a657a 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.7.2") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.27") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") From b8fd9f9a2c7aad53c0ee1a165c14887d2fb54242 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Tue, 4 May 2021 11:20:27 +0200 Subject: [PATCH 169/306] Update sbt-scoverage to 1.7.3 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 8fd8a657a..4c8e3315b 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.7.2") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.7.3") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.27") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") From 6d12bdcacedadca4c01528b2ea457a5c851d44a8 Mon Sep 17 00:00:00 2001 From: 
Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 5 May 2021 19:00:18 +0200 Subject: [PATCH 170/306] Update sbt-mima-plugin to 0.9.0 (#952) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 4c8e3315b..afae9481a 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -8,7 +8,7 @@ resolvers ++= Seq( addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.7.3") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") From 44451a9d3e7ac53f2d69a8f8929e4ff867650b6f Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 5 May 2021 21:25:20 +0200 Subject: [PATCH 171/306] Update shapeless to 2.3.5 (#953) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 002293cf5..c3a78d0d6 100644 --- a/build.sbt +++ b/build.sbt @@ -297,7 +297,7 @@ lazy val algebirdGeneric = module("generic") .settings( crossScalaVersions += "2.13.4", libraryDependencies ++= Seq( - "com.chuusai" %% "shapeless" % "2.3.4", + "com.chuusai" %% "shapeless" % "2.3.5", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" ) ++ { if (isScala213x(scalaVersion.value)) { From c0cbe795fe092a0a3581f8bbcc99869e1003b8f7 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 8 May 2021 03:08:23 +0200 Subject: [PATCH 172/306] Update JavaEWAH to 1.1.8 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index c3a78d0d6..def2b2844 100644 --- a/build.sbt +++ b/build.sbt @@ -4,7 +4,7 @@ import pl.project13.scala.sbt.JmhPlugin val algebraVersion = "2.0.0" val bijectionVersion = "0.9.7" -val javaEwahVersion = "1.1.7" +val javaEwahVersion = "1.1.8" val kindProjectorVersion = "0.11.3" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" From eba66d432007c37e5eaf105644d2e3f09f31feaa Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sat, 8 May 2021 17:54:43 +0100 Subject: [PATCH 173/306] Update scala to 2.13.5 (#955) --- .github/workflows/ci.yml | 4 ++-- build.sbt | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ec3575f05..bb68f694b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,7 +30,7 @@ jobs: scala: - 2.11.12 - 2.12.13 - - 2.13.4 + - 2.13.5 test-coverage: runs-on: ubuntu-latest steps: @@ -61,7 +61,7 @@ jobs: scala: - 2.11.12 - 2.12.13 - - 2.13.4 + - 2.13.5 microsite: runs-on: ubuntu-latest steps: diff --git a/build.sbt b/build.sbt index def2b2844..e59a4d077 100644 --- a/build.sbt +++ b/build.sbt @@ -210,7 +210,7 @@ def module(name: String) = { } lazy val algebirdCore = module("core").settings( - crossScalaVersions += "2.13.4", + crossScalaVersions += "2.13.5", initialCommands := """ import com.twitter.algebird._ """.stripMargin('|'), @@ -240,7 +240,7 @@ lazy val algebirdCore = module("core").settings( lazy val algebirdTest = module("test") .settings( testOptions in Test ++= Seq(Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "4")), - crossScalaVersions += "2.13.4", + crossScalaVersions += "2.13.5", 
libraryDependencies ++= Seq( "org.scalacheck" %% "scalacheck" % scalacheckVersion, @@ -271,14 +271,14 @@ lazy val algebirdBenchmark = module("benchmark") lazy val algebirdUtil = module("util") .settings( - crossScalaVersions += "2.13.4", + crossScalaVersions += "2.13.5", libraryDependencies ++= Seq("com.twitter" %% "util-core" % utilVersion) ) .dependsOn(algebirdCore, algebirdTest % "test->test") lazy val algebirdBijection = module("bijection") .settings( - crossScalaVersions += "2.13.4", + crossScalaVersions += "2.13.5", libraryDependencies += "com.twitter" %% "bijection-core" % bijectionVersion ) .dependsOn(algebirdCore, algebirdTest % "test->test") @@ -295,7 +295,7 @@ lazy val algebirdSpark = module("spark") lazy val algebirdGeneric = module("generic") .settings( - crossScalaVersions += "2.13.4", + crossScalaVersions += "2.13.5", libraryDependencies ++= Seq( "com.chuusai" %% "shapeless" % "2.3.5", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" From 5f716467f7733d276bfc1d51e79009adeb16160c Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 9 May 2021 19:31:38 +0200 Subject: [PATCH 174/306] Update shapeless to 2.3.6 (#956) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index e59a4d077..df5ea02aa 100644 --- a/build.sbt +++ b/build.sbt @@ -297,7 +297,7 @@ lazy val algebirdGeneric = module("generic") .settings( crossScalaVersions += "2.13.5", libraryDependencies ++= Seq( - "com.chuusai" %% "shapeless" % "2.3.5", + "com.chuusai" %% "shapeless" % "2.3.6", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" ) ++ { if (isScala213x(scalaVersion.value)) { From f5c50875fe71c34963fbffb5ef41f2053f8cbe7c Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 10 May 2021 03:50:29 +0200 Subject: [PATCH 175/306] Update sbt to 1.5.2 --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index f0be67b9f..19479ba46 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.1 +sbt.version=1.5.2 From 99e0f7e3a394088cc1d6f593da07a3ae103d9a54 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 10 May 2021 19:49:29 +0200 Subject: [PATCH 176/306] Update JavaEWAH to 1.1.9 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index df5ea02aa..67f5ff4fb 100644 --- a/build.sbt +++ b/build.sbt @@ -4,7 +4,7 @@ import pl.project13.scala.sbt.JmhPlugin val algebraVersion = "2.0.0" val bijectionVersion = "0.9.7" -val javaEwahVersion = "1.1.8" +val javaEwahVersion = "1.1.9" val kindProjectorVersion = "0.11.3" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" From ca125e2e7bf03e7170d8e8259db192b95b11d283 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 10 May 2021 23:36:09 +0200 Subject: [PATCH 177/306] Update sbt-scoverage to 1.8.0 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index afae9481a..5d6c312fc 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.7.3") +addSbtPlugin("org.scoverage" 
% "sbt-scoverage" % "1.8.0") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.27") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") From 99623c8d9ab87296821c0c0e164d7afc4ab306a0 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 12 May 2021 09:02:42 +0200 Subject: [PATCH 178/306] Update sbt-jmh to 0.4.1 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 5d6c312fc..7b4725624 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -11,6 +11,6 @@ addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.0") -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.1") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.27") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") From d2a60d4617a411ea8404c8a75a592c8e8544213f Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 13 May 2021 06:51:17 +0200 Subject: [PATCH 179/306] Update sbt-jmh to 0.4.2 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 7b4725624..65b5873c8 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -11,6 +11,6 @@ addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.0") -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.1") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.2") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.27") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") From e0fe621e9ae46d0988260cfb3ce49562e931bac0 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 14 May 2021 03:32:51 +0200 Subject: [PATCH 180/306] Update scala-collection-compat to 2.4.4 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 67f5ff4fb..95779bebb 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.8" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" -val scalaCollectionCompat = "2.4.3" +val scalaCollectionCompat = "2.4.4" val utilVersion = "21.2.0" val sparkVersion = "2.4.7" From 970738fcaba73bd9661f28f83a6e2a0fc889cf91 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 13 May 2021 17:57:11 +0200 Subject: [PATCH 181/306] Update sbt-mima-plugin to 0.9.1 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 65b5873c8..ce4d89a1d 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -8,7 +8,7 @@ resolvers ++= Seq( addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.0") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.0") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.2") From 3616b6d98b5d03c59714a7a7ccf68c0d665022d0 Mon 
Sep 17 00:00:00 2001 From: Scala Steward Date: Thu, 13 May 2021 15:28:27 +0200 Subject: [PATCH 182/306] Update kind-projector to 0.12.0 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 95779bebb..e7b2f740d 100644 --- a/build.sbt +++ b/build.sbt @@ -5,7 +5,7 @@ import pl.project13.scala.sbt.JmhPlugin val algebraVersion = "2.0.0" val bijectionVersion = "0.9.7" val javaEwahVersion = "1.1.9" -val kindProjectorVersion = "0.11.3" +val kindProjectorVersion = "0.12.0" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.8" From be9f0713de70aed29c4f1cebc28f1ee6b0c1c68b Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 14 May 2021 20:56:09 +0200 Subject: [PATCH 183/306] Update scalatest to 3.2.9 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index e7b2f740d..10d5bdcbf 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val javaEwahVersion = "1.1.9" val kindProjectorVersion = "0.12.0" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.8" +val scalaTestVersion = "3.2.9" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.4.4" From 11493f3c20a17fa014d7c0caf13d9125f152a272 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sat, 15 May 2021 01:45:25 +0100 Subject: [PATCH 184/306] Fix SpaceSaver buffer overflow (#968) --- .../src/main/scala/com/twitter/algebird/SpaceSaver.scala | 4 ++-- .../main/scala/com/twitter/algebird/scalacheck/Gen.scala | 9 +++++++-- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala index bc2757570..08fbe1bec 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala @@ -40,12 +40,12 @@ object SpaceSaver { val itemAsBytes = tSerializer(item) val itemLength = itemAsBytes.length //1 for the type, 4 for capacity, 4 for itemAsBytes.length - val buffer = new Array[Byte](1 + 4 + 4 + itemAsBytes.length) + val buffer = new Array[Byte](1 + 4 + 4 + itemLength) ByteBuffer .wrap(buffer) .put(1: Byte) .putInt(capacity) - .putInt(itemLength.toByte) + .putInt(itemLength) .put(itemAsBytes) buffer diff --git a/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Gen.scala b/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Gen.scala index c3043f8a9..20e6f08f1 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Gen.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Gen.scala @@ -52,18 +52,23 @@ object gen extends ExpHistGen with IntervalGen { m4 <- choose(0, 1e50) } yield new Moments(m0, m1, m2, m3, m4) + private val genLongString: Gen[String] = for { + size <- Gen.choose(100, 300) + str <- Gen.listOfN(size, Gen.alphaChar).map(_.mkString) + } yield str + def genStringSpaceSaver: Gen[SpaceSaver[String]] = Gen.frequency((1, genSSOneSpaceSaver), (10, genSSManySpaceSaver)) def genSSOneSpaceSaver: Gen[SpaceSaver[String]] = for { capacity <- choose(2, 100) - item <- Gen.alphaStr + item <- genLongString } yield SpaceSaver(capacity, item) def genFixedSSOneSpaceSaver: Gen[SpaceSaver[String]] = for { - item <- Gen.alphaStr + item <- genLongString } yield SpaceSaver(10, item) def genSSManySpaceSaver: Gen[SpaceSaver[String]] = From 
083852a5b2257f3c3d5d9a6bd2e0ad4cb5e4247c Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 15 May 2021 08:00:33 +0200 Subject: [PATCH 185/306] Update spark-core to 2.4.8 (#970) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 10d5bdcbf..d68cbf6c3 100644 --- a/build.sbt +++ b/build.sbt @@ -13,7 +13,7 @@ val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.4.4" val utilVersion = "21.2.0" -val sparkVersion = "2.4.7" +val sparkVersion = "2.4.8" def scalaVersionSpecificFolders(srcBaseDir: java.io.File, scalaVersion: String) = CrossVersion.partialVersion(scalaVersion) match { From 10bfc349456344cbe188df11673b4a96e8a62a33 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Fri, 14 May 2021 21:12:27 +0100 Subject: [PATCH 186/306] Remove sbt deprecated syntax --- build.sbt | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/build.sbt b/build.sbt index d68cbf6c3..66a9e083b 100644 --- a/build.sbt +++ b/build.sbt @@ -45,7 +45,7 @@ val sharedSettings = Seq( Opts.resolver.sonatypeSnapshots, Opts.resolver.sonatypeReleases ), - parallelExecution in Test := true, + Test / parallelExecution := true, scalacOptions ++= Seq( "-unchecked", "-deprecation", @@ -229,17 +229,17 @@ lazy val algebirdCore = module("core").settings( } }, addCompilerPlugin(("org.typelevel" % "kind-projector" % kindProjectorVersion).cross(CrossVersion.full)), - sourceGenerators in Compile += Def.task { - GenTupleAggregators.gen((sourceManaged in Compile).value) + Compile / sourceGenerators += Def.task { + GenTupleAggregators.gen((Compile / sourceManaged).value) }.taskValue, // Scala 2.12's doc task was failing. 
- sources in (Compile, doc) ~= (_.filterNot(_.absolutePath.contains("javaapi"))), - testOptions in Test := Seq(Tests.Argument(TestFrameworks.JUnit, "-a")) + Compile / doc / sources ~= (_.filterNot(_.absolutePath.contains("javaapi"))), + Test / testOptions := Seq(Tests.Argument(TestFrameworks.JUnit, "-a")) ) lazy val algebirdTest = module("test") .settings( - testOptions in Test ++= Seq(Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "4")), + Test / testOptions ++= Seq(Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "4")), crossScalaVersions += "2.13.5", libraryDependencies ++= Seq( @@ -336,18 +336,18 @@ lazy val docSettings = Seq( ), autoAPIMappings := true, docsMappingsAPIDir := "api", - addMappingsToSiteDir(mappings in (ScalaUnidoc, packageDoc), docsMappingsAPIDir), + addMappingsToSiteDir(ScalaUnidoc / packageDoc / mappings, docsMappingsAPIDir), ghpagesNoJekyll := false, - fork in (ScalaUnidoc, unidoc) := true, - scalacOptions in (ScalaUnidoc, unidoc) ++= Seq( + ScalaUnidoc / unidoc / fork := true, + ScalaUnidoc / unidoc / scalacOptions ++= Seq( "-doc-source-url", "https://github.com/twitter/algebird/tree/develop€{FILE_PATH}.scala", "-sourcepath", - baseDirectory.in(LocalRootProject).value.getAbsolutePath, + (LocalRootProject / baseDirectory).value.getAbsolutePath, "-diagrams" ), git.remoteRepo := "git@github.com:twitter/algebird.git", - includeFilter in makeSite := "*.html" | "*.css" | "*.png" | "*.jpg" | "*.gif" | "*.js" | "*.swf" | "*.yml" | "*.md" + makeSite / includeFilter := "*.html" | "*.css" | "*.png" | "*.jpg" | "*.gif" | "*.js" | "*.swf" | "*.yml" | "*.md" ) // Documentation is generated for projects defined in @@ -361,6 +361,6 @@ lazy val docs = project .settings( addCompilerPlugin(("org.typelevel" % "kind-projector" % kindProjectorVersion).cross(CrossVersion.full)), mdocIn := sourceDirectory.value / "main" / "mdoc", - sources in (ScalaUnidoc, unidoc) ~= (_.filterNot(_.absolutePath.contains("javaapi"))) + ScalaUnidoc / unidoc / sources ~= (_.filterNot(_.absolutePath.contains("javaapi"))) ) .dependsOn(algebirdCore) From b43c0f30111a67f006221620e597d4c4563bcc1a Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Sat, 15 May 2021 13:09:23 +0200 Subject: [PATCH 187/306] Update kind-projector to 0.13.0 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 66a9e083b..e0199fe2a 100644 --- a/build.sbt +++ b/build.sbt @@ -5,7 +5,7 @@ import pl.project13.scala.sbt.JmhPlugin val algebraVersion = "2.0.0" val bijectionVersion = "0.9.7" val javaEwahVersion = "1.1.9" -val kindProjectorVersion = "0.12.0" +val kindProjectorVersion = "0.13.0" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.9" From b8b98d924e981cde107400e21f32a343ffc268d8 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 17 May 2021 03:00:46 +0200 Subject: [PATCH 188/306] Update shapeless to 2.3.7 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index e0199fe2a..eb7f2f620 100644 --- a/build.sbt +++ b/build.sbt @@ -297,7 +297,7 @@ lazy val algebirdGeneric = module("generic") .settings( crossScalaVersions += "2.13.5", libraryDependencies ++= Seq( - "com.chuusai" %% "shapeless" % "2.3.6", + "com.chuusai" %% "shapeless" % "2.3.7", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" ) ++ { if (isScala213x(scalaVersion.value)) { From 3b4d05cb8d476d5dffe416c76a6ea540b05c69e1 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Mon, 17 May 2021 15:01:25 
+0200 Subject: [PATCH 189/306] Update scala-library, scala-reflect to 2.13.6 --- build.sbt | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/build.sbt b/build.sbt index eb7f2f620..ab36de2b3 100644 --- a/build.sbt +++ b/build.sbt @@ -210,7 +210,7 @@ def module(name: String) = { } lazy val algebirdCore = module("core").settings( - crossScalaVersions += "2.13.5", + crossScalaVersions += "2.13.6", initialCommands := """ import com.twitter.algebird._ """.stripMargin('|'), @@ -240,7 +240,7 @@ lazy val algebirdCore = module("core").settings( lazy val algebirdTest = module("test") .settings( Test / testOptions ++= Seq(Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "4")), - crossScalaVersions += "2.13.5", + crossScalaVersions += "2.13.6", libraryDependencies ++= Seq( "org.scalacheck" %% "scalacheck" % scalacheckVersion, @@ -271,14 +271,14 @@ lazy val algebirdBenchmark = module("benchmark") lazy val algebirdUtil = module("util") .settings( - crossScalaVersions += "2.13.5", + crossScalaVersions += "2.13.6", libraryDependencies ++= Seq("com.twitter" %% "util-core" % utilVersion) ) .dependsOn(algebirdCore, algebirdTest % "test->test") lazy val algebirdBijection = module("bijection") .settings( - crossScalaVersions += "2.13.5", + crossScalaVersions += "2.13.6", libraryDependencies += "com.twitter" %% "bijection-core" % bijectionVersion ) .dependsOn(algebirdCore, algebirdTest % "test->test") @@ -295,7 +295,7 @@ lazy val algebirdSpark = module("spark") lazy val algebirdGeneric = module("generic") .settings( - crossScalaVersions += "2.13.5", + crossScalaVersions += "2.13.6", libraryDependencies ++= Seq( "com.chuusai" %% "shapeless" % "2.3.7", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" From 1a8a8ee31de3563777faba20d83ea0c05dcf75b5 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Mon, 17 May 2021 22:25:30 +0100 Subject: [PATCH 190/306] Update Github actions (#974) --- .github/workflows/ci.yml | 4 ++-- .github/workflows/release.yml | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bb68f694b..c67eba145 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,7 +30,7 @@ jobs: scala: - 2.11.12 - 2.12.13 - - 2.13.5 + - 2.13.6 test-coverage: runs-on: ubuntu-latest steps: @@ -61,7 +61,7 @@ jobs: scala: - 2.11.12 - 2.12.13 - - 2.13.5 + - 2.13.6 microsite: runs-on: ubuntu-latest steps: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 9cfdc559d..14ad8e464 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -9,7 +9,6 @@ jobs: steps: - uses: actions/checkout@v1 - uses: olafurpg/setup-scala@v10 - - uses: olafurpg/setup-gpg@v3 - name: Publish ${{ github.ref }} run: sbt ci-release env: From adeac65769c7e7eeb7482ce637783f8fc73f5de4 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 20 May 2021 03:56:22 +0200 Subject: [PATCH 191/306] Update sbt-scoverage to 1.8.1 (#976) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index ce4d89a1d..4e70722b4 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") 
-addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.0") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.1") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.2") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.27") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") From f8e8bc539fed17f242b55078e3f56e636e210b22 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 20 May 2021 23:35:18 +0200 Subject: [PATCH 192/306] Update JavaEWAH to 1.1.11 (#975) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index ab36de2b3..5a7724211 100644 --- a/build.sbt +++ b/build.sbt @@ -4,7 +4,7 @@ import pl.project13.scala.sbt.JmhPlugin val algebraVersion = "2.0.0" val bijectionVersion = "0.9.7" -val javaEwahVersion = "1.1.9" +val javaEwahVersion = "1.1.11" val kindProjectorVersion = "0.13.0" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" From 16346673c218e76563bcad864ace61855e962806 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 21 May 2021 01:59:05 +0200 Subject: [PATCH 193/306] Update sbt-scalafix to 0.9.28 (#978) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 4e70722b4..48d429a4b 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,5 +12,5 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.1") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.2") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.27") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.28") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") From c8b3757a2db08b9fb809160b20fe73e74f40f272 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Fri, 21 May 2021 00:59:14 +0100 Subject: [PATCH 194/306] Fix release-drafter branch (#977) --- .github/workflows/release-drafter.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml index 17fdb961d..ba0916046 100644 --- a/.github/workflows/release-drafter.yml +++ b/.github/workflows/release-drafter.yml @@ -3,7 +3,7 @@ name: Release Drafter on: push: branches: - - master + - develop jobs: update_release_draft: From c81ee2eb9234ff1a84e5f092fe05ca4715e62a79 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 21 May 2021 17:49:30 +0200 Subject: [PATCH 195/306] Update JavaEWAH to 1.1.12 (#979) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 5a7724211..ab5748b94 100644 --- a/build.sbt +++ b/build.sbt @@ -4,7 +4,7 @@ import pl.project13.scala.sbt.JmhPlugin val algebraVersion = "2.0.0" val bijectionVersion = "0.9.7" -val javaEwahVersion = "1.1.11" +val javaEwahVersion = "1.1.12" val kindProjectorVersion = "0.13.0" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" From 8e8c93c5e876f351784b9c3e4ef871dd8e301f74 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Wed, 26 May 2021 11:34:35 +0200 Subject: [PATCH 196/306] Update sbt-mima-plugin to 0.9.2 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 48d429a4b..0217da08e 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt 
@@ -8,7 +8,7 @@ resolvers ++= Seq( addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.1") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.2") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.1") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.2") From cb5d4453195ae3dd1e26c6593a30f19ade110b34 Mon Sep 17 00:00:00 2001 From: Scala Steward Date: Fri, 28 May 2021 23:31:41 +0200 Subject: [PATCH 197/306] Update sbt-scoverage to 1.8.2 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 0217da08e..9d97f65e5 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.2") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.1") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.2") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.2") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.28") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") From fdb127b62d789b2e353b4776bbb29f4cbfc56e77 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 30 May 2021 01:18:16 +0200 Subject: [PATCH 198/306] Update scala-library, scala-reflect to 2.12.14 (#981) Co-authored-by: Filipe Regadas --- .github/workflows/ci.yml | 4 ++-- README.md | 2 +- build.sbt | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c67eba145..4cfba5d06 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,7 +29,7 @@ jobs: - 11 scala: - 2.11.12 - - 2.12.13 + - 2.12.14 - 2.13.6 test-coverage: runs-on: ubuntu-latest @@ -60,7 +60,7 @@ jobs: - 11 scala: - 2.11.12 - - 2.12.13 + - 2.12.14 - 2.13.6 microsite: runs-on: ubuntu-latest diff --git a/README.md b/README.md index 7091008de..d2b97acb1 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ See the [Algebird website](https://twitter.github.io/algebird) for more informat ```scala > sbt algebird-core/console -Welcome to Scala 2.12.13 (OpenJDK 64-Bit Server VM, Java 11.0.1). +Welcome to Scala 2.12.14 (OpenJDK 64-Bit Server VM, Java 11.0.1). Type in expressions for evaluation. Or try :help. 
scala> import com.twitter.algebird._ diff --git a/build.sbt b/build.sbt index ab5748b94..3205597ac 100644 --- a/build.sbt +++ b/build.sbt @@ -39,7 +39,7 @@ crossScalaVersions := Nil val sharedSettings = Seq( organization := "com.twitter", - scalaVersion := "2.12.13", + scalaVersion := "2.12.14", crossScalaVersions := Seq("2.11.12", scalaVersion.value), resolvers ++= Seq( Opts.resolver.sonatypeSnapshots, From c40d754eb873e24bcf44a78f26711af7d1228a49 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 1 Jun 2021 11:34:12 +0200 Subject: [PATCH 199/306] Update sbt to 1.5.3 (#985) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 19479ba46..67d27a1df 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.2 +sbt.version=1.5.3 From 6852967febe33d6d5dc070726d706d02bb3d4acd Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 1 Jun 2021 11:34:26 +0200 Subject: [PATCH 200/306] Update sbt-jmh to 0.4.3 (#986) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 9d97f65e5..4bf5a1801 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -11,6 +11,6 @@ addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.2") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.2") -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.2") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.28") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") From bb84969ef8ddf42143186df89eccdeac1f1069db Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 1 Jun 2021 19:46:14 +0200 Subject: [PATCH 201/306] Update sbt-scalafix to 0.9.29 (#983) * Update sbt-scalafix to 0.9.29 * Update sbt-scalafix to 0.9.29 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 4bf5a1801..bf5a59dd6 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,5 +12,5 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.2") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.2") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.28") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.29") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") From 32a7309a1e42cec524048ff30c02f6203245a121 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 2 Jun 2021 08:58:32 -1000 Subject: [PATCH 202/306] Bump olafurpg/setup-scala from 10 to 12 (#987) Bumps [olafurpg/setup-scala](https://github.com/olafurpg/setup-scala) from 10 to 12. - [Release notes](https://github.com/olafurpg/setup-scala/releases) - [Commits](https://github.com/olafurpg/setup-scala/compare/v10...v12) --- updated-dependencies: - dependency-name: olafurpg/setup-scala dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 10 +++++----- .github/workflows/release.yml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4cfba5d06..7b048d25f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,7 +9,7 @@ jobs: - name: cache SBT uses: coursier/cache-action@v6 - name: Java 11 setup - uses: olafurpg/setup-scala@v10 + uses: olafurpg/setup-scala@v12 - run: sbt "; scalafmtCheckAll; scalafmtSbtCheck" "; scalafixEnable; scalafixAll --check" test: runs-on: ubuntu-latest @@ -18,7 +18,7 @@ jobs: - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup - uses: olafurpg/setup-scala@v10 + uses: olafurpg/setup-scala@v12 with: java-version: ${{matrix.java}} - run: sbt "++${{matrix.scala}} test" @@ -38,7 +38,7 @@ jobs: - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup - uses: olafurpg/setup-scala@v10 + uses: olafurpg/setup-scala@v12 - run: | sbt ++2.12.12 coverage test coverageReport bash <(curl -s https://codecov.io/bash) @@ -49,7 +49,7 @@ jobs: - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup - uses: olafurpg/setup-scala@v10 + uses: olafurpg/setup-scala@v12 with: java-version: ${{matrix.java}} - run: sbt "++${{matrix.scala}} mimaReportBinaryIssues" @@ -76,5 +76,5 @@ jobs: - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup - uses: olafurpg/setup-scala@v10 + uses: olafurpg/setup-scala@v12 - run: sbt docs/makeMicrosite diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 14ad8e464..8ff51977e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 - - uses: olafurpg/setup-scala@v10 + - uses: olafurpg/setup-scala@v12 - name: Publish ${{ github.ref }} run: sbt ci-release env: From f6b8274cc1460aa0ff75d4906f38daa25ae3b1a0 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 15 Jun 2021 11:19:33 +0200 Subject: [PATCH 203/306] Update sbt to 1.5.4 (#988) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 67d27a1df..9edb75b77 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.3 +sbt.version=1.5.4 From 33e992b9817cc2d872bc2d650ef6a4293acfb7a7 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 9 Jul 2021 08:50:11 +0200 Subject: [PATCH 204/306] Update sbt-scalafmt to 2.4.3 (#989) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index bf5a59dd6..f82dbd13d 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -7,7 +7,7 @@ resolvers ++= Seq( addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.3") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.2") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.2") From 6c2f051c05b1b1b3eb3e502e0cc44eb29d4aca1f Mon Sep 17 00:00:00 2001 
From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 9 Jul 2021 13:06:41 +0200 Subject: [PATCH 205/306] Update scala-collection-compat to 2.5.0 (#990) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 3205597ac..7c780e926 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.9" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" -val scalaCollectionCompat = "2.4.4" +val scalaCollectionCompat = "2.5.0" val utilVersion = "21.2.0" val sparkVersion = "2.4.8" From 44a3e7f027fd26d3554ecd95881878dbcbe875a4 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 13 Jul 2021 17:29:33 +0200 Subject: [PATCH 206/306] Update sbt to 1.5.5 (#991) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 9edb75b77..10fd9eee0 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.4 +sbt.version=1.5.5 From 842d12c0e8d242fc5fba4b173f0c89acc4085be2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 3 Aug 2021 16:42:51 -1000 Subject: [PATCH 207/306] Bump olafurpg/setup-scala from 12 to 13 (#992) Bumps [olafurpg/setup-scala](https://github.com/olafurpg/setup-scala) from 12 to 13. - [Release notes](https://github.com/olafurpg/setup-scala/releases) - [Commits](https://github.com/olafurpg/setup-scala/compare/v12...v13) --- updated-dependencies: - dependency-name: olafurpg/setup-scala dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 10 +++++----- .github/workflows/release.yml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7b048d25f..5d0dda132 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,7 +9,7 @@ jobs: - name: cache SBT uses: coursier/cache-action@v6 - name: Java 11 setup - uses: olafurpg/setup-scala@v12 + uses: olafurpg/setup-scala@v13 - run: sbt "; scalafmtCheckAll; scalafmtSbtCheck" "; scalafixEnable; scalafixAll --check" test: runs-on: ubuntu-latest @@ -18,7 +18,7 @@ jobs: - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup - uses: olafurpg/setup-scala@v12 + uses: olafurpg/setup-scala@v13 with: java-version: ${{matrix.java}} - run: sbt "++${{matrix.scala}} test" @@ -38,7 +38,7 @@ jobs: - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup - uses: olafurpg/setup-scala@v12 + uses: olafurpg/setup-scala@v13 - run: | sbt ++2.12.12 coverage test coverageReport bash <(curl -s https://codecov.io/bash) @@ -49,7 +49,7 @@ jobs: - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup - uses: olafurpg/setup-scala@v12 + uses: olafurpg/setup-scala@v13 with: java-version: ${{matrix.java}} - run: sbt "++${{matrix.scala}} mimaReportBinaryIssues" @@ -76,5 +76,5 @@ jobs: - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup - uses: olafurpg/setup-scala@v12 + uses: olafurpg/setup-scala@v13 - run: sbt docs/makeMicrosite diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 8ff51977e..04617f445 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 - - uses: olafurpg/setup-scala@v12 + - uses: olafurpg/setup-scala@v13 - name: Publish ${{ github.ref }} run: sbt ci-release env: From 00e12ac88da12039d3562fc1c535538aab305809 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 16 Aug 2021 17:35:07 +0200 Subject: [PATCH 208/306] Update sbt-scalafix to 0.9.30 (#993) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index f82dbd13d..740f326a5 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,5 +12,5 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.2") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.2") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.29") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.30") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") From de336f3818a6fb34f0c27302873772c6e48cd9fc Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 25 Aug 2021 17:31:54 +0200 Subject: [PATCH 209/306] Update sbt-mima-plugin to 1.0.0 (#995) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 740f326a5..b43cebff2 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -8,7 +8,7 @@ resolvers ++= Seq( addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % 
"0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.3") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.2") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.2") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") From 601a90127a59fe22ac4462da805241d1b9399d65 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 27 Aug 2021 11:33:08 +0200 Subject: [PATCH 210/306] Update kind-projector to 0.13.1 (#996) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 7c780e926..cd4accffb 100644 --- a/build.sbt +++ b/build.sbt @@ -5,7 +5,7 @@ import pl.project13.scala.sbt.JmhPlugin val algebraVersion = "2.0.0" val bijectionVersion = "0.9.7" val javaEwahVersion = "1.1.12" -val kindProjectorVersion = "0.13.0" +val kindProjectorVersion = "0.13.1" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.9" From 100086a26db188603e677e8a23b79418d75aa08a Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sat, 28 Aug 2021 22:32:12 +0100 Subject: [PATCH 211/306] Update scalafmt to 3.0.1 (#998) * Update scalafmt to 3.0.1 * fixup! Update scalafmt to 3.0.1 * fixup! Update scalafmt to 3.0.1 --- .scalafmt.conf | 4 +- .../benchmark/CMSHashingBenchmark.scala | 4 +- .../bijection/AlgebirdBijections.scala | 6 +- .../com/twitter/algebird/compat.scala | 8 +- .../com/twitter/algebird/AdaptiveCache.scala | 22 +- .../twitter/algebird/AdjoinedUnitRing.scala | 3 +- .../com/twitter/algebird/AffineFunction.scala | 15 +- .../com/twitter/algebird/Aggregator.scala | 175 +++---- .../com/twitter/algebird/Applicative.scala | 32 +- .../com/twitter/algebird/Approximate.scala | 16 +- .../com/twitter/algebird/AveragedValue.scala | 90 ++-- .../scala/com/twitter/algebird/Batched.scala | 96 ++-- .../com/twitter/algebird/BloomFilter.scala | 35 +- .../twitter/algebird/BufferedOperation.scala | 13 +- .../scala/com/twitter/algebird/Bytes.scala | 31 +- .../com/twitter/algebird/CMSHasher.scala | 58 ++- .../com/twitter/algebird/Combinator.scala | 17 +- .../twitter/algebird/CorrelationMonoid.scala | 36 +- .../com/twitter/algebird/CountMinSketch.scala | 463 +++++++++--------- .../com/twitter/algebird/DecayedVector.scala | 5 +- .../com/twitter/algebird/DecayingCMS.scala | 178 +++---- .../com/twitter/algebird/Eventually.scala | 42 +- .../scala/com/twitter/algebird/ExpHist.scala | 223 ++++----- .../scala/com/twitter/algebird/First.scala | 32 +- .../scala/com/twitter/algebird/Fold.scala | 103 ++-- .../scala/com/twitter/algebird/Functor.scala | 8 +- .../scala/com/twitter/algebird/Group.scala | 22 +- .../scala/com/twitter/algebird/Hash128.scala | 13 +- .../com/twitter/algebird/HyperLogLog.scala | 140 +++--- .../twitter/algebird/HyperLogLogSeries.scala | 38 +- .../com/twitter/algebird/IndexedSeq.scala | 9 +- .../scala/com/twitter/algebird/Interval.scala | 63 +-- .../com/twitter/algebird/JavaMonoids.scala | 11 +- .../scala/com/twitter/algebird/Last.scala | 27 +- .../com/twitter/algebird/MapAlgebra.scala | 15 +- .../main/scala/com/twitter/algebird/Max.scala | 70 ++- .../scala/com/twitter/algebird/Metric.scala | 14 +- .../main/scala/com/twitter/algebird/Min.scala | 55 +-- .../com/twitter/algebird/MinHasher.scala | 42 +- .../com/twitter/algebird/MomentsGroup.scala | 54 +- .../scala/com/twitter/algebird/Monad.scala | 17 +- .../scala/com/twitter/algebird/Monoid.scala | 42 +- 
.../com/twitter/algebird/Predecessible.scala | 11 +- .../scala/com/twitter/algebird/Preparer.scala | 68 ++- .../scala/com/twitter/algebird/Priority.scala | 11 +- .../scala/com/twitter/algebird/QTree.scala | 103 ++-- .../com/twitter/algebird/ResetAlgebra.scala | 6 +- .../com/twitter/algebird/RightFolded.scala | 9 +- .../com/twitter/algebird/RightFolded2.scala | 20 +- .../scala/com/twitter/algebird/Ring.scala | 51 +- .../com/twitter/algebird/SGDMonoid.scala | 14 +- .../scala/com/twitter/algebird/Scan.scala | 163 +++--- .../com/twitter/algebird/Semigroup.scala | 45 +- .../scala/com/twitter/algebird/SetDiff.scala | 20 +- .../com/twitter/algebird/SketchMap.scala | 38 +- .../com/twitter/algebird/SpaceSaver.scala | 38 +- .../com/twitter/algebird/StatefulSummer.scala | 15 +- .../com/twitter/algebird/Successible.scala | 19 +- .../com/twitter/algebird/SummingCache.scala | 3 +- .../twitter/algebird/SummingIterator.scala | 10 +- .../com/twitter/algebird/SummingQueue.scala | 25 +- .../com/twitter/algebird/TopKMonoid.scala | 6 +- .../scala/com/twitter/algebird/Window.scala | 46 +- .../scala/com/twitter/algebird/field.scala | 13 +- .../twitter/algebird/immutable/BitSet.scala | 186 +++---- .../algebird/immutable/BloomFilter.scala | 54 +- .../com/twitter/algebird/macros/Cuber.scala | 37 +- .../com/twitter/algebird/macros/Roller.scala | 35 +- .../algebird/matrix/AdaptiveMatrix.scala | 4 +- .../algebird/monad/StateWithError.scala | 9 +- .../twitter/algebird/monad/Trampoline.scala | 5 +- .../mutable/PriorityQueueAggregator.scala | 6 +- .../mutable/PriorityQueueMonoid.scala | 8 +- .../scala/com/twitter/algebird/package.scala | 5 +- .../twitter/algebird/statistics/Counter.scala | 5 +- .../GaussianDistributionMonoid.scala | 12 +- .../statistics/IterCallStatistics.scala | 5 +- .../algebird/statistics/Statistics.scala | 16 +- .../algebird/AlgebraResolutionTest.scala | 3 +- .../algebird/generic/EquivOrdering.scala | 6 +- .../twitter/algebird/generic/Instances.scala | 12 +- .../twitter/algebird/spark/AlgebirdRDD.scala | 47 +- .../com/twitter/algebird/spark/package.scala | 3 +- .../algebird/spark/AlgebirdRDDTests.scala | 6 +- .../algebird/ApproximateProperty.scala | 18 +- .../com/twitter/algebird/BaseProperties.scala | 7 +- .../com/twitter/algebird/AggregatorLaws.scala | 2 +- .../twitter/algebird/BloomFilterTest.scala | 2 +- .../twitter/algebird/CheckProperties.scala | 3 +- .../com/twitter/algebird/CombinatorTest.scala | 9 +- .../twitter/algebird/CountMinSketchTest.scala | 9 +- .../com/twitter/algebird/EventuallyTest.scala | 7 +- .../com/twitter/algebird/ExpHistLaws.scala | 16 +- .../twitter/algebird/HyperLogLogTest.scala | 6 +- .../com/twitter/algebird/MomentsLaws.scala | 3 +- .../algebird/NumericSpecification.scala | 5 +- .../com/twitter/algebird/SetDiffTest.scala | 3 +- .../com/twitter/algebird/TopKTests.scala | 3 +- .../algebird/immutable/BitSetTest.scala | 2 +- .../algebird/immutable/BloomFilterTest.scala | 2 +- .../algebird/util/PromiseLinkMonoid.scala | 11 +- .../twitter/algebird/util/TunnelMonoid.scala | 36 +- .../util/summer/AsyncListMMapSum.scala | 7 +- .../algebird/util/summer/AsyncListSum.scala | 3 +- .../algebird/util/summer/AsyncMapSum.scala | 3 +- .../algebird/util/summer/AsyncSummer.scala | 3 +- .../algebird/util/summer/BufferSize.scala | 3 +- .../algebird/util/summer/FlushFrequency.scala | 3 +- .../summer/HeavyHittersCachingSummer.scala | 3 +- .../algebird/util/summer/Incrementor.scala | 3 +- .../util/summer/MemoryFlushPercent.scala | 3 +- .../algebird/util/summer/NullSummer.scala | 3 +- 
.../util/summer/SyncSummingQueue.scala | 8 +- .../algebird/util/summer/Counter.scala | 3 +- build.sbt | 3 +- project/GenTupleAggregators.scala | 24 +- 116 files changed, 1753 insertions(+), 2008 deletions(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 5344e8f45..d30c20aaf 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,6 +1,6 @@ -version=2.7.5 +version=3.0.1 maxColumn = 110 -docstrings = JavaDoc +docstrings.style = Asterisk newlines.alwaysBeforeMultilineDef = false newlines.penalizeSingleSelectMultiArgList = false align.openParenCallSite = false diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSHashingBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSHashingBenchmark.scala index b5e810bc7..dd2cf1448 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSHashingBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/CMSHashingBenchmark.scala @@ -6,8 +6,8 @@ import com.twitter.algebird.CMSHasher /** * Benchmarks the hashing algorithms used by Count-Min sketch for CMS[BigInt]. * - * The input values are generated ahead of time to ensure that each trial uses the same input (and that the RNG is not - * influencing the runtime of the trials). + * The input values are generated ahead of time to ensure that each trial uses the same input (and that the + * RNG is not influencing the runtime of the trials). * * More details available at https://github.com/twitter/algebird/issues/392. */ diff --git a/algebird-bijection/src/main/scala/com/twitter/algebird/bijection/AlgebirdBijections.scala b/algebird-bijection/src/main/scala/com/twitter/algebird/bijection/AlgebirdBijections.scala index 22f0a2bbb..67f7b0bf0 100644 --- a/algebird-bijection/src/main/scala/com/twitter/algebird/bijection/AlgebirdBijections.scala +++ b/algebird-bijection/src/main/scala/com/twitter/algebird/bijection/AlgebirdBijections.scala @@ -22,8 +22,10 @@ import com.twitter.bijection.{AbstractBijection, Bijection, ImplicitBijection, R /** * Bijections on Algebird's abstract algebra datatypes. * - * @author Oscar Boykin - * @author Sam Ritchie + * @author + * Oscar Boykin + * @author + * Sam Ritchie */ class BijectedSemigroup[T, U](implicit val sg: Semigroup[T], bij: ImplicitBijection[T, U]) extends InvariantSemigroup[T, U](bij.bijection.toFunction, bij.bijection.inverse.toFunction) { diff --git a/algebird-core/src/main/scala-2.13+/com/twitter/algebird/compat.scala b/algebird-core/src/main/scala-2.13+/com/twitter/algebird/compat.scala index e97cf666d..e142c39fe 100644 --- a/algebird-core/src/main/scala-2.13+/com/twitter/algebird/compat.scala +++ b/algebird-core/src/main/scala-2.13+/com/twitter/algebird/compat.scala @@ -21,9 +21,11 @@ private[algebird] trait CompatFold { * Simple Fold that collects elements into a container. 
*/ def container[I, C[_]](implicit cbf: Factory[I, C[I]]): Fold[I, C[I]] = - Fold.foldMutable[Builder[I, C[I]], I, C[I]]({ case (b, i) => b += i }, { _ => - cbf.newBuilder - }, { _.result }) + Fold.foldMutable[Builder[I, C[I]], I, C[I]]( + { case (b, i) => b += i }, + _ => cbf.newBuilder, + _.result + ) } private[algebird] trait CompatDecayedVector { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala index 35ccd7769..29329b788 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala @@ -17,16 +17,16 @@ limitations under the License. package com.twitter.algebird /** - * @author Avi Bryant + * @author + * Avi Bryant */ import collection.mutable.HashMap import ref.SoftReference /** - * This is a summing cache whose goal is to grow until we run out of memory, - * at which point it clears itself and stops growing. - * Note that we can lose the values in this cache at any point; - * we don't put anything here we care about. + * This is a summing cache whose goal is to grow until we run out of memory, at which point it clears itself + * and stops growing. Note that we can lose the values in this cache at any point; we don't put anything here + * we care about. */ class SentinelCache[K, V](implicit sgv: Semigroup[V]) { private val map = new SoftReference(new HashMap[K, V]()) @@ -52,13 +52,11 @@ class SentinelCache[K, V](implicit sgv: Semigroup[V]) { } /** - * This is a wrapper around SummingCache that attempts to grow the capacity - * by up to some maximum, as long as there's enough RAM. - * It determines that there's enough RAM to grow by maintaining a SentinelCache - * which keeps caching and summing the evicted values. - * Once the SentinelCache has grown to the same size as the current cache, - * plus some margin, without running out of RAM, then this indicates that we - * have enough headroom to double the capacity. + * This is a wrapper around SummingCache that attempts to grow the capacity by up to some maximum, as long as + * there's enough RAM. It determines that there's enough RAM to grow by maintaining a SentinelCache which + * keeps caching and summing the evicted values. Once the SentinelCache has grown to the same size as the + * current cache, plus some margin, without running out of RAM, then this indicates that we have enough + * headroom to double the capacity. */ class AdaptiveCache[K, V: Semigroup](maxCapacity: Int, growthMargin: Double = 3.0) extends StatefulSummer[Map[K, V]] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AdjoinedUnitRing.scala b/algebird-core/src/main/scala/com/twitter/algebird/AdjoinedUnitRing.scala index d8f8dcfc0..b48155ad3 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AdjoinedUnitRing.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AdjoinedUnitRing.scala @@ -19,7 +19,8 @@ import algebra.ring.Rng /** * This is for the case where your Ring[T] is a Rng (i.e. there is no unit). 
- * @see http://en.wikipedia.org/wiki/Pseudo-ring#Adjoining_an_identity_element + * @see + * http://en.wikipedia.org/wiki/Pseudo-ring#Adjoining_an_identity_element */ case class AdjoinedUnit[T](ones: BigInt, get: T) { def unwrap: Option[T] = if (ones == 0) Some(get) else None diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AffineFunction.scala b/algebird-core/src/main/scala/com/twitter/algebird/AffineFunction.scala index 1375c5800..35cbc4819 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AffineFunction.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AffineFunction.scala @@ -17,8 +17,7 @@ limitations under the License. package com.twitter.algebird /** - * Represents functions of the kind: - * f(x) = slope * x + intercept + * Represents functions of the kind: f(x) = slope * x + intercept */ case class AffineFunction[R](slope: R, intercept: R) extends java.io.Serializable { def toFn(implicit ring: Ring[R]): Function1[R, R] = { x => this.apply(x)(ring) } @@ -27,13 +26,11 @@ case class AffineFunction[R](slope: R, intercept: R) extends java.io.Serializabl } /** - * This feeds the value in on the LEFT!!! This may seem counter intuitive, but - * with this approach, a stream/iterator which is summed will have the same output - * as applying the function one at a time in order to the input. - * If we did the "lexigraphically correct" thing, which might be (f+g)(x) = f(g(x)) - * then we would wind up reversing the list in the sum. - * (f1 + f2)(x) = f2(f1(x)) so that: - * listOfFn.foldLeft(x) { (v, fn) => fn(v) } = (Monoid.sum(listOfFn))(x) + * This feeds the value in on the LEFT!!! This may seem counter intuitive, but with this approach, a + * stream/iterator which is summed will have the same output as applying the function one at a time in order + * to the input. If we did the "lexigraphically correct" thing, which might be (f+g)(x) = f(g(x)) then we + * would wind up reversing the list in the sum. (f1 + f2)(x) = f2(f1(x)) so that: listOfFn.foldLeft(x) { (v, + * fn) => fn(v) } = (Monoid.sum(listOfFn))(x) */ class AffineFunctionMonoid[R](implicit ring: Ring[R]) extends Monoid[AffineFunction[R]] { override lazy val zero: AffineFunction[R] = AffineFunction[R](ring.one, ring.zero) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala b/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala index 6dc5fb038..4e78d234b 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala @@ -7,8 +7,7 @@ import scala.collection.generic.CanBuildFrom /** * Aggregators compose well. * - * To create a parallel aggregator that operates on a single - * input in parallel, use: + * To create a parallel aggregator that operates on a single input in parallel, use: * GeneratedTupleAggregator.from2((agg1, agg2)) */ object Aggregator extends java.io.Serializable { @@ -64,8 +63,8 @@ object Aggregator extends java.io.Serializable { } /** - * Obtain an [[Aggregator]] that uses an efficient append operation for faster aggregation. - * Equivalent to {{{ appendSemigroup(prep, appnd, identity[T]_)(sg) }}} + * Obtain an [[Aggregator]] that uses an efficient append operation for faster aggregation. 
Equivalent to + * {{{appendSemigroup(prep, appnd, identity[T]_)(sg)}}} */ def appendSemigroup[F, T](prep: F => T, appnd: (T, F) => T)(implicit sg: Semigroup[T] @@ -74,15 +73,23 @@ object Aggregator extends java.io.Serializable { /** * Obtain an [[Aggregator]] that uses an efficient append operation for faster aggregation - * @tparam F Data input type - * @tparam T Aggregating [[Semigroup]] type - * @tparam P Presentation (output) type - * @param prep The preparation function. Expected to construct an instance of type T from a single data element. - * @param appnd Function that appends the [[Semigroup]]. Defines the [[Aggregator.append]] method for this aggregator. - * Analogous to the 'seqop' function in Scala's sequence 'aggregate' method - * @param pres The presentation function - * @param sg The [[Semigroup]] type class - * @note The functions 'appnd' and 'prep' are expected to obey the law: {{{ appnd(t, f) == sg.plus(t, prep(f)) }}} + * @tparam F + * Data input type + * @tparam T + * Aggregating [[Semigroup]] type + * @tparam P + * Presentation (output) type + * @param prep + * The preparation function. Expected to construct an instance of type T from a single data element. + * @param appnd + * Function that appends the [[Semigroup]]. Defines the [[Aggregator.append]] method for this aggregator. + * Analogous to the 'seqop' function in Scala's sequence 'aggregate' method + * @param pres + * The presentation function + * @param sg + * The [[Semigroup]] type class + * @note + * The functions 'appnd' and 'prep' are expected to obey the law: {{{appnd(t, f) == sg.plus(t, prep(f))}}} */ def appendSemigroup[F, T, P](prep: F => T, appnd: (T, F) => T, pres: T => P)(implicit sg: Semigroup[T] @@ -113,22 +120,29 @@ object Aggregator extends java.io.Serializable { } /** - * Obtain a [[MonoidAggregator]] that uses an efficient append operation for faster aggregation. - * Equivalent to {{{ appendMonoid(appnd, identity[T]_)(m) }}} + * Obtain a [[MonoidAggregator]] that uses an efficient append operation for faster aggregation. Equivalent + * to {{{appendMonoid(appnd, identity[T]_)(m)}}} */ def appendMonoid[F, T](appnd: (T, F) => T)(implicit m: Monoid[T]): MonoidAggregator[F, T, T] = appendMonoid(appnd, identity[T])(m) /** * Obtain a [[MonoidAggregator]] that uses an efficient append operation for faster aggregation - * @tparam F Data input type - * @tparam T Aggregating [[Monoid]] type - * @tparam P Presentation (output) type - * @param appnd Function that appends the [[Monoid]]. Defines the [[MonoidAggregator.append]] method for this aggregator. - * Analogous to the 'seqop' function in Scala's sequence 'aggregate' method - * @param pres The presentation function - * @param m The [[Monoid]] type class - * @note The function 'appnd' is expected to obey the law: {{{ appnd(t, f) == m.plus(t, appnd(m.zero, f)) }}} + * @tparam F + * Data input type + * @tparam T + * Aggregating [[Monoid]] type + * @tparam P + * Presentation (output) type + * @param appnd + * Function that appends the [[Monoid]]. Defines the [[MonoidAggregator.append]] method for this + * aggregator. 
Analogous to the 'seqop' function in Scala's sequence 'aggregate' method + * @param pres + * The presentation function + * @param m + * The [[Monoid]] type class + * @note + * The function 'appnd' is expected to obey the law: {{{appnd(t, f) == m.plus(t, appnd(m.zero, f))}}} */ def appendMonoid[F, T, P](appnd: (T, F) => T, pres: T => P)(implicit m: Monoid[T] @@ -249,9 +263,9 @@ object Aggregator extends java.io.Serializable { new TopKToListAggregator[T](count)(implicitly[Ordering[T]].reverse) /** - * Randomly selects input items where each item has an independent probability 'prob' of being - * selected. This assumes that all sampled records can fit in memory, so use this only when the - * expected number of sampled values is small. + * Randomly selects input items where each item has an independent probability 'prob' of being selected. + * This assumes that all sampled records can fit in memory, so use this only when the expected number of + * sampled values is small. */ def randomSample[T]( prob: Double, @@ -265,9 +279,9 @@ object Aggregator extends java.io.Serializable { } /** - * Selects exactly 'count' of the input records randomly (or all of the records if there are less - * then 'count' total records). This assumes that all 'count' of the records can fit in memory, - * so use this only for small values of 'count'. + * Selects exactly 'count' of the input records randomly (or all of the records if there are less then + * 'count' total records). This assumes that all 'count' of the records can fit in memory, so use this only + * for small values of 'count'. */ def reservoirSample[T]( count: Int, @@ -299,26 +313,24 @@ object Aggregator extends java.io.Serializable { prepareMonoid { t: T => Set(t) } /** - * This builds an in-memory Set, and then finally gets the size of that set. - * This may not be scalable if the Uniques are very large. You might check the - * approximateUniqueCount or HyperLogLog Aggregator to get an approximate version - * of this that is scalable. + * This builds an in-memory Set, and then finally gets the size of that set. This may not be scalable if the + * Uniques are very large. You might check the approximateUniqueCount or HyperLogLog Aggregator to get an + * approximate version of this that is scalable. */ def uniqueCount[T]: MonoidAggregator[T, Set[T], Int] = toSet[T].andThenPresent(_.size) /** - * Using a constant amount of memory, give an approximate unique count (~ 1% error). - * This uses an exact set for up to 100 items, - * then HyperLogLog (HLL) with an 1.2% standard error which uses at most 8192 bytes - * for each HLL. For more control, see HyperLogLogAggregator. + * Using a constant amount of memory, give an approximate unique count (~ 1% error). This uses an exact set + * for up to 100 items, then HyperLogLog (HLL) with an 1.2% standard error which uses at most 8192 bytes for + * each HLL. For more control, see HyperLogLogAggregator. */ def approximateUniqueCount[T: Hash128]: MonoidAggregator[T, Either[HLL, Set[T]], Long] = SetSizeHashAggregator[T](hllBits = 13, maxSetSize = 100) /** - * Returns the lower bound of a given percentile where the percentile is between (0,1] - * The items that are iterated over cannot be negative. + * Returns the lower bound of a given percentile where the percentile is between (0,1] The items that are + * iterated over cannot be negative. 
*/ def approximatePercentile[T](percentile: Double, k: Int = QTreeAggregator.DefaultK)(implicit num: Numeric[T] @@ -326,8 +338,8 @@ object Aggregator extends java.io.Serializable { QTreeAggregatorLowerBound[T](percentile, k) /** - * Returns the intersection of a bounded percentile where the percentile is between (0,1] - * The items that are iterated over cannot be negative. + * Returns the intersection of a bounded percentile where the percentile is between (0,1] The items that are + * iterated over cannot be negative. */ def approximatePercentileBounds[T](percentile: Double, k: Int = QTreeAggregator.DefaultK)(implicit num: Numeric[T] @@ -337,16 +349,15 @@ object Aggregator extends java.io.Serializable { /** * An aggregator that sums Numeric values into Doubles. * - * This is really no more than converting to Double and then summing. The conversion to double - * means we don't have the overflow semantics of integer types on the jvm - * (e.g. Int.MaxValue + 1 == Int.MinValue). + * This is really no more than converting to Double and then summing. The conversion to double means we + * don't have the overflow semantics of integer types on the jvm (e.g. Int.MaxValue + 1 == Int.MinValue). * - * Note that if you instead wanted to aggregate Numeric values of a type T into the same type T - * (e.g. if you want MonoidAggregator[T, T, T] for some Numeric type T), you can directly use - * Aggregator.fromMonoid[T] after importing the numericRing implicit: + * Note that if you instead wanted to aggregate Numeric values of a type T into the same type T (e.g. if you + * want MonoidAggregator[T, T, T] for some Numeric type T), you can directly use Aggregator.fromMonoid[T] + * after importing the numericRing implicit: * - * > import com.twitter.algebird.Ring.numericRing - * > def numericAggregator[T: Numeric]: MonoidAggregator[T, T, T] = Aggregator.fromMonoid[T] + * > import com.twitter.algebird.Ring.numericRing > def numericAggregator[T: Numeric]: MonoidAggregator[T, + * T, T] = Aggregator.fromMonoid[T] */ def numericSum[T](implicit num: Numeric[T]): MonoidAggregator[T, Double, Double] = Preparer[T].map(num.toDouble).monoidAggregate(Aggregator.fromMonoid) @@ -354,19 +365,17 @@ object Aggregator extends java.io.Serializable { } /** - * This is a type that models map/reduce(map). First each item is mapped, - * then we reduce with a semigroup, then finally we present the results. + * This is a type that models map/reduce(map). First each item is mapped, then we reduce with a semigroup, + * then finally we present the results. * - * Unlike Fold, Aggregator keeps it's middle aggregation type externally visible. - * This is because Aggregators are useful in parallel map/reduce systems where - * there may be some additional types needed to cross the map/reduce boundary - * (such a serialization and intermediate storage). If you don't care about the - * middle type, an _ may be used and the main utility of the instance is still - * preserved (e.g. def operate[T](ag: Aggregator[T, _, Int]): Int) + * Unlike Fold, Aggregator keeps it's middle aggregation type externally visible. This is because Aggregators + * are useful in parallel map/reduce systems where there may be some additional types needed to cross the + * map/reduce boundary (such a serialization and intermediate storage). If you don't care about the middle + * type, an _ may be used and the main utility of the instance is still preserved (e.g. 
def operate[T](ag: + * Aggregator[T, _, Int]): Int) * - * Note, join is very useful to combine multiple aggregations with one pass. - * Also GeneratedTupleAggregator.fromN((agg1, agg2, ... aggN)) can glue these - * together well. + * Note, join is very useful to combine multiple aggregations with one pass. Also + * GeneratedTupleAggregator.fromN((agg1, agg2, ... aggN)) can glue these together well. * * This type is the the Fold.M from Haskell's fold package: * https://hackage.haskell.org/package/folds-0.6.2/docs/Data-Fold-M.html @@ -386,21 +395,20 @@ trait Aggregator[-A, B, +C] extends java.io.Serializable { self => def reduce(l: B, r: B): B = semigroup.plus(l, r) /** - * This may error if items is empty. To be safe you might use reduceOption - * if you don't know that items is non-empty + * This may error if items is empty. To be safe you might use reduceOption if you don't know that items is + * non-empty */ def reduce(items: TraversableOnce[B]): B = semigroup.sumOption(items).get /** - * This is the safe version of the above. If the input in empty, return None, - * else reduce the items + * This is the safe version of the above. If the input in empty, return None, else reduce the items */ def reduceOption(items: TraversableOnce[B]): Option[B] = semigroup.sumOption(items) /** - * This may error if inputs are empty (for Monoid Aggregators it never will, instead - * you see present(Monoid.zero[B]) + * This may error if inputs are empty (for Monoid Aggregators it never will, instead you see + * present(Monoid.zero[B]) */ def apply(inputs: TraversableOnce[A]): C = present(reduce(inputs.iterator.map(prepare))) @@ -413,8 +421,8 @@ trait Aggregator[-A, B, +C] extends java.io.Serializable { self => .map(present) /** - * This returns the cumulative sum of its inputs, in the same order. - * If the inputs are empty, the result will be empty too. + * This returns the cumulative sum of its inputs, in the same order. If the inputs are empty, the result + * will be empty too. */ def cumulativeIterator(inputs: Iterator[A]): Iterator[C] = inputs @@ -425,8 +433,8 @@ trait Aggregator[-A, B, +C] extends java.io.Serializable { self => .collect { case Some(b) => present(b) } /** - * This returns the cumulative sum of its inputs, in the same order. - * If the inputs are empty, the result will be empty too. + * This returns the cumulative sum of its inputs, in the same order. If the inputs are empty, the result + * will be empty too. */ def applyCumulatively[In <: TraversableOnce[A], Out]( inputs: In @@ -461,9 +469,9 @@ trait Aggregator[-A, B, +C] extends java.io.Serializable { self => GeneratedTupleAggregator.from2((this, that)) /** - * This allows you to join two aggregators into one that takes a tuple input, - * which in turn allows you to chain .composePrepare onto the result if you have - * an initial input that has to be prepared differently for each of the joined aggregators. + * This allows you to join two aggregators into one that takes a tuple input, which in turn allows you to + * chain .composePrepare onto the result if you have an initial input that has to be prepared differently + * for each of the joined aggregators. 
* * The law here is: ag1.zip(ag2).apply(as.zip(bs)) == (ag1(as), ag2(bs)) */ @@ -477,9 +485,8 @@ trait Aggregator[-A, B, +C] extends java.io.Serializable { self => } /** - * An Aggregator can be converted to a Fold, but not vice-versa - * Note, a Fold is more constrained so only do this if you require - * joining a Fold with an Aggregator to produce a Fold + * An Aggregator can be converted to a Fold, but not vice-versa Note, a Fold is more constrained so only do + * this if you require joining a Fold with an Aggregator to produce a Fold */ def toFold: Fold[A, Option[C]] = Fold.fold[Option[B], A, Option[C]]( @@ -500,8 +507,8 @@ trait Aggregator[-A, B, +C] extends java.io.Serializable { self => } /** - * Aggregators are Applicatives, but this hides the middle type. If you need a join that - * does not hide the middle type use join on the trait, or GeneratedTupleAggregator.fromN + * Aggregators are Applicatives, but this hides the middle type. If you need a join that does not hide the + * middle type use join on the trait, or GeneratedTupleAggregator.fromN */ class AggregatorApplicative[I] extends Applicative[({ type L[O] = Aggregator[I, _, O] })#L] { override def map[T, U](mt: Aggregator[I, _, T])(fn: T => U): Aggregator[I, _, U] = @@ -561,8 +568,7 @@ trait MonoidAggregator[-A, B, +C] extends Aggregator[A, B, C] { self => } /** - * Build a MonoidAggregator that either takes left or right input - * and outputs the pair from both + * Build a MonoidAggregator that either takes left or right input and outputs the pair from both */ def either[A2, B2, C2]( that: MonoidAggregator[A2, B2, C2] @@ -598,8 +604,7 @@ trait MonoidAggregator[-A, B, +C] extends Aggregator[A, B, C] { self => } /** - * This maps the inputs to Bs, then sums them, effectively flattening - * the inputs to the MonoidAggregator + * This maps the inputs to Bs, then sums them, effectively flattening the inputs to the MonoidAggregator */ def sumBefore: MonoidAggregator[TraversableOnce[A], B, C] = new MonoidAggregator[TraversableOnce[A], B, C] { @@ -610,9 +615,9 @@ trait MonoidAggregator[-A, B, +C] extends Aggregator[A, B, C] { self => } /** - * This allows you to join two aggregators into one that takes a tuple input, - * which in turn allows you to chain .composePrepare onto the result if you have - * an initial input that has to be prepared differently for each of the joined aggregators. + * This allows you to join two aggregators into one that takes a tuple input, which in turn allows you to + * chain .composePrepare onto the result if you have an initial input that has to be prepared differently + * for each of the joined aggregators. * * The law here is: ag1.zip(ag2).apply(as.zip(bs)) == (ag1(as), ag2(bs)) */ diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala b/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala index afb7eb17f..32a66339a 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala @@ -19,18 +19,14 @@ import scala.annotation.implicitNotFound import scala.collection.compat._ /** - * Simple implementation of an Applicative type-class. - * There are many choices for the canonical second operation (join, sequence, joinWith, ap), - * all equivalent. For a Functor modeling concurrent computations with failure, like Future, - * combining results with join can save a lot of time over combining with flatMap. 
(Given two - * operations, if the second fails before the first completes, one can fail the entire computation - * right then. With flatMap, one would have to wait for the first operation to complete before - * failing it.) + * Simple implementation of an Applicative type-class. There are many choices for the canonical second + * operation (join, sequence, joinWith, ap), all equivalent. For a Functor modeling concurrent computations + * with failure, like Future, combining results with join can save a lot of time over combining with flatMap. + * (Given two operations, if the second fails before the first completes, one can fail the entire computation + * right then. With flatMap, one would have to wait for the first operation to complete before failing it.) * - * Laws Applicatives must follow: - * map(apply(x))(f) == apply(f(x)) - * join(apply(x), apply(y)) == apply((x, y)) - * (sequence and joinWith specialize join - they should behave appropriately) + * Laws Applicatives must follow: map(apply(x))(f) == apply(f(x)) join(apply(x), apply(y)) == apply((x, y)) + * (sequence and joinWith specialize join - they should behave appropriately) */ @implicitNotFound(msg = "Cannot find Applicative type class for ${M}") trait Applicative[M[_]] extends Functor[M] { @@ -127,8 +123,8 @@ class PureOp[A](val a: A) extends AnyVal { } /** - * This enrichment allows us to use our Applicative instances in for expressions: - * if (import Applicative._) has been done + * This enrichment allows us to use our Applicative instances in for expressions: if (import Applicative._) + * has been done */ class ApplicativeOperators[A, M[_]](m: M[A])(implicit app: Applicative[M]) extends FunctorOperators[A, M](m) { def join[B](mb: M[B]): M[(A, B)] = app.join(m, mb) @@ -152,9 +148,8 @@ class ApplicativeMonoid[T, M[_]](implicit app: Applicative[M], mon: Monoid[T]) } /** - * Group and Ring ARE NOT AUTOMATIC. You have to check that the laws hold for your - * Applicative. If your M[_] is a wrapper type (Option[_], Some[_], Try[_], Future[_], etc...) - * this generally works. + * Group and Ring ARE NOT AUTOMATIC. You have to check that the laws hold for your Applicative. If your M[_] + * is a wrapper type (Option[_], Some[_], Try[_], Future[_], etc...) this generally works. */ class ApplicativeGroup[T, M[_]](implicit app: Applicative[M], grp: Group[T]) extends ApplicativeMonoid[T, M] @@ -164,9 +159,8 @@ class ApplicativeGroup[T, M[_]](implicit app: Applicative[M], grp: Group[T]) } /** - * Group and Ring ARE NOT AUTOMATIC. You have to check that the laws hold for your - * Applicative. If your M[_] is a wrapper type (Option[_], Some[_], Try[_], Future[_], etc...) - * this generally works. + * Group and Ring ARE NOT AUTOMATIC. You have to check that the laws hold for your Applicative. If your M[_] + * is a wrapper type (Option[_], Some[_], Try[_], Future[_], etc...) this generally works. 
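A sketch of the `join` enrichment mentioned above, using `Option` as the wrapper type; the exact import that brings the `Option` instance and operator syntax into scope (assumed here to be the `Monad` companion) may differ:

{{{
import com.twitter.algebird.Monad._ // assumed location of the Option instance and operator enrichment

val ok: Option[(Int, Int)]     = Option(1).join(Option(2))         // Some((1, 2))
val failed: Option[(Int, Int)] = Option(1).join(None: Option[Int]) // None: either side failing fails the join
}}}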
*/ class ApplicativeRing[T, M[_]](implicit app: Applicative[M], ring: Ring[T]) extends ApplicativeGroup[T, M] diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Approximate.scala b/algebird-core/src/main/scala/com/twitter/algebird/Approximate.scala index 9a9adde99..ad5b628b2 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Approximate.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Approximate.scala @@ -77,9 +77,7 @@ case class Approximate[N](min: N, estimate: N, max: N, probWithinBounds: Double) require(numeric.lteq(min, estimate) && numeric.lteq(estimate, max)) /** - * Is this value contained within the bounds? - * Contract is: - * Prob(boundsContain(estimate)) >= probWithinBounds + * Is this value contained within the bounds? Contract is: Prob(boundsContain(estimate)) >= probWithinBounds */ def boundsContain(v: N): Boolean = numeric.lteq(min, v) && numeric.lteq(v, max) @@ -88,8 +86,7 @@ case class Approximate[N](min: N, estimate: N, max: N, probWithinBounds: Double) ApproximateBoolean(boundsContain(v), probWithinBounds) /** - * This is so you can do: val x = Approximate(1.0, 1.1, 1.2, 0.99) - * and then x ~ 1.05 returns true + * This is so you can do: val x = Approximate(1.0, 1.1, 1.2, 0.99) and then x ~ 1.05 returns true */ def ~(v: N): Boolean = boundsContain(v) @@ -108,12 +105,9 @@ case class Approximate[N](min: N, estimate: N, max: N, probWithinBounds: Double) this.+(right.negate) /** - * This is not distributive, because: - * a*(b+c) has two probability multiplications - * while (a*b + a*b) has three - * Some kind of general formula solver could possibly - * make this distributive, but in the mean time, it's only - * a group + * This is not distributive, because: a*(b+c) has two probability multiplications while (a*b + a*b) has + * three Some kind of general formula solver could possibly make this distributive, but in the mean time, + * it's only a group */ def *(right: Approximate[N]): Approximate[N] = if (right.isZero || isOne) { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala b/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala index 81630d5a3..9d684db79 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala @@ -22,19 +22,20 @@ import scala.collection.compat._ /** * Tracks the count and mean value of Doubles in a data stream. * - * Adding two instances of [[AveragedValue]] with [[+]] - * is equivalent to taking an average of the two streams, with each - * stream weighted by its count. + * Adding two instances of [[AveragedValue]] with [[+]] is equivalent to taking an average of the two streams, + * with each stream weighted by its count. * - * The mean calculation uses a numerically stable online algorithm - * suitable for large numbers of records, similar to Chan et. al.'s - * [[http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm - * parallel variance algorithm on Wikipedia]]. As long as your count - * doesn't overflow a Long, the mean calculation won't overflow. + * The mean calculation uses a numerically stable online algorithm suitable for large numbers of records, + * similar to Chan et. al.'s + * [[http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm parallel variance algorithm on Wikipedia]]. + * As long as your count doesn't overflow a Long, the mean calculation won't overflow. 
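Circling back to the `Approximate` operations in the hunk above, a small sketch of the bounds arithmetic (the constructor and `~` are taken from the scaladoc; the exact combined probability is left unstated):

{{{
import com.twitter.algebird.Approximate

val x = Approximate(1.0, 1.1, 1.2, 0.99) // (min, estimate, max, probWithinBounds)
val y = Approximate(2.0, 2.0, 2.1, 0.98)

x ~ 1.05         // true: 1.05 lies within [1.0, 1.2]
(x + y).estimate // 3.1, with bounds [3.0, 3.3] and a probability no better than either input's
}}}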
* - * @see [[MomentsGroup.getCombinedMean]] for implementation of [[+]] - * @param count the number of aggregated items - * @param value the average value of all aggregated items + * @see + * [[MomentsGroup.getCombinedMean]] for implementation of [[+]] + * @param count + * the number of aggregated items + * @param value + * the average value of all aggregated items */ case class AveragedValue(count: Long, value: Double) { @@ -48,27 +49,32 @@ case class AveragedValue(count: Long, value: Double) { def unary_- : AveragedValue = copy(count = -count) /** - * Averages this instance with the *opposite* of the supplied - * [[AveragedValue]] instance, effectively subtracting out that - * instance's contribution to the mean. + * Averages this instance with the *opposite* of the supplied [[AveragedValue]] instance, effectively + * subtracting out that instance's contribution to the mean. * - * @param r the instance to subtract - * @return an instance with `r`'s stream subtracted out + * @param r + * the instance to subtract + * @return + * an instance with `r`'s stream subtracted out */ def -(r: AveragedValue): AveragedValue = AveragedGroup.minus(this, r) /** * Averages this instance with another [[AveragedValue]] instance. - * @param r the other instance - * @return an instance representing the mean of this instance and `r`. + * @param r + * the other instance + * @return + * an instance representing the mean of this instance and `r`. */ def +(r: AveragedValue): AveragedValue = AveragedGroup.plus(this, r) /** * Returns a new instance that averages `that` into this instance. * - * @param that value to average into this instance - * @return an instance representing the mean of this instance and `that`. + * @param that + * value to average into this instance + * @return + * an instance representing the mean of this instance and `that`. */ def +(that: Double): AveragedValue = AveragedValue(count + 1L, MomentsGroup.getCombinedMean(count, value, 1L, that)) @@ -76,33 +82,33 @@ case class AveragedValue(count: Long, value: Double) { /** * Returns a new instance that averages `that` into this instance. * - * @param that value to average into this instance - * @return an instance representing the mean of this instance and `that`. + * @param that + * value to average into this instance + * @return + * an instance representing the mean of this instance and `that`. */ def +[N](that: N)(implicit num: Numeric[N]): AveragedValue = this + num.toDouble(that) } /** - * Provides a set of operations needed to create and use - * [[AveragedValue]] instances. + * Provides a set of operations needed to create and use [[AveragedValue]] instances. */ object AveragedValue { implicit val group: Group[AveragedValue] = AveragedGroup /** - * Returns an [[Aggregator]] that uses [[AveragedValue]] to - * calculate the mean of all `Double` values in the stream. Each - * Double value receives a count of 1 during aggregation. + * Returns an [[Aggregator]] that uses [[AveragedValue]] to calculate the mean of all `Double` values in the + * stream. Each Double value receives a count of 1 during aggregation. */ def aggregator: Aggregator[Double, AveragedValue, Double] = Averager /** - * Returns an [[Aggregator]] that uses [[AveragedValue]] to - * calculate the mean of all values in the stream. Each numeric - * value receives a count of `1` during aggregation. + * Returns an [[Aggregator]] that uses [[AveragedValue]] to calculate the mean of all values in the stream. + * Each numeric value receives a count of `1` during aggregation. 
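A short sketch of the two entry points documented above: the aggregator form and the count-weighted `+`:

{{{
import com.twitter.algebird.AveragedValue

AveragedValue.aggregator(Seq(1.0, 2.0, 3.0)) // 2.0: mean of the stream

val left  = AveragedValue(2L, 1.0) // two observations averaging 1.0
val right = AveragedValue(2L, 3.0) // two observations averaging 3.0
left + right                       // AveragedValue(4, 2.0): each stream weighted by its count
}}}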
* - * @tparam N numeric type to convert into `Double` + * @tparam N + * numeric type to convert into `Double` */ def numericAggregator[N](implicit num: Numeric[N]): MonoidAggregator[N, AveragedValue, Double] = Aggregator @@ -112,15 +118,16 @@ object AveragedValue { /** * Creates [[AveragedValue]] with a value of `v` and a count of 1. * - * @tparam V type with an implicit conversion to Double + * @tparam V + * type with an implicit conversion to Double */ def apply[V: Numeric](v: V): AveragedValue = apply(1L, v) /** - * Creates an [[AveragedValue]] with a count of of `c` and a value - * of `v`. + * Creates an [[AveragedValue]] with a count of of `c` and a value of `v`. * - * @tparam V type with an implicit conversion to Double + * @tparam V + * type with an implicit conversion to Double */ def apply[V](c: Long, v: V)(implicit num: Numeric[V]): AveragedValue = AveragedValue(c, num.toDouble(v)) @@ -129,7 +136,8 @@ object AveragedValue { /** * [[Group]] implementation for [[AveragedValue]]. * - * @define T `AveragedValue` + * @define T + * `AveragedValue` */ object AveragedGroup extends Group[AveragedValue] with CommutativeGroup[AveragedValue] { import MomentsGroup.getCombinedMean @@ -141,9 +149,8 @@ object AveragedGroup extends Group[AveragedValue] with CommutativeGroup[Averaged override def negate(av: AveragedValue): AveragedValue = -av /** - * Optimized implementation of [[plus]]. Uses internal mutation to - * combine the supplied [[AveragedValue]] instances without creating - * intermediate objects. + * Optimized implementation of [[plus]]. Uses internal mutation to combine the supplied [[AveragedValue]] + * instances without creating intermediate objects. */ override def sumOption(iter: TraversableOnce[AveragedValue]): Option[AveragedValue] = if (iter.iterator.isEmpty) None @@ -171,9 +178,8 @@ object AveragedGroup extends Group[AveragedValue] with CommutativeGroup[Averaged } /** - * [[Aggregator]] that uses [[AveragedValue]] to calculate the mean - * of all `Double` values in the stream. Each Double value receives a - * count of 1 during aggregation. + * [[Aggregator]] that uses [[AveragedValue]] to calculate the mean of all `Double` values in the stream. Each + * Double value receives a count of 1 during aggregation. */ object Averager extends MonoidAggregator[Double, AveragedValue, Double] { override val monoid: AveragedGroup.type = AveragedGroup diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Batched.scala b/algebird-core/src/main/scala/com/twitter/algebird/Batched.scala index 14997976d..d209a98dc 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Batched.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Batched.scala @@ -6,19 +6,15 @@ import scala.collection.compat._ /** * Batched: the free semigroup. * - * For any type `T`, `Batched[T]` represents a way to lazily combine T - * values as a semigroup would (i.e. associatively). A `Semigroup[T]` - * instance can be used to recover a `T` value from a `Batched[T]`. + * For any type `T`, `Batched[T]` represents a way to lazily combine T values as a semigroup would (i.e. + * associatively). A `Semigroup[T]` instance can be used to recover a `T` value from a `Batched[T]`. * - * Like other free structures, Batched trades space for time. A sum of - * batched values defers the underlying semigroup action, instead - * storing all values in memory (in a tree structure). 
If an - * underlying semigroup is available, `Batched.semigroup` and - * `Batch.monoid` can be configured to periodically sum the tree to - * keep the overall size below `batchSize`. + * Like other free structures, Batched trades space for time. A sum of batched values defers the underlying + * semigroup action, instead storing all values in memory (in a tree structure). If an underlying semigroup is + * available, `Batched.semigroup` and `Batch.monoid` can be configured to periodically sum the tree to keep + * the overall size below `batchSize`. * - * `Batched[T]` values are guaranteed not to be empty -- that is, they - * will contain at least one `T` value. + * `Batched[T]` values are guaranteed not to be empty -- that is, they will contain at least one `T` value. */ sealed abstract class Batched[T] extends Serializable { @@ -30,8 +26,7 @@ sealed abstract class Batched[T] extends Serializable { /** * Combine two batched values. * - * As mentioned above, this just creates a new tree structure - * containing `this` and `that`. + * As mentioned above, this just creates a new tree structure containing `this` and `that`. */ def combine(that: Batched[T]): Batched[T] = Batched.Items(this, that) @@ -39,8 +34,7 @@ sealed abstract class Batched[T] extends Serializable { /** * Compact this batch if it exceeds `batchSize`. * - * Compacting a branch means summing it, and then storing the summed - * value in a new single-item batch. + * Compacting a branch means summing it, and then storing the summed value in a new single-item batch. */ def compact(batchSize: Int)(implicit s: Semigroup[T]): Batched[T] = if (size < batchSize) this else Batched.Item(sum(s)) @@ -58,9 +52,8 @@ sealed abstract class Batched[T] extends Serializable { * * This is the order used by `.sum`. * - * This iterator traverses the tree from left-to-right. If the - * original expression was (w + x + y + z), this iterator returns w, - * x, y, and then z. + * This iterator traverses the tree from left-to-right. If the original expression was (w + x + y + z), this + * iterator returns w, x, y, and then z. */ def iterator: Iterator[T] = this match { @@ -77,9 +70,8 @@ sealed abstract class Batched[T] extends Serializable { /** * Provide a reversed iterator over the underlying tree structure. * - * This iterator traverses the tree from right-to-left. If the - * original expression was (w + x + y + z), this iterator returns z, - * y, x, and then w. + * This iterator traverses the tree from right-to-left. If the original expression was (w + x + y + z), this + * iterator returns z, y, x, and then w. */ def reverseIterator: Iterator[T] = this match { @@ -106,8 +98,7 @@ object Batched { /** * Constructed an optional batch from a collection of values. * - * Since batches cannot be empty, this method returns `None` if `ts` - * is empty, and `Some(batch)` otherwise. + * Since batches cannot be empty, this method returns `None` if `ts` is empty, and `Some(batch)` otherwise. */ def items[T](ts: TraversableOnce[T]): Option[Batched[T]] = if (ts.iterator.isEmpty) None @@ -120,13 +111,11 @@ object Batched { /** * Equivalence for batches. * - * Batches are equivalent if they sum to the same value. Since the - * free semigroup is associative, it's not correct to take tree - * structure into account when determining equality. + * Batches are equivalent if they sum to the same value. Since the free semigroup is associative, it's not + * correct to take tree structure into account when determining equality. 
* - * One thing to note here is that two equivalent batches might - * produce different lists (for instance, if one of the batches has - * more zeros in it than another one). + * One thing to note here is that two equivalent batches might produce different lists (for instance, if one + * of the batches has more zeros in it than another one). */ implicit def equiv[A](implicit e: Equiv[A], s: Semigroup[A]): Equiv[Batched[A]] = new Equiv[Batched[A]] { @@ -137,8 +126,7 @@ object Batched { /** * The free semigroup for batched values. * - * This semigroup just accumulates batches and doesn't ever evaluate - * them to flatten the tree. + * This semigroup just accumulates batches and doesn't ever evaluate them to flatten the tree. */ implicit def semigroup[A]: Semigroup[Batched[A]] = new Semigroup[Batched[A]] { @@ -148,9 +136,8 @@ object Batched { /** * Compacting semigroup for batched values. * - * This semigroup ensures that the batch's tree structure has fewer - * than `batchSize` values in it. When more values are added, the - * tree is compacted using `s`. + * This semigroup ensures that the batch's tree structure has fewer than `batchSize` values in it. When more + * values are added, the tree is compacted using `s`. */ def compactingSemigroup[A: Semigroup](batchSize: Int): Semigroup[Batched[A]] = new BatchedSemigroup[A](batchSize) @@ -158,24 +145,20 @@ object Batched { /** * Compacting monoid for batched values. * - * This monoid ensures that the batch's tree structure has fewer - * than `batchSize` values in it. When more values are added, the - * tree is compacted using `m`. + * This monoid ensures that the batch's tree structure has fewer than `batchSize` values in it. When more + * values are added, the tree is compacted using `m`. * - * It's worth noting that `x + 0` here will produce the same sum as - * `x`, but `.toList` will produce different lists (one will have an - * extra zero). + * It's worth noting that `x + 0` here will produce the same sum as `x`, but `.toList` will produce + * different lists (one will have an extra zero). */ def compactingMonoid[A: Monoid](batchSize: Int): Monoid[Batched[A]] = new BatchedMonoid[A](batchSize) /** - * This aggregator batches up `agg` so that all the addition can be - * performed at once. + * This aggregator batches up `agg` so that all the addition can be performed at once. * - * It is useful when `sumOption` is much faster than using `plus` - * (e.g. when there is temporary mutable state used to make - * summation fast). + * It is useful when `sumOption` is much faster than using `plus` (e.g. when there is temporary mutable + * state used to make summation fast). */ def aggregator[A, B, C](batchSize: Int, agg: Aggregator[A, B, C]): Aggregator[A, Batched[B], C] = new Aggregator[A, Batched[B], C] { @@ -186,12 +169,10 @@ object Batched { } /** - * This monoid aggregator batches up `agg` so that all the addition - * can be performed at once. + * This monoid aggregator batches up `agg` so that all the addition can be performed at once. * - * It is useful when `sumOption` is much faster than using `plus` - * (e.g. when there is temporary mutable state used to make - * summation fast). + * It is useful when `sumOption` is much faster than using `plus` (e.g. when there is temporary mutable + * state used to make summation fast). */ def monoidAggregator[A, B, C]( batchSize: Int, @@ -240,9 +221,8 @@ object Batched { /** * Abstract iterator through a batch's tree. 
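To illustrate the free-semigroup behaviour and the compacting variants described in this file, a minimal sketch (implicit `Semigroup[Int]`/`Monoid[Int]` instances are assumed to be in scope, as Algebird provides for primitives):

{{{
import com.twitter.algebird.{Batched, Monoid}

// Deferred additions stored as a tree
val batch = Batched.Item(1).combine(Batched.Item(2)).combine(Batched.Item(3))

batch.toList                 // List(1, 2, 3), left-to-right order
batch.compact(batchSize = 2) // summed down to a single Item via Semigroup[Int]

// A monoid that keeps at most 100 deferred values before compacting
val batchedMonoid: Monoid[Batched[Int]] = Batched.compactingMonoid[Int](100)
}}}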
* - * This class is agnostic about whether the traversal is - * left-to-right or right-to-left. The abstract method `descend` - * controls which direction the iterator moves. + * This class is agnostic about whether the traversal is left-to-right or right-to-left. The abstract method + * `descend` controls which direction the iterator moves. */ private[algebird] abstract class ItemsIterator[A](root: Batched[A]) extends Iterator[A] { var stack: List[Batched[A]] = Nil @@ -311,9 +291,8 @@ object Batched { /** * Compacting semigroup for batched values. * - * This semigroup ensures that the batch's tree structure has fewer - * than `batchSize` values in it. When more values are added, the - * tree is compacted using `s`. + * This semigroup ensures that the batch's tree structure has fewer than `batchSize` values in it. When more + * values are added, the tree is compacted using `s`. */ class BatchedSemigroup[T: Semigroup](batchSize: Int) extends Semigroup[Batched[T]] { @@ -326,9 +305,8 @@ class BatchedSemigroup[T: Semigroup](batchSize: Int) extends Semigroup[Batched[T /** * Compacting monoid for batched values. * - * This monoid ensures that the batch's tree structure has fewer - * than `batchSize` values in it. When more values are added, the - * tree is compacted using `m`. + * This monoid ensures that the batch's tree structure has fewer than `batchSize` values in it. When more + * values are added, the tree is compacted using `m`. */ class BatchedMonoid[T: Monoid](batchSize: Int) extends BatchedSemigroup[T](batchSize) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala b/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala index c12976739..bda97981d 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala @@ -125,19 +125,17 @@ object BloomFilter { } /** - * Cardinality estimates are taken from Theorem 1 on page 15 of - * "Cardinality estimation and dynamic length adaptation for Bloom filters" - * by Papapetrou, Siberski, and Nejdl: + * Cardinality estimates are taken from Theorem 1 on page 15 of "Cardinality estimation and dynamic length + * adaptation for Bloom filters" by Papapetrou, Siberski, and Nejdl: * http://www.softnet.tuc.gr/~papapetrou/publications/Bloomfilters-DAPD.pdf * - * Roughly, by using bounds on the expected number of true bits after n elements - * have been inserted into the Bloom filter, we can go from the actual number of - * true bits (which is known) to an estimate of the cardinality. + * Roughly, by using bounds on the expected number of true bits after n elements have been inserted into the + * Bloom filter, we can go from the actual number of true bits (which is known) to an estimate of the + * cardinality. * - * approximationWidth defines an interval around the maximum-likelihood cardinality - * estimate. Namely, the approximation returned is of the form - * (min, estimate, max) = - * ((1 - approxWidth) * estimate, estimate, (1 + approxWidth) * estimate) + * approximationWidth defines an interval around the maximum-likelihood cardinality estimate. Namely, the + * approximation returned is of the form (min, estimate, max) = ((1 - approxWidth) * estimate, estimate, (1 + * + approxWidth) * estimate) */ def sizeEstimate( numBits: Int, @@ -185,9 +183,8 @@ object BloomFilter { /** * Bloom Filter - a probabilistic data structure to test presence of an element. 
* - * Operations - * 1) insert: hash the value k times, updating the bitfield at the index equal to each hashed value - * 2) query: hash the value k times. If there are k collisions, then return true; otherwise false. + * Operations 1) insert: hash the value k times, updating the bitfield at the index equal to each hashed value + * 2) query: hash the value k times. If there are k collisions, then return true; otherwise false. * * http://en.wikipedia.org/wiki/Bloom_filter */ @@ -199,8 +196,8 @@ case class BloomFilterMonoid[A](numHashes: Int, width: Int)(implicit hash: Hash1 override val zero: BF[A] = BFZero[A](hashes, width) /** - * Assume the bloom filters are compatible (same width and same hashing functions). This - * is the union of the 2 bloom filters. + * Assume the bloom filters are compatible (same width and same hashing functions). This is the union of the + * 2 bloom filters. */ override def plus(left: BF[A], right: BF[A]): BF[A] = left ++ right @@ -380,8 +377,7 @@ sealed abstract class BF[A] extends java.io.Serializable { } /** - * This may be faster if you don't care about evaluating - * the false positive probability + * This may be faster if you don't care about evaluating the false positive probability */ def maybeContains(item: A): Boolean @@ -392,9 +388,8 @@ sealed abstract class BF[A] extends java.io.Serializable { def toBitSet: BitSet /** - * Compute the Hamming distance between the two Bloom filters - * `a` and `b`. The distance is defined as the number of bits that - * need to change to in order to transform one filter into the other. + * Compute the Hamming distance between the two Bloom filters `a` and `b`. The distance is defined as the + * number of bits that need to change to in order to transform one filter into the other. */ def hammingDistance(that: BF[A]): Int = (this, that) match { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/BufferedOperation.scala b/algebird-core/src/main/scala/com/twitter/algebird/BufferedOperation.scala index 63cce239a..e8c45b668 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/BufferedOperation.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/BufferedOperation.scala @@ -19,9 +19,8 @@ package com.twitter.algebird import scala.collection.mutable.ArrayBuffer /** - * Represents something that consumes I and may emit O. Has some internal - * state that may be used to improve performance. - * Generally used to model folds or reduces (see BufferedReduce) + * Represents something that consumes I and may emit O. Has some internal state that may be used to improve + * performance. 
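Returning to the Bloom filter operations above, a small usage sketch; the `BloomFilter(numEntries, fpProb)` sizing helper, an implicit `Hash128[String]`, and `contains` returning an `ApproximateBoolean` are assumptions here, while `create`, `++`, and `maybeContains` are the operations documented in these hunks:

{{{
import com.twitter.algebird.BloomFilter

// Assumed helper: size the monoid for ~1000 entries at a 1% false-positive rate
val bfMonoid = BloomFilter[String](1000, 0.01)

val bf = bfMonoid.create("apple") ++ bfMonoid.create("banana") // union of two filters
bf.maybeContains("apple") // true; a Bloom filter never yields false negatives
bf.contains("cherry")     // ApproximateBoolean: false with high probability
}}}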
Generally used to model folds or reduces (see BufferedReduce) */ trait Buffered[I, O] extends java.io.Serializable { def put(i: I): Option[O] @@ -56,8 +55,8 @@ abstract class ArrayBufferedOperation[I, O](size: Int) extends Buffered[I, O] { object ArrayBufferedOperation { /** - * Returns an ArrayBufferedOperation instance that internally uses - * the `sumOption` implementation of the supplied Semigroup[T] + * Returns an ArrayBufferedOperation instance that internally uses the `sumOption` implementation of the + * supplied Semigroup[T] */ def fromSumOption[T](size: Int)(implicit sg: Semigroup[T]): BufferedReduce[T] = new ArrayBufferedOperation[T, T](size) with BufferedReduce[T] { @@ -68,8 +67,8 @@ object ArrayBufferedOperation { } /** - * This never emits on put, you must call flush - * designed to be use in the stackable pattern with ArrayBufferedOperation + * This never emits on put, you must call flush designed to be use in the stackable pattern with + * ArrayBufferedOperation */ trait BufferedReduce[V] extends Buffered[V, V] { abstract override def put(item: V): Option[V] = { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Bytes.scala b/algebird-core/src/main/scala/com/twitter/algebird/Bytes.scala index 403a40e19..d774e19d3 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Bytes.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Bytes.scala @@ -3,16 +3,16 @@ package com.twitter.algebird import java.nio.ByteBuffer /** - * A wrapper for `Array[Byte]` that provides sane implementations of `hashCode`, `equals`, and `toString`. - * The wrapped array of bytes is assumed to be never modified. + * A wrapper for `Array[Byte]` that provides sane implementations of `hashCode`, `equals`, and `toString`. The + * wrapped array of bytes is assumed to be never modified. * - * Note: Unfortunately we cannot make [[Bytes]] a value class because a value class may not override the `hashCode` - * and `equals` methods (cf. SIP-15, criterion 4). + * Note: Unfortunately we cannot make [[Bytes]] a value class because a value class may not override the + * `hashCode` and `equals` methods (cf. SIP-15, criterion 4). * * =Alternatives= * - * Instead of wrapping an `Array[Byte]` with this class you can also convert an `Array[Byte]` to a `Seq[Byte]` via - * Scala's `toSeq` method: + * Instead of wrapping an `Array[Byte]` with this class you can also convert an `Array[Byte]` to a `Seq[Byte]` + * via Scala's `toSeq` method: * * {{{ * val arrayByte: Array[Byte] = Array(1.toByte) @@ -21,12 +21,14 @@ import java.nio.ByteBuffer * * Like [[Bytes]], a `Seq[Byte]` has sane `hashCode`, `equals`, and `toString` implementations. * - * Performance-wise we found that a `Seq[Byte]` is comparable to [[Bytes]]. For example, a `CMS[Seq[Byte]]` was - * measured to be only slightly slower than `CMS[Bytes]` (think: single-digit percentages). + * Performance-wise we found that a `Seq[Byte]` is comparable to [[Bytes]]. For example, a `CMS[Seq[Byte]]` + * was measured to be only slightly slower than `CMS[Bytes]` (think: single-digit percentages). 
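A quick sketch of why the `Bytes` wrapper above matters in practice (the alternative `toSeq` route is shown in the scaladoc itself):

{{{
import com.twitter.algebird.Bytes

val k1 = Bytes(Array[Byte](1, 2, 3))
val k2 = Bytes(Array[Byte](1, 2, 3))

k1 == k2                  // true: content equality, unlike a raw Array[Byte]
Map(k1 -> 1).contains(k2) // true, so Bytes works as a map or CMS key
}}}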
* - * @param array the wrapped array of bytes + * @param array + * the wrapped array of bytes * - * @see [[MinHasher]] + * @see + * [[MinHasher]] */ final case class Bytes(array: Array[Byte]) extends java.io.Serializable { @@ -36,10 +38,11 @@ final case class Bytes(array: Array[Byte]) extends java.io.Serializable { scala.util.hashing.MurmurHash3.arrayHash(array, Bytes.HashSeed) /** - * Implementation detail: This `equals` method is defined in terms of the wrapped array, which is a mutable field. - * In general such a definition of `equals` is considered bad practice, but in our case we justify the use of a - * mutable field because the contract of [[Bytes]] requires that the wrapped array must never be modified (and we - * intentionally do not create a defensive, immutable copy because of performance considerations). + * Implementation detail: This `equals` method is defined in terms of the wrapped array, which is a mutable + * field. In general such a definition of `equals` is considered bad practice, but in our case we justify + * the use of a mutable field because the contract of [[Bytes]] requires that the wrapped array must never + * be modified (and we intentionally do not create a defensive, immutable copy because of performance + * considerations). */ override def equals(that: Any): Boolean = that match { case Bytes(thatArray) => java.util.Arrays.equals(array, thatArray) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/CMSHasher.scala b/algebird-core/src/main/scala/com/twitter/algebird/CMSHasher.scala index 8d3c1069d..6db85268d 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/CMSHasher.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/CMSHasher.scala @@ -1,19 +1,20 @@ package com.twitter.algebird /** - * The Count-Min sketch uses `d` (aka `depth`) pair-wise independent hash functions drawn from a universal hashing - * family of the form: + * The Count-Min sketch uses `d` (aka `depth`) pair-wise independent hash functions drawn from a universal + * hashing family of the form: * * `h(x) = [a * x + b (mod p)] (mod m)` * - * As a requirement for using CMS you must provide an implicit `CMSHasher[K]` for the type `K` of the items you want to - * count. Algebird ships with several such implicits for commonly used types `K` such as `Long` and `BigInt`. + * As a requirement for using CMS you must provide an implicit `CMSHasher[K]` for the type `K` of the items + * you want to count. Algebird ships with several such implicits for commonly used types `K` such as `Long` + * and `BigInt`. * - * If your type `K` is not supported out of the box, you have two options: 1) You provide a "translation" function to - * convert items of your (unsupported) type `K` to a supported type such as Double, and then use the `contramap` - * function of [[CMSHasher]] to create the required `CMSHasher[K]` for your type (see the documentation of `contramap` - * for an example); 2) You implement a `CMSHasher[K]` from scratch, using the existing CMSHasher implementations as a - * starting point. + * If your type `K` is not supported out of the box, you have two options: 1) You provide a "translation" + * function to convert items of your (unsupported) type `K` to a supported type such as Double, and then use + * the `contramap` function of [[CMSHasher]] to create the required `CMSHasher[K]` for your type (see the + * documentation of `contramap` for an example); 2) You implement a `CMSHasher[K]` from scratch, using the + * existing CMSHasher implementations as a starting point. 
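A sketch of option 1 from the paragraph above, deriving a hasher for a wrapper type via `contramap`; `UserId` is made up for illustration, and an implicit `CMSHasher[Long]` is assumed to be in scope (per the text, Algebird ships one for `Long`):

{{{
import com.twitter.algebird.CMSHasher

// Hypothetical identifier type, counted via its underlying Long
case class UserId(value: Long)

implicit val userIdHasher: CMSHasher[UserId] =
  implicitly[CMSHasher[Long]].contramap[UserId](_.value)
}}}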
*/ trait CMSHasher[K] extends java.io.Serializable { @@ -43,8 +44,8 @@ trait CMSHasher[K] extends java.io.Serializable { * def hash(a: Int, b: Int, width: Int)(x: L): CMSHasher[L] = CMSHasher[K].hash(a, b, width)(f(x)) * }}} * - * Be aware that the use of contramap may come at a cost (e.g. increased time) due to the translation calls between - * `K` and `L`. + * Be aware that the use of contramap may come at a cost (e.g. increased time) due to the translation calls + * between `K` and `L`. * * =Usage= * @@ -93,27 +94,32 @@ object CMSHasher { /** * =Implementation details= * - * This hash function is based upon Murmur3. Note that the original CMS paper requires - * `d` (depth) pair-wise independent hash functions; in the specific case of Murmur3 we argue that it is sufficient - * to pass `d` different seed values to Murmur3 to achieve a similar effect. + * This hash function is based upon Murmur3. Note that the original CMS paper requires `d` (depth) pair-wise + * independent hash functions; in the specific case of Murmur3 we argue that it is sufficient to pass `d` + * different seed values to Murmur3 to achieve a similar effect. * - * To seed Murmur3 we use only `a`, which is a randomly drawn `Int` via [[scala.util.Random]] in the CMS code. - * What is important to note is that we intentionally ignore `b`. Why? We need to ensure that we seed Murmur3 with - * a random value, notably one that is uniformly distributed. Somewhat surprisingly, combining two random values - * (such as `a` and `b` in our case) typically worsens the "randomness" of the combination, i.e. the combination is - * less uniformly distributed as either of its original inputs. Hence the combination of two random values is - * discouraged in this context, notably if the two random inputs were generated from the same source anyways, which - * is the case for us because we use Scala's PRNG only. + * To seed Murmur3 we use only `a`, which is a randomly drawn `Int` via [[scala.util.Random]] in the CMS + * code. What is important to note is that we intentionally ignore `b`. Why? We need to ensure that we seed + * Murmur3 with a random value, notably one that is uniformly distributed. Somewhat surprisingly, combining + * two random values (such as `a` and `b` in our case) typically worsens the "randomness" of the + * combination, i.e. the combination is less uniformly distributed as either of its original inputs. Hence + * the combination of two random values is discouraged in this context, notably if the two random inputs + * were generated from the same source anyways, which is the case for us because we use Scala's PRNG only. * * For further details please refer to the discussion * [[http://stackoverflow.com/questions/3956478/understanding-randomness Understanding Randomness]] on * StackOverflow. * - * @param a Must be a random value, typically created via [[scala.util.Random]]. - * @param b Ignored by this particular hash function, see the reasoning above for the justification. - * @param width Width of the CMS counting table, i.e. the width/size of each row in the counting table. - * @param x Item to be hashed. - * @return Slot assigned to item `x` in the vector of size `width`, where `x in [0, width)`. + * @param a + * Must be a random value, typically created via [[scala.util.Random]]. + * @param b + * Ignored by this particular hash function, see the reasoning above for the justification. + * @param width + * Width of the CMS counting table, i.e. the width/size of each row in the counting table. 
+ * @param x + * Item to be hashed. + * @return + * Slot assigned to item `x` in the vector of size `width`, where `x in [0, width)`. */ private[algebird] def hashBytes(a: Int, b: Int, width: Int)(x: Array[Byte]): Int = { val _ = b // suppressing unused `b` diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Combinator.scala b/algebird-core/src/main/scala/com/twitter/algebird/Combinator.scala index 9bc23d6db..f908d7805 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Combinator.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Combinator.scala @@ -16,19 +16,18 @@ limitations under the License. package com.twitter.algebird /** - * This is a combinator on semigroups, after you do the plus, you transform B with a fold function - * This will not be valid for all fold functions. You need to prove that it is still associative. + * This is a combinator on semigroups, after you do the plus, you transform B with a fold function This will + * not be valid for all fold functions. You need to prove that it is still associative. * * Clearly only values of (a,b) are valid if fold(a,b) == b, so keep that in mind. * - * I have not yet found a sufficient condition on (A,B) => B that makes it correct - * Clearly a (trivial) constant function {(l,r) => r} works. - * Also, if B is List[T], and (l:A,r:List[T]) = r.sortBy(fn(l)) - * this works as well (due to the associativity on A, and the fact that the list never loses data). + * I have not yet found a sufficient condition on (A,B) => B that makes it correct Clearly a (trivial) + * constant function {(l,r) => r} works. Also, if B is List[T], and (l:A,r:List[T]) = r.sortBy(fn(l)) this + * works as well (due to the associativity on A, and the fact that the list never loses data). * - * For approximate lists (like top-K applications) this might work (or be close enough to associative - * that for approximation algorithms it is fine), and in fact, that is the main motivation of this code: - * Produce some ordering in A, and use it to do sorted-topK on the list in B. + * For approximate lists (like top-K applications) this might work (or be close enough to associative that for + * approximation algorithms it is fine), and in fact, that is the main motivation of this code: Produce some + * ordering in A, and use it to do sorted-topK on the list in B. * * Seems like an open topic here.... you are obliged to think on your own about this. */ diff --git a/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala index d376735dc..a4274ebc8 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala @@ -11,12 +11,12 @@ object Correlation { } /** - * A class to calculate covariance and the first two central moments of a sequence of pairs of Doubles, from which the - * pearson correlation coeifficient can be calculated. + * A class to calculate covariance and the first two central moments of a sequence of pairs of Doubles, from + * which the pearson correlation coeifficient can be calculated. * - * m{i}x denotes the ith central moment of the first projection of the pair. - * m{i}y denotes the ith central moment of the second projection of the pair. - * c2 the covariance equivalent of the second central moment, i.e. c2 = Sum_(x,y) (x - m1x)*(y - m1y). + * m{i}x denotes the ith central moment of the first projection of the pair. 
m{i}y denotes the ith central + * moment of the second projection of the pair. c2 the covariance equivalent of the second central moment, + * i.e. c2 = Sum_(x,y) (x - m1x)*(y - m1y). */ case class Correlation(c2: Double, m2x: Double, m2y: Double, m1x: Double, m1y: Double, m0: Double) { def totalWeight: Double = m0 @@ -38,7 +38,8 @@ case class Correlation(c2: Double, m2x: Double, m2y: Double, m1x: Double, m1y: D def covariance: Double = c2 / totalWeight /** - * @return Pearson's correlation coefficient + * @return + * Pearson's correlation coefficient */ def correlation: Double = // correlation is defined as: covariance / (varianceLeft * varianceRight) @@ -47,10 +48,12 @@ case class Correlation(c2: Double, m2x: Double, m2y: Double, m1x: Double, m1y: D c2 / (Math.sqrt(m2x * m2y)) /** - * Assume this instance of Correlation came from summing together Correlation.apply((x_i, y_i)) for i in 1...n. + * Assume this instance of Correlation came from summing together Correlation.apply((x_i, y_i)) for i in + * 1...n. * - * @return (m, b) where y = mx + b is the line with the least squares fit of the points (x_i, y_i). - * See, e.g. https://mathworld.wolfram.com/LeastSquaresFitting.html. + * @return + * (m, b) where y = mx + b is the line with the least squares fit of the points (x_i, y_i). See, e.g. + * https://mathworld.wolfram.com/LeastSquaresFitting.html. */ def linearLeastSquares: (Double, Double) = { val m = c2 / m2x @@ -75,16 +78,15 @@ case class Correlation(c2: Double, m2x: Double, m2y: Double, m1x: Double, m1y: D object CorrelationMonoid extends Monoid[Correlation] { /** - * The algorithm for combining the correlation calculations from two partitions of pairs of numbers. Comes from - * Pébay, Philippe (2008), "Formulas for Robust, One-Pass Parallel Computation of Covariances and Arbitrary-Order Statistical Moments", - * Technical Report SAND2008-6212, Sandia National Laboratories + * The algorithm for combining the correlation calculations from two partitions of pairs of numbers. Comes + * from Pébay, Philippe (2008), "Formulas for Robust, One-Pass Parallel Computation of Covariances and + * Arbitrary-Order Statistical Moments", Technical Report SAND2008-6212, Sandia National Laboratories * https://prod-ng.sandia.gov/techlib-noauth/access-control.cgi/2008/086212.pdf * - * Extending this to weights can be found in - * Schubert, Erich; Gertz, Michael (9 July 2018). Numerically stable parallel computation of (co-)variance. - * ACM. p. 10. doi:10.1145/3221269.3223036. ISBN 9781450365055. - * http://dl.acm.org/citation.cfm?id=3221269.3223036 - * https://dl.acm.org/doi/10.1145/3221269.3223036 + * Extending this to weights can be found in Schubert, Erich; Gertz, Michael (9 July 2018). Numerically + * stable parallel computation of (co-)variance. ACM. p. 10. doi:10.1145/3221269.3223036. ISBN + * 9781450365055. 
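A small sketch of how the pieces documented above fit together, using `Correlation.apply((x, y))` and the monoid's `plus` as described (the sample points are illustrative only):

{{{
import com.twitter.algebird.{Correlation, CorrelationMonoid}

val points = List((1.0, 2.0), (2.0, 4.1), (3.0, 5.9))
val summed = points.map(p => Correlation(p)).reduce(CorrelationMonoid.plus)

summed.correlation        // close to 1.0: the points are nearly collinear
summed.linearLeastSquares // (m, b) of the least-squares line y = mx + b
}}}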
http://dl.acm.org/citation.cfm?id=3221269.3223036 + * https://dl.acm.org/doi/10.1145/3221269.3223036 */ override def plus(a: Correlation, b: Correlation): Correlation = { val count = a.totalWeight + b.totalWeight diff --git a/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala b/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala index 7d16875d8..3e8d47c39 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala @@ -21,43 +21,38 @@ import algebra.CommutativeMonoid import scala.collection.compat._ /** - * A Count-Min sketch is a probabilistic data structure used for summarizing - * streams of data in sub-linear space. + * A Count-Min sketch is a probabilistic data structure used for summarizing streams of data in sub-linear + * space. * - * It works as follows. Let `(eps, delta)` be two parameters that describe the - * confidence in our error estimates, and let `d = ceil(ln 1/delta)` - * and `w = ceil(e / eps)`. + * It works as follows. Let `(eps, delta)` be two parameters that describe the confidence in our error + * estimates, and let `d = ceil(ln 1/delta)` and `w = ceil(e / eps)`. * - * Note: Throughout the code `d` and `w` are called `depth` and `width`, - * respectively. + * Note: Throughout the code `d` and `w` are called `depth` and `width`, respectively. * * Then: * - * - Take `d` pairwise independent hash functions `h_i`, each of which maps - * onto the domain `[0, w - 1]`. - * - Create a 2-dimensional table of counts, with `d` rows and `w` columns, - * initialized with all zeroes. - * - When a new element x arrives in the stream, update the table of counts - * by setting `counts[i, h_i[x]] += 1`, for each `1 <= i <= d`. - * - (Note the rough similarity to a Bloom filter.) + * - Take `d` pairwise independent hash functions `h_i`, each of which maps onto the domain `[0, w - 1]`. + * - Create a 2-dimensional table of counts, with `d` rows and `w` columns, initialized with all zeroes. + * - When a new element x arrives in the stream, update the table of counts by setting `counts[i, h_i[x]] += + * 1`, for each `1 <= i <= d`. + * - (Note the rough similarity to a Bloom filter.) * - * As an example application, suppose you want to estimate the number of - * times an element `x` has appeared in a data stream so far. - * The Count-Min sketch estimate of this frequency is + * As an example application, suppose you want to estimate the number of times an element `x` has appeared in + * a data stream so far. The Count-Min sketch estimate of this frequency is * - * min_i { counts[i, h_i[x]] } + * min_i { counts[i, h_i[x]] } * - * With probability at least `1 - delta`, this estimate is within `eps * N` - * of the true frequency (i.e., `true frequency <= estimate <= true frequency + eps * N`), - * where N is the total size of the stream so far. + * With probability at least `1 - delta`, this estimate is within `eps * N` of the true frequency (i.e., `true + * frequency <= estimate <= true frequency + eps * N`), where N is the total size of the stream so far. * - * See http://www.eecs.harvard.edu/~michaelm/CS222/countmin.pdf for technical details, - * including proofs of the estimates and error bounds used in this implementation. + * See http://www.eecs.harvard.edu/~michaelm/CS222/countmin.pdf for technical details, including proofs of the + * estimates and error bounds used in this implementation. 
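Plugging representative numbers into the `d = ceil(ln 1/delta)` and `w = ceil(e / eps)` formulas above, via the `CMSFunctions` helpers that the rest of this file delegates to:

{{{
import com.twitter.algebird.CMSFunctions

CMSFunctions.depth(1e-10) // ceil(ln(1 / 1e-10)) = 24 hash functions (rows)
CMSFunctions.width(0.001) // ceil(e / 0.001)     = 2719 counters per row
}}}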
* * Parts of this implementation are taken from * https://github.com/clearspring/stream-lib/blob/master/src/main/java/com/clearspring/analytics/stream/frequency/CountMinSketch.java * - * @author Edwin Chen + * @author + * Edwin Chen */ /** * Monoid for adding CMS sketches. @@ -65,40 +60,43 @@ import scala.collection.compat._ * =Usage= * * `eps` and `delta` are parameters that bound the error of each query estimate. For example, errors in - * answering point queries (e.g., how often has element x appeared in the stream described by the sketch?) - * are often of the form: "with probability p >= 1 - delta, the estimate is close to the truth by - * some factor depending on eps." - * - * The type `K` is the type of items you want to count. You must provide an implicit `CMSHasher[K]` for `K`, and - * Algebird ships with several such implicits for commonly used types such as `Long` and `BigInt`. - * - * If your type `K` is not supported out of the box, you have two options: 1) You provide a "translation" function to - * convert items of your (unsupported) type `K` to a supported type such as Double, and then use the `contramap` - * function of [[CMSHasher]] to create the required `CMSHasher[K]` for your type (see the documentation of [[CMSHasher]] - * for an example); 2) You implement a `CMSHasher[K]` from scratch, using the existing CMSHasher implementations as a - * starting point. - * - * Note: Because Arrays in Scala/Java not have sane `equals` and `hashCode` implementations, you cannot safely use types - * such as `Array[Byte]`. Extra work is required for Arrays. For example, you may opt to convert `Array[T]` to a - * `Seq[T]` via `toSeq`, or you can provide appropriate wrapper classes. Algebird provides one such wrapper class, - * [[Bytes]], to safely wrap an `Array[Byte]` for use with CMS. - * - * @param eps One-sided error bound on the error of each point query, i.e. frequency estimate. - * @param delta A bound on the probability that a query estimate does not lie within some small interval - * (an interval that depends on `eps`) around the truth. - * @param seed A seed to initialize the random number generator used to create the pairwise independent - * hash functions. - * @param maxExactCountOpt An Option parameter about how many exact counts a sparse CMS wants to keep. - * @tparam K The type used to identify the elements to be counted. For example, if you want to count the occurrence of - * user names, you could map each username to a unique numeric ID expressed as a `Long`, and then count the - * occurrences of those `Long`s with a CMS of type `K=Long`. Note that this mapping between the elements of - * your problem domain and their identifiers used for counting via CMS should be bijective. - * We require a [[CMSHasher]] context bound for `K`, see [[CMSHasherImplicits]] for available implicits that - * can be imported. - * Which type K should you pick in practice? For domains that have less than `2^64` unique elements, you'd - * typically use `Long`. For larger domains you can try `BigInt`, for example. Other possibilities - * include Spire's `SafeLong` and `Numerical` data types (https://github.com/non/spire), though Algebird does - * not include the required implicits for CMS-hashing (cf. [[CMSHasherImplicits]]. + * answering point queries (e.g., how often has element x appeared in the stream described by the sketch?) are + * often of the form: "with probability p >= 1 - delta, the estimate is close to the truth by some factor + * depending on eps." 
+ * + * The type `K` is the type of items you want to count. You must provide an implicit `CMSHasher[K]` for `K`, + * and Algebird ships with several such implicits for commonly used types such as `Long` and `BigInt`. + * + * If your type `K` is not supported out of the box, you have two options: 1) You provide a "translation" + * function to convert items of your (unsupported) type `K` to a supported type such as Double, and then use + * the `contramap` function of [[CMSHasher]] to create the required `CMSHasher[K]` for your type (see the + * documentation of [[CMSHasher]] for an example); 2) You implement a `CMSHasher[K]` from scratch, using the + * existing CMSHasher implementations as a starting point. + * + * Note: Because Arrays in Scala/Java not have sane `equals` and `hashCode` implementations, you cannot safely + * use types such as `Array[Byte]`. Extra work is required for Arrays. For example, you may opt to convert + * `Array[T]` to a `Seq[T]` via `toSeq`, or you can provide appropriate wrapper classes. Algebird provides one + * such wrapper class, [[Bytes]], to safely wrap an `Array[Byte]` for use with CMS. + * + * @param eps + * One-sided error bound on the error of each point query, i.e. frequency estimate. + * @param delta + * A bound on the probability that a query estimate does not lie within some small interval (an interval + * that depends on `eps`) around the truth. + * @param seed + * A seed to initialize the random number generator used to create the pairwise independent hash functions. + * @param maxExactCountOpt + * An Option parameter about how many exact counts a sparse CMS wants to keep. + * @tparam K + * The type used to identify the elements to be counted. For example, if you want to count the occurrence of + * user names, you could map each username to a unique numeric ID expressed as a `Long`, and then count the + * occurrences of those `Long`s with a CMS of type `K=Long`. Note that this mapping between the elements of + * your problem domain and their identifiers used for counting via CMS should be bijective. We require a + * [[CMSHasher]] context bound for `K`, see [[CMSHasherImplicits]] for available implicits that can be + * imported. Which type K should you pick in practice? For domains that have less than `2^64` unique + * elements, you'd typically use `Long`. For larger domains you can try `BigInt`, for example. Other + * possibilities include Spire's `SafeLong` and `Numerical` data types (https://github.com/non/spire), + * though Algebird does not include the required implicits for CMS-hashing (cf. [[CMSHasherImplicits]]. */ class CMSMonoid[K: CMSHasher](eps: Double, delta: Double, seed: Int, maxExactCountOpt: Option[Int] = None) extends Monoid[CMS[K]] @@ -146,11 +144,9 @@ class CMSMonoid[K: CMSHasher](eps: Double, delta: Double, seed: Int, maxExactCou } /** - * This mutable builder can be used when speed is essential - * and you can be sure the scope of the mutability cannot escape - * in an unsafe way. The intended use is to allocate and call - * result in one method without letting a reference to the instance - * escape into a closure. + * This mutable builder can be used when speed is essential and you can be sure the scope of the mutability + * cannot escape in an unsafe way. The intended use is to allocate and call result in one method without + * letting a reference to the instance escape into a closure. 
*/ class CMSSummation[K](params: CMSParams[K]) { private[this] val hashes = params.hashes.toArray @@ -224,7 +220,7 @@ class CMSSummation[K](params: CMSParams[K]) { } /** - * An Aggregator for [[CMS]]. Can be created using CMS.aggregator. + * An Aggregator for [[CMS]]. Can be created using CMS.aggregator. */ case class CMSAggregator[K](cmsMonoid: CMSMonoid[K]) extends MonoidAggregator[K, CMS[K], CMS[K]] { override val monoid: CMSMonoid[K] = cmsMonoid @@ -238,13 +234,18 @@ case class CMSAggregator[K](cmsMonoid: CMSMonoid[K]) extends MonoidAggregator[K, /** * Configuration parameters for [[CMS]]. * - * @param hashes Pair-wise independent hashes functions. We need `N=depth` such functions (`depth` can be derived from - * `delta`). - * @param eps One-sided error bound on the error of each point query, i.e. frequency estimate. - * @param delta A bound on the probability that a query estimate does not lie within some small interval - * (an interval that depends on `eps`) around the truth. - * @param maxExactCountOpt An Option parameter about how many exact counts a sparse CMS wants to keep. - * @tparam K The type used to identify the elements to be counted. + * @param hashes + * Pair-wise independent hashes functions. We need `N=depth` such functions (`depth` can be derived from + * `delta`). + * @param eps + * One-sided error bound on the error of each point query, i.e. frequency estimate. + * @param delta + * A bound on the probability that a query estimate does not lie within some small interval (an interval + * that depends on `eps`) around the truth. + * @param maxExactCountOpt + * An Option parameter about how many exact counts a sparse CMS wants to keep. + * @tparam K + * The type used to identify the elements to be counted. */ case class CMSParams[K]( hashes: Seq[CMSHash[K]], @@ -322,12 +323,17 @@ object CMSFunctions { /** * Generates `N=depth` pair-wise independent hash functions. * - * @param eps One-sided error bound on the error of each point query, i.e. frequency estimate. - * @param delta Error bound on the probability that a query estimate does NOT lie within some small interval around - * the truth. - * @param seed Seed for the random number generator. - * @tparam K The type used to identify the elements to be counted. - * @return The generated hash functions. + * @param eps + * One-sided error bound on the error of each point query, i.e. frequency estimate. + * @param delta + * Error bound on the probability that a query estimate does NOT lie within some small interval around the + * truth. + * @param seed + * Seed for the random number generator. + * @tparam K + * The type used to identify the elements to be counted. + * @return + * The generated hash functions. */ def generateHashes[K: CMSHasher](eps: Double, delta: Double, seed: Int): Seq[CMSHash[K]] = { // Typically, we would use d -- aka depth -- pair-wise independent hash functions of the form @@ -348,13 +354,15 @@ object CMSFunctions { } /** - * A trait for CMS implementations that can count elements in a data stream and that can answer point queries (i.e. - * frequency estimates) for these elements. + * A trait for CMS implementations that can count elements in a data stream and that can answer point queries + * (i.e. frequency estimates) for these elements. * * Known implementations: [[CMS]], [[TopCMS]]. * - * @tparam K The type used to identify the elements to be counted. - * @tparam C The type of the actual CMS that implements this trait. + * @tparam K + * The type used to identify the elements to be counted. 
+ * @tparam C + * The type of the actual CMS that implements this trait. */ trait CMSCounting[K, C[_]] { @@ -364,19 +372,20 @@ trait CMSCounting[K, C[_]] { def eps: Double /** - * Returns the bound on the probability that a query estimate does NOT lie within some small interval (an interval - * that depends on `eps`) around the truth. + * Returns the bound on the probability that a query estimate does NOT lie within some small interval (an + * interval that depends on `eps`) around the truth. */ def delta: Double /** - * Number of hash functions (also: number of rows in the counting table). This number is derived from `delta`. + * Number of hash functions (also: number of rows in the counting table). This number is derived from + * `delta`. */ def depth: Int = CMSFunctions.depth(delta) /** - * Number of counters per hash function (also: number of columns in the counting table). This number is derived from - * `eps`. + * Number of counters per hash function (also: number of columns in the counting table). This number is + * derived from `eps`. */ def width: Int = CMSFunctions.width(eps) @@ -407,27 +416,25 @@ trait CMSCounting[K, C[_]] { def +(item: K, count: Long): C[K] /** - * Returns an estimate of the total number of times this item has been seen - * in the stream so far. This estimate is an upper bound. + * Returns an estimate of the total number of times this item has been seen in the stream so far. This + * estimate is an upper bound. * - * It is always true that `estimatedFrequency >= trueFrequency`. - * With probability `p >= 1 - delta`, it also holds that - * `estimatedFrequency <= trueFrequency + eps * totalCount`. + * It is always true that `estimatedFrequency >= trueFrequency`. With probability `p >= 1 - delta`, it also + * holds that `estimatedFrequency <= trueFrequency + eps * totalCount`. */ def frequency(item: K): Approximate[Long] /** * Returns an estimate of the inner product against another data stream. * - * In other words, let a_i denote the number of times element i has been seen in - * the data stream summarized by this CMS, and let b_i denote the same for the other CMS. - * Then this returns an estimate of ` = \sum a_i b_i`. + * In other words, let a_i denote the number of times element i has been seen in the data stream summarized + * by this CMS, and let b_i denote the same for the other CMS. Then this returns an estimate of ` = + * \sum a_i b_i`. * * Note: This can also be viewed as the join size between two relations. * - * It is always true that actualInnerProduct <= estimatedInnerProduct. - * With probability `p >= 1 - delta`, it also holds that - * `estimatedInnerProduct <= actualInnerProduct + eps * thisTotalCount * otherTotalCount`. + * It is always true that actualInnerProduct <= estimatedInnerProduct. With probability `p >= 1 - delta`, it + * also holds that `estimatedInnerProduct <= actualInnerProduct + eps * thisTotalCount * otherTotalCount`. */ def innerProduct(other: C[K]): Approximate[Long] @@ -451,13 +458,14 @@ trait CMSCounting[K, C[_]] { /** * A trait for CMS implementations that can track heavy hitters in a data stream. * - * It is up to the implementation how the semantics of tracking heavy hitters are defined. For instance, one - * implementation could track the "top %" heavy hitters whereas another implementation could track the "top N" heavy - * hitters. + * It is up to the implementation how the semantics of tracking heavy hitters are defined. 
For instance, one + * implementation could track the "top %" heavy hitters whereas another implementation could track the "top N" + * heavy hitters. * * Known implementations: [[TopCMS]]. * - * @tparam K The type used to identify the elements to be counted. + * @tparam K + * The type used to identify the elements to be counted. */ trait CMSHeavyHitters[K] { @@ -530,7 +538,8 @@ object CMS { } /** - * A Count-Min sketch data structure that allows for counting and frequency estimation of elements in a data stream. + * A Count-Min sketch data structure that allows for counting and frequency estimation of elements in a data + * stream. * * Tip: If you also need to track heavy hitters ("Top N" problems), take a look at [[TopCMS]]. * @@ -538,28 +547,23 @@ object CMS { * * This example demonstrates how to count `Long` elements with [[CMS]], i.e. `K=Long`. * - * Note that the actual counting is always performed with a `Long`, regardless of your choice of `K`. That is, - * the counting table behind the scenes is backed by `Long` values (at least in the current implementation), and thus - * the returned frequency estimates are always instances of `Approximate[Long]`. + * Note that the actual counting is always performed with a `Long`, regardless of your choice of `K`. That is, + * the counting table behind the scenes is backed by `Long` values (at least in the current implementation), + * and thus the returned frequency estimates are always instances of `Approximate[Long]`. * - * @example {{{ + * @example + * {{{ * - * // Creates a monoid for a CMS that can count `Long` elements. - * val cmsMonoid: CMSMonoid[Long] = { - * val eps = 0.001 - * val delta = 1E-10 - * val seed = 1 - * CMS.monoid[Long](eps, delta, seed) - * } + * // Creates a monoid for a CMS that can count `Long` elements. val cmsMonoid: CMSMonoid[Long] = { val eps = + * 0.001 val delta = 1E-10 val seed = 1 CMS.monoid[Long](eps, delta, seed) } * - * // Creates a CMS instance that has counted the element `1L`. - * val cms: CMS[Long] = cmsMonoid.create(1L) + * // Creates a CMS instance that has counted the element `1L`. val cms: CMS[Long] = cmsMonoid.create(1L) * - * // Estimates the frequency of `1L` - * val estimate: Approximate[Long] = cms.frequency(1L) + * // Estimates the frequency of `1L` val estimate: Approximate[Long] = cms.frequency(1L) * }}} * - * @tparam K The type used to identify the elements to be counted. + * @tparam K + * The type used to identify the elements to be counted. */ sealed abstract class CMS[K](val params: CMSParams[K]) extends java.io.Serializable with CMSCounting[K, CMS] { @@ -574,7 +578,7 @@ sealed abstract class CMS[K](val params: CMSParams[K]) extends java.io.Serializa } /** - * Zero element. Used for initialization. + * Zero element. Used for initialization. */ case class CMSZero[K](override val params: CMSParams[K]) extends CMS[K](params) { @@ -729,10 +733,9 @@ case class CMSInstance[K]( } /** - * Let X be a CMS, and let count_X[j, k] denote the value in X's 2-dimensional count table at row j and column k. - * Then the Count-Min sketch estimate of the inner product between A and B is the minimum inner product between their - * rows: - * estimatedInnerProduct = min_j (\sum_k count_A[j, k] * count_B[j, k]|) + * Let X be a CMS, and let count_X[j, k] denote the value in X's 2-dimensional count table at row j and + * column k. 
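Spelled out one statement per line, the [[CMS]] usage example from the docs above reads:

{{{
// Creates a monoid for a CMS that can count `Long` elements.
val cmsMonoid: CMSMonoid[Long] = {
  val eps = 0.001
  val delta = 1E-10
  val seed = 1
  CMS.monoid[Long](eps, delta, seed)
}

// Creates a CMS instance that has counted the element `1L`.
val cms: CMS[Long] = cmsMonoid.create(1L)

// Estimates the frequency of `1L`.
val estimate: Approximate[Long] = cms.frequency(1L)
}}}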
Then the Count-Min sketch estimate of the inner product between A and B is the minimum inner + * product between their rows: estimatedInnerProduct = min_j (\sum_k count_A[j, k] * count_B[j, k]|) */ override def innerProduct(other: CMS[K]): Approximate[Long] = other match { @@ -776,8 +779,8 @@ object CMSInstance { } /** - * The 2-dimensional table of counters used in the Count-Min sketch. - * Each row corresponds to a particular hash function. + * The 2-dimensional table of counters used in the Count-Min sketch. Each row corresponds to a particular + * hash function. */ // TODO: implement a dense matrix type, and use it here case class CountsTable[K](counts: Vector[Vector[Long]]) { @@ -839,42 +842,36 @@ object CMSInstance { case class TopCMSParams[K](logic: HeavyHittersLogic[K]) /** - * A Count-Min sketch data structure that allows for (a) counting and frequency estimation of elements in a data stream - * and (b) tracking the heavy hitters among these elements. + * A Count-Min sketch data structure that allows for (a) counting and frequency estimation of elements in a + * data stream and (b) tracking the heavy hitters among these elements. * * The logic of how heavy hitters are computed is pluggable, see [[HeavyHittersLogic]]. * - * Tip: If you do not need to track heavy hitters, take a look at [[CMS]], which is more efficient in this case. + * Tip: If you do not need to track heavy hitters, take a look at [[CMS]], which is more efficient in this + * case. * * =Usage= * * This example demonstrates how to count `Long` elements with [[TopCMS]], i.e. `K=Long`. * - * Note that the actual counting is always performed with a `Long`, regardless of your choice of `K`. That is, - * the counting table behind the scenes is backed by `Long` values (at least in the current implementation), and thus - * the returned frequency estimates are always instances of `Approximate[Long]`. + * Note that the actual counting is always performed with a `Long`, regardless of your choice of `K`. That is, + * the counting table behind the scenes is backed by `Long` values (at least in the current implementation), + * and thus the returned frequency estimates are always instances of `Approximate[Long]`. * - * @example {{{ - * // Creates a monoid for a CMS that can count `Long` elements. - * val topPctCMSMonoid: TopPctCMSMonoid[Long] = { - * val eps = 0.001 - * val delta = 1E-10 - * val seed = 1 - * val heavyHittersPct = 0.1 - * TopPctCMS.monoid[Long](eps, delta, seed, heavyHittersPct) - * } + * @example + * {{{ // Creates a monoid for a CMS that can count `Long` elements. val topPctCMSMonoid: + * TopPctCMSMonoid[Long] = { val eps = 0.001 val delta = 1E-10 val seed = 1 val heavyHittersPct = 0.1 + * TopPctCMS.monoid[Long](eps, delta, seed, heavyHittersPct) } * - * // Creates a TopCMS instance that has counted the element `1L`. - * val topCMS: TopCMS[Long] = topPctCMSMonoid.create(1L) + * // Creates a TopCMS instance that has counted the element `1L`. val topCMS: TopCMS[Long] = + * topPctCMSMonoid.create(1L) * - * // Estimates the frequency of `1L` - * val estimate: Approximate[Long] = topCMS.frequency(1L) + * // Estimates the frequency of `1L` val estimate: Approximate[Long] = topCMS.frequency(1L) * - * // What are the heavy hitters so far? - * val heavyHitters: Set[Long] = topCMS.heavyHitters - * }}} + * // What are the heavy hitters so far? val heavyHitters: Set[Long] = topCMS.heavyHitters }}} * - * @tparam K The type used to identify the elements to be counted. 
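Likewise, the [[TopCMS]] usage example above, laid out one statement per line:

{{{
// Creates a monoid for a CMS that can count `Long` elements and track heavy hitters.
val topPctCMSMonoid: TopPctCMSMonoid[Long] = {
  val eps = 0.001
  val delta = 1E-10
  val seed = 1
  val heavyHittersPct = 0.1
  TopPctCMS.monoid[Long](eps, delta, seed, heavyHittersPct)
}

// Creates a TopCMS instance that has counted the element `1L`.
val topCMS: TopCMS[Long] = topPctCMSMonoid.create(1L)

// Estimates the frequency of `1L`.
val estimate: Approximate[Long] = topCMS.frequency(1L)

// What are the heavy hitters so far?
val heavyHitters: Set[Long] = topCMS.heavyHitters
}}}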
+ * @tparam K + * The type used to identify the elements to be counted. */ sealed abstract class TopCMS[K](val cms: CMS[K], params: TopCMSParams[K]) extends java.io.Serializable @@ -904,7 +901,7 @@ sealed abstract class TopCMS[K](val cms: CMS[K], params: TopCMSParams[K]) } /** - * Zero element. Used for initialization. + * Zero element. Used for initialization. */ case class TopCMSZero[K](override val cms: CMS[K], params: TopCMSParams[K]) extends TopCMS[K](cms, params) { @@ -1061,15 +1058,17 @@ abstract class HeavyHittersLogic[K] extends java.io.Serializable { } /** - * Finds all heavy hitters, i.e., elements in the stream that appear at least `(heavyHittersPct * totalCount)` times. + * Finds all heavy hitters, i.e., elements in the stream that appear at least `(heavyHittersPct * totalCount)` + * times. * - * Every item that appears at least `(heavyHittersPct * totalCount)` times is output, and with probability - * `p >= 1 - delta`, no item whose count is less than `(heavyHittersPct - eps) * totalCount` is output. + * Every item that appears at least `(heavyHittersPct * totalCount)` times is output, and with probability `p + * >= 1 - delta`, no item whose count is less than `(heavyHittersPct - eps) * totalCount` is output. * - * This also means that this parameter is an upper bound on the number of heavy hitters that will be tracked: the set - * of heavy hitters contains at most `1 / heavyHittersPct` elements. For example, if `heavyHittersPct=0.01` (or - * 0.25), then at most `1 / 0.01 = 100` items (or `1 / 0.25 = 4` items) will be tracked/returned as heavy hitters. - * This parameter can thus control the memory footprint required for tracking heavy hitters. + * This also means that this parameter is an upper bound on the number of heavy hitters that will be tracked: + * the set of heavy hitters contains at most `1 / heavyHittersPct` elements. For example, if + * `heavyHittersPct=0.01` (or 0.25), then at most `1 / 0.01 = 100` items (or `1 / 0.25 = 4` items) will be + * tracked/returned as heavy hitters. This parameter can thus control the memory footprint required for + * tracking heavy hitters. */ case class TopPctLogic[K](heavyHittersPct: Double) extends HeavyHittersLogic[K] { @@ -1085,13 +1084,14 @@ case class TopPctLogic[K](heavyHittersPct: Double) extends HeavyHittersLogic[K] /** * Tracks the top N heavy hitters, where `N` is defined by `heavyHittersN`. * - * '''Warning:''' top-N computations are not associative. The effect is that a top-N CMS has an ordering bias (with - * regard to heavy hitters) when merging instances. This means merging heavy hitters across CMS instances may lead to - * incorrect, biased results: the outcome is biased by the order in which CMS instances / heavy hitters are being - * merged, with the rule of thumb being that the earlier a set of heavy hitters is being merged, the more likely is - * the end result biased towards these heavy hitters. + * '''Warning:''' top-N computations are not associative. The effect is that a top-N CMS has an ordering bias + * (with regard to heavy hitters) when merging instances. This means merging heavy hitters across CMS + * instances may lead to incorrect, biased results: the outcome is biased by the order in which CMS instances + * / heavy hitters are being merged, with the rule of thumb being that the earlier a set of heavy hitters is + * being merged, the more likely is the end result biased towards these heavy hitters. 
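To make the warning concrete, a small sketch of why merge order matters for top-N tracking. `TopNCMS.monoid` and `create(Seq(...))` are assumed here to mirror the `TopPctCMS.monoid` and `create(item)` calls shown elsewhere in this file; the point is only that the two merge orders below are not guaranteed to report the same heavy-hitter set.

{{{
val monoid = TopNCMS.monoid[Long](eps = 0.001, delta = 1E-10, seed = 1, heavyHittersN = 2)

val left  = monoid.create(Seq(1L, 1L, 2L))
val right = monoid.create(Seq(3L, 3L, 4L))

// Top-N merging is not associative, so these two results may disagree on the
// reported heavy hitters even though both summarize exactly the same data.
val mergedLR = (left ++ right).heavyHitters
val mergedRL = (right ++ left).heavyHitters
}}}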
* - * @see Discussion in [[https://github.com/twitter/algebird/issues/353 Algebird issue 353]] + * @see + * Discussion in [[https://github.com/twitter/algebird/issues/353 Algebird issue 353]] */ case class TopNLogic[K](heavyHittersN: Int) extends HeavyHittersLogic[K] { @@ -1141,31 +1141,33 @@ case class HeavyHitter[K](item: K, count: Long) extends java.io.Serializable * * =Usage= * - * The type `K` is the type of items you want to count. You must provide an implicit `CMSHasher[K]` for `K`, and - * Algebird ships with several such implicits for commonly used types such as `Long` and `BigInt`. - * - * If your type `K` is not supported out of the box, you have two options: 1) You provide a "translation" function to - * convert items of your (unsupported) type `K` to a supported type such as Double, and then use the `contramap` - * function of [[CMSHasher]] to create the required `CMSHasher[K]` for your type (see the documentation of [[CMSHasher]] - * for an example); 2) You implement a `CMSHasher[K]` from scratch, using the existing CMSHasher implementations as a - * starting point. - * - * Note: Because Arrays in Scala/Java not have sane `equals` and `hashCode` implementations, you cannot safely use types - * such as `Array[Byte]`. Extra work is required for Arrays. For example, you may opt to convert `Array[T]` to a - * `Seq[T]` via `toSeq`, or you can provide appropriate wrapper classes. Algebird provides one such wrapper class, - * [[Bytes]], to safely wrap an `Array[Byte]` for use with CMS. - * - * @param cms A [[CMS]] instance, which is used for the counting and the frequency estimation performed by this class. - * @param heavyHittersPct A threshold for finding heavy hitters, i.e., elements that appear at least - * (heavyHittersPct * totalCount) times in the stream. - * @tparam K The type used to identify the elements to be counted. For example, if you want to count the occurrence of - * user names, you could map each username to a unique numeric ID expressed as a `Long`, and then count the - * occurrences of those `Long`s with a CMS of type `K=Long`. Note that this mapping between the elements of - * your problem domain and their identifiers used for counting via CMS should be bijective. - * We require a [[CMSHasher]] context bound for `K`, see [[CMSHasher]] for available implicits that - * can be imported. - * Which type K should you pick in practice? For domains that have less than `2^64` unique elements, you'd - * typically use `Long`. For larger domains you can try `BigInt`, for example. + * The type `K` is the type of items you want to count. You must provide an implicit `CMSHasher[K]` for `K`, + * and Algebird ships with several such implicits for commonly used types such as `Long` and `BigInt`. + * + * If your type `K` is not supported out of the box, you have two options: 1) You provide a "translation" + * function to convert items of your (unsupported) type `K` to a supported type such as Double, and then use + * the `contramap` function of [[CMSHasher]] to create the required `CMSHasher[K]` for your type (see the + * documentation of [[CMSHasher]] for an example); 2) You implement a `CMSHasher[K]` from scratch, using the + * existing CMSHasher implementations as a starting point. + * + * Note: Because Arrays in Scala/Java not have sane `equals` and `hashCode` implementations, you cannot safely + * use types such as `Array[Byte]`. Extra work is required for Arrays. 
For example, you may opt to convert + * `Array[T]` to a `Seq[T]` via `toSeq`, or you can provide appropriate wrapper classes. Algebird provides one + * such wrapper class, [[Bytes]], to safely wrap an `Array[Byte]` for use with CMS. + * + * @param cms + * A [[CMS]] instance, which is used for the counting and the frequency estimation performed by this class. + * @param heavyHittersPct + * A threshold for finding heavy hitters, i.e., elements that appear at least (heavyHittersPct * totalCount) + * times in the stream. + * @tparam K + * The type used to identify the elements to be counted. For example, if you want to count the occurrence of + * user names, you could map each username to a unique numeric ID expressed as a `Long`, and then count the + * occurrences of those `Long`s with a CMS of type `K=Long`. Note that this mapping between the elements of + * your problem domain and their identifiers used for counting via CMS should be bijective. We require a + * [[CMSHasher]] context bound for `K`, see [[CMSHasher]] for available implicits that can be imported. + * Which type K should you pick in practice? For domains that have less than `2^64` unique elements, you'd + * typically use `Long`. For larger domains you can try `BigInt`, for example. */ class TopPctCMSMonoid[K](cms: CMS[K], heavyHittersPct: Double = 0.01) extends TopCMSMonoid[K](cms, TopPctLogic[K](heavyHittersPct)) @@ -1202,62 +1204,67 @@ object TopPctCMS { } /** - * An Aggregator for [[TopPctCMS]]. Can be created using [[TopPctCMS.aggregator]]. + * An Aggregator for [[TopPctCMS]]. Can be created using [[TopPctCMS.aggregator]]. */ case class TopPctCMSAggregator[K](cmsMonoid: TopPctCMSMonoid[K]) extends TopCMSAggregator(cmsMonoid) /** - * Monoid for top-N based [[TopCMS]] sketches. '''Use with care! (see warning below)''' + * Monoid for top-N based [[TopCMS]] sketches. '''Use with care! (see warning below)''' * * =Warning: Adding top-N CMS instances (`++`) is an unsafe operation= * - * Top-N computations are not associative. The effect is that a top-N CMS has an ordering bias (with regard to heavy - * hitters) when ''merging'' CMS instances (e.g. via `++`). This means merging heavy hitters across CMS instances may - * lead to incorrect, biased results: the outcome is biased by the order in which CMS instances / heavy hitters are - * being merged, with the rule of thumb being that the earlier a set of heavy hitters is being merged, the more likely - * is the end result biased towards these heavy hitters. + * Top-N computations are not associative. The effect is that a top-N CMS has an ordering bias (with regard to + * heavy hitters) when ''merging'' CMS instances (e.g. via `++`). This means merging heavy hitters across CMS + * instances may lead to incorrect, biased results: the outcome is biased by the order in which CMS instances + * / heavy hitters are being merged, with the rule of thumb being that the earlier a set of heavy hitters is + * being merged, the more likely is the end result biased towards these heavy hitters. * - * The warning above only applies when ''adding CMS instances'' (think: `cms1 ++ cms2`). In comparison, heavy hitters - * are correctly computed when: + * The warning above only applies when ''adding CMS instances'' (think: `cms1 ++ cms2`). In comparison, heavy + * hitters are correctly computed when: * * - a top-N CMS instance is created from a single data stream, i.e. `Seq[K]` * - items are added/counted individually, i.e. `cms + item` or `cms + (item, count)`. 
* - * See the discussion in [[https://github.com/twitter/algebird/issues/353 Algebird issue 353]] for further details. + * See the discussion in [[https://github.com/twitter/algebird/issues/353 Algebird issue 353]] for further + * details. * * =Alternatives= * - * The following, alternative data structures may be better picks than a top-N based CMS given the warning above: + * The following, alternative data structures may be better picks than a top-N based CMS given the warning + * above: * * - [[TopPctCMS]]: Has safe merge semantics for its instances including heavy hitters. - * - [[SpaceSaver]]: Has the same ordering bias than a top-N CMS, but at least it provides bounds on the bias. + * - [[SpaceSaver]]: Has the same ordering bias than a top-N CMS, but at least it provides bounds on the + * bias. * * =Usage= * - * The type `K` is the type of items you want to count. You must provide an implicit `CMSHasher[K]` for `K`, and - * Algebird ships with several such implicits for commonly used types such as `Long` and `BigInt`. - * - * If your type `K` is not supported out of the box, you have two options: 1) You provide a "translation" function to - * convert items of your (unsupported) type `K` to a supported type such as [[Double]], and then use the `contramap` - * function of [[CMSHasher]] to create the required `CMSHasher[K]` for your type (see the documentation of [[CMSHasher]] - * for an example); 2) You implement a `CMSHasher[K]` from scratch, using the existing CMSHasher implementations as a - * starting point. - * - * Note: Because Arrays in Scala/Java not have sane `equals` and `hashCode` implementations, you cannot safely use types - * such as `Array[Byte]`. Extra work is required for Arrays. For example, you may opt to convert `Array[T]` to a - * `Seq[T]` via `toSeq`, or you can provide appropriate wrapper classes. Algebird provides one such wrapper class, - * [[Bytes]], to safely wrap an `Array[Byte]` for use with CMS. - * - * @param cms A [[CMS]] instance, which is used for the counting and the frequency estimation performed by this class. - * @param heavyHittersN The maximum number of heavy hitters to track. - * @tparam K The type used to identify the elements to be counted. For example, if you want to count the occurrence of - * user names, you could map each username to a unique numeric ID expressed as a `Long`, and then count the - * occurrences of those `Long`s with a CMS of type `K=Long`. Note that this mapping between the elements of - * your problem domain and their identifiers used for counting via CMS should be bijective. - * We require a [[CMSHasher]] context bound for `K`, see [[CMSHasher]] for available implicits that - * can be imported. - * Which type K should you pick in practice? For domains that have less than `2^64` unique elements, you'd - * typically use `Long`. For larger domains you can try `BigInt`, for example. + * The type `K` is the type of items you want to count. You must provide an implicit `CMSHasher[K]` for `K`, + * and Algebird ships with several such implicits for commonly used types such as `Long` and `BigInt`. 
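Since the usage notes above point at `contramap` for unsupported key types, here is a minimal sketch of that route. The `UserId` type is made up for illustration, and the exact `contramap` signature should be checked against [[CMSHasher]].

{{{
import com.twitter.algebird._

// A hypothetical key type that has no CMSHasher of its own.
case class UserId(value: Long)

// Derive a hasher by mapping UserId onto an already-supported type (Long).
implicit val userIdHasher: CMSHasher[UserId] =
  implicitly[CMSHasher[Long]].contramap((id: UserId) => id.value)

// With the implicit in scope, the usual factories work for UserId keys.
val monoid: CMSMonoid[UserId] = CMS.monoid[UserId](0.001, 1E-10, 1)
}}}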
+ * + * If your type `K` is not supported out of the box, you have two options: 1) You provide a "translation" + * function to convert items of your (unsupported) type `K` to a supported type such as [[Double]], and then + * use the `contramap` function of [[CMSHasher]] to create the required `CMSHasher[K]` for your type (see the + * documentation of [[CMSHasher]] for an example); 2) You implement a `CMSHasher[K]` from scratch, using the + * existing CMSHasher implementations as a starting point. + * + * Note: Because Arrays in Scala/Java not have sane `equals` and `hashCode` implementations, you cannot safely + * use types such as `Array[Byte]`. Extra work is required for Arrays. For example, you may opt to convert + * `Array[T]` to a `Seq[T]` via `toSeq`, or you can provide appropriate wrapper classes. Algebird provides one + * such wrapper class, [[Bytes]], to safely wrap an `Array[Byte]` for use with CMS. + * + * @param cms + * A [[CMS]] instance, which is used for the counting and the frequency estimation performed by this class. + * @param heavyHittersN + * The maximum number of heavy hitters to track. + * @tparam K + * The type used to identify the elements to be counted. For example, if you want to count the occurrence of + * user names, you could map each username to a unique numeric ID expressed as a `Long`, and then count the + * occurrences of those `Long`s with a CMS of type `K=Long`. Note that this mapping between the elements of + * your problem domain and their identifiers used for counting via CMS should be bijective. We require a + * [[CMSHasher]] context bound for `K`, see [[CMSHasher]] for available implicits that can be imported. + * Which type K should you pick in practice? For domains that have less than `2^64` unique elements, you'd + * typically use `Long`. For larger domains you can try `BigInt`, for example. */ class TopNCMSMonoid[K](cms: CMS[K], heavyHittersN: Int = 100) extends TopCMSMonoid[K](cms, TopNLogic[K](heavyHittersN)) @@ -1284,13 +1291,12 @@ object TopNCMS { } /** - * An Aggregator for [[TopNCMS]]. Can be created using [[TopNCMS.aggregator]]. + * An Aggregator for [[TopNCMS]]. Can be created using [[TopNCMS.aggregator]]. */ case class TopNCMSAggregator[K](cmsMonoid: TopNCMSMonoid[K]) extends TopCMSAggregator(cmsMonoid) /** - * K1 defines a scope for the CMS. For each k1, keep the top heavyHittersN - * associated k2 values. + * K1 defines a scope for the CMS. For each k1, keep the top heavyHittersN associated k2 values. */ case class ScopedTopNLogic[K1, K2](heavyHittersN: Int) extends HeavyHittersLogic[(K1, K2)] { @@ -1395,9 +1401,8 @@ case class CMSHash[K: CMSHasher](a: Int, b: Int, width: Int) extends java.io.Ser /** * This formerly held the instances that moved to object CMSHasher * - * These instances are slow, but here for compatibility with old - * serialized data. For new code, avoid these and instead use the - * implicits found in the CMSHasher companion object. + * These instances are slow, but here for compatibility with old serialized data. For new code, avoid these + * and instead use the implicits found in the CMSHasher companion object. 
*/ object CMSHasherImplicits { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala b/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala index 1350d208f..461785f9b 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/DecayedVector.scala @@ -17,9 +17,8 @@ limitations under the License. package com.twitter.algebird /** - * Represents a container class together with time. - * Its monoid consists of exponentially scaling the older value and summing with - * the newer one. + * Represents a container class together with time. Its monoid consists of exponentially scaling the older + * value and summing with the newer one. */ object DecayedVector extends CompatDecayedVector { def buildWithHalflife[C[_]](vector: C[Double], time: Double, halfLife: Double): DecayedVector[C] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala b/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala index 0ab0c7eb9..62ac2b5cb 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala @@ -7,25 +7,20 @@ import scala.concurrent.duration.Duration import scala.util.Random /** - * DecayingCMS is a module to build count-min sketch instances whose - * counts decay exponentially. + * DecayingCMS is a module to build count-min sketch instances whose counts decay exponentially. * - * Similar to a Map[K, com.twitter.algebird.DecayedValue], each key is - * associated with a single count value that decays over time. Unlike - * a map, the decyaing CMS is an approximate count -- in exchange for - * the possibility of over-counting, we can bound its size in memory. + * Similar to a Map[K, com.twitter.algebird.DecayedValue], each key is associated with a single count value + * that decays over time. Unlike a map, the decyaing CMS is an approximate count -- in exchange for the + * possibility of over-counting, we can bound its size in memory. * - * The intended use case is for metrics or machine learning where - * exact values aren't needed. + * The intended use case is for metrics or machine learning where exact values aren't needed. * - * You can expect the keys with the biggest values to be fairly - * accurate but the very small values (rare keys or very old keys) to - * be lost in the noise. For both metrics and ML this should be fine: - * you can't learn too much from very rare values. + * You can expect the keys with the biggest values to be fairly accurate but the very small values (rare keys + * or very old keys) to be lost in the noise. For both metrics and ML this should be fine: you can't learn too + * much from very rare values. * - * We recommend depth of at least 5, and width of at least 100, but - * you should do some experiments to determine the smallest parameters - * that will work for your use case. + * We recommend depth of at least 5, and width of at least 100, but you should do some experiments to + * determine the smallest parameters that will work for your use case. */ final class DecayingCMS[K]( seed: Long, @@ -56,15 +51,13 @@ final class DecayingCMS[K]( /** * Represents a decaying scalar value at a particular point in time. * - * The value decays according to halfLife. Another way to think - * about DoubleAt is that it represents a particular decay curve - * (and in particular, a point along that curve). 
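Pulling together the module-level guidance above (half-life, depth of at least 5, width of at least 100), a minimal construction sketch for the `DecayingCMS` module; the parameter values are illustrative only.

{{{
import com.twitter.algebird._
import scala.concurrent.duration._

// One module instance per configuration; memory per sketch is O(depth * width).
val module: DecayingCMS[Long] =
  DecayingCMS[Long](seed = 42L, halfLife = 10.minutes, depth = 5, width = 300)
}}}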
Two DoubleAt - * values may be equivalent if they are two points on the same curve. + * The value decays according to halfLife. Another way to think about DoubleAt is that it represents a + * particular decay curve (and in particular, a point along that curve). Two DoubleAt values may be + * equivalent if they are two points on the same curve. * - * The `timeToZero` and `timeToUnit` methods can be used to - * "normalize" DoubleAt values. If two DoubleAt values do not - * produce the same (approximate) Double values from these methods, - * they represent different curves. + * The `timeToZero` and `timeToUnit` methods can be used to "normalize" DoubleAt values. If two DoubleAt + * values do not produce the same (approximate) Double values from these methods, they represent different + * curves. */ class DoubleAt private[algebird] (val value: Double, val timeInHL: Double) extends Serializable { lhs => @@ -99,9 +92,8 @@ final class DecayingCMS[K]( def /(rhs: DoubleAt): Double = map2(rhs)(_ / _).value /** - * We consider two DoubleAt values equal not just if their - * elements are equal, but also if they represent the same value - * at different points of decay. + * We consider two DoubleAt values equal not just if their elements are equal, but also if they represent + * the same value at different points of decay. */ def compare(rhs: DoubleAt): Int = { val vc = cmp(lhs.value, rhs.value) @@ -114,9 +106,8 @@ final class DecayingCMS[K]( } /** - * Time when this value will reach the smallest double value - * bigger than zero, unless we are already at zero in which - * case we return the current time + * Time when this value will reach the smallest double value bigger than zero, unless we are already at + * zero in which case we return the current time */ def timeToZero: Double = if (java.lang.Double.isNaN(value)) Double.NaN @@ -125,12 +116,10 @@ final class DecayingCMS[K]( else timeToUnit + DoubleAt.TimeFromUnitToZero /** - * This is the scaled time when the current value will reach - * 1 (or -1 for negative values) + * This is the scaled time when the current value will reach 1 (or -1 for negative values) * - * This method is a way of collapsing a DoubleAt into a single - * value (the time in the past or future where its value would be - * 1, the unit value). + * This method is a way of collapsing a DoubleAt into a single value (the time in the past or future where + * its value would be 1, the unit value). */ def timeToUnit: Double = if (java.lang.Double.isNaN(value)) Double.NaN @@ -206,9 +195,8 @@ final class DecayingCMS[K]( /** * Allocate an empty array of row. * - * The elements start as null. It's an important optimization _not_ - * to allocate vectors here, since we're often building up cells - * mutably. + * The elements start as null. It's an important optimization _not_ to allocate vectors here, since we're + * often building up cells mutably. */ private def allocCells(): Array[Vector[Double]] = new Array[Vector[Double]](depth) @@ -236,58 +224,45 @@ final class DecayingCMS[K]( private final val log2 = Math.log(2.0) /** - * The idealized formula for the updating current value for a key - * (y0 -> y1) is given as: + * The idealized formula for the updating current value for a key (y0 -> y1) is given as: * - * delta = (t1 - t0) / halflife - * y1 = y0 * 2^(-delta) + n + * delta = (t1 - t0) / halflife y1 = y0 * 2^(-delta) + n * - * However, we want to avoid having to rescale every single cell - * every time we update; i.e. 
a cell with a zero value should - * continue to have a zero value when n=0. + * However, we want to avoid having to rescale every single cell every time we update; i.e. a cell with a + * zero value should continue to have a zero value when n=0. * - * Therefore, we introduce a change of variable to cell values (z) - * along with a scale factor (scale), and the following formula: + * Therefore, we introduce a change of variable to cell values (z) along with a scale factor (scale), and + * the following formula: * - * (1) zN = yN * scaleN + * (1) zN = yN * scaleN * * Our constraint is expressed as: * - * (2) If n=0, z1 = z0 + * (2) If n=0, z1 = z0 * * In that case: * - * (3) If n=0, (y1 * scale1) = (y0 * scale0) - * (4) Substituting for y1, (y0 * 2^(-delta) + 0) * scale1 = y0 * scale0 - * (5) 2^(-delta) * scale1 = scale0 - * (6) scale1 = scale0 * 2^(delta) + * (3) If n=0, (y1 * scale1) = (y0 * scale0) (4) Substituting for y1, (y0 * 2^(-delta) + 0) * scale1 = y0 * + * scale0 (5) 2^(-delta) * scale1 = scale0 (6) scale1 = scale0 * 2^(delta) * * Also, to express z1 in terms of z0, we say: * - * (7) z1 = y1 * scale1 - * (8) z1 = (y0 * 2^(-delta) + n) * scale1 - * (9) z1 = ((z0 / scale0) * 2^(-delta) + n) * scale1 - * (10) z1 / scale1 = (z0 / (scale1 * 2^(-delta))) * 2^(-delta) + n - * (11) z1 / scale1 = z0 / scale1 + n - * (12) z1 = z0 + n * scale1 + * (7) z1 = y1 * scale1 (8) z1 = (y0 * 2^(-delta) + n) * scale1 (9) z1 = ((z0 / scale0) * 2^(-delta) + n) * + * scale1 (10) z1 / scale1 = (z0 / (scale1 * 2^(-delta))) * 2^(-delta) + n (11) z1 / scale1 = z0 / scale1 + + * n (12) z1 = z0 + n * scale1 * - * So, for cells where n=0, we just update scale0 to scale1, and for - * cells where n is non-zero, we update z1 in terms of z0 and - * scale1. + * So, for cells where n=0, we just update scale0 to scale1, and for cells where n is non-zero, we update z1 + * in terms of z0 and scale1. * * If we convert scale to logscale, we have: * - * (13) logscale1 = logscale0 + delta * log(2) - * (14) z1 = z0 + n * exp(logscale1) + * (13) logscale1 = logscale0 + delta * log(2) (14) z1 = z0 + n * exp(logscale1) * - * When logscale1 gets big, we start to distort z1. For example, - * exp(36) is close to 2^53. We can measure when n * exp(logscale1) - * gets big, and in those cases we can rescale all our cells (set - * each z to its corresponding y) and set the logscale to 0. + * When logscale1 gets big, we start to distort z1. For example, exp(36) is close to 2^53. We can measure + * when n * exp(logscale1) gets big, and in those cases we can rescale all our cells (set each z to its + * corresponding y) and set the logscale to 0. * - * (15) y1 = z1 / scale1 - * (16) y1 = z1 / exp(logscale1) - * (17) y1 = z1 * exp(-logscale1) + * (15) y1 = z1 / scale1 (16) y1 = z1 / exp(logscale1) (17) y1 = z1 * exp(-logscale1) */ final class CMS( val cells: Array[Vector[Double]], @@ -332,19 +307,15 @@ final class DecayingCMS[K]( toTimestamp(timeInHL) /** - * Provide lower and upper bounds on values returned for any - * possible key. + * Provide lower and upper bounds on values returned for any possible key. * - * The first value is a lower bound: even keys that have never - * been counted will return this value or greater. This will be - * zero unless the CMS is saturated. + * The first value is a lower bound: even keys that have never been counted will return this value or + * greater. This will be zero unless the CMS is saturated. 
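The derivation quoted above got run together by the rewrapping; in its direct (unscaled) form, the per-cell update it describes is just exponential decay followed by adding the new count. A small standalone sketch of that formula (not the scaled variant the class actually stores):

{{{
// y1 = y0 * 2^(-delta) + n, where delta = (t1 - t0) / halfLife
def decayedUpdate(y0: Double, t0: Double, t1: Double, halfLife: Double, n: Double): Double = {
  val delta = (t1 - t0) / halfLife
  y0 * math.pow(2.0, -delta) + n
}

decayedUpdate(y0 = 8.0, t0 = 0.0, t1 = 10.0, halfLife = 10.0, n = 1.0) // 8 * 0.5 + 1 = 5.0
}}}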
* - * The second value is an upper bound: the key with the largest - * cardinality will not be reported as being larger than this - * value (though it might be reported as being smaller). + * The second value is an upper bound: the key with the largest cardinality will not be reported as being + * larger than this value (though it might be reported as being smaller). * - * Together these values indicate how saturated and skewed the CMS - * might be. + * Together these values indicate how saturated and skewed the CMS might be. */ def range: (DoubleAt, DoubleAt) = { var minMinimum = Double.PositiveInfinity @@ -371,18 +342,15 @@ final class DecayingCMS[K]( } /** - * Returns the square-root of the inner product of two decaying - * CMSs. + * Returns the square-root of the inner product of two decaying CMSs. * - * We want the result to decay at the same rate as the CMS for - * this method to be valid. Taking the square root ensures that - * this is true. Without it, we would violate the following - * equality (assuming we had at() on a CMS): + * We want the result to decay at the same rate as the CMS for this method to be valid. Taking the square + * root ensures that this is true. Without it, we would violate the following equality (assuming we had + * at() on a CMS): * - * x.innerProduct(y).at(t) = x.at(t).innerProduct(y.at(t)) + * x.innerProduct(y).at(t) = x.at(t).innerProduct(y.at(t)) * - * This is why we don't support innerProduct, only - * innerProductRoot. + * This is why we don't support innerProduct, only innerProductRoot. */ def innerProductRoot(that: CMS): DoubleAt = { var i = 0 @@ -423,12 +391,10 @@ final class DecayingCMS[K]( /** * Get the total count of all items in the CMS. * - * The total is the same as the l1Norm, since we don't allow - * negative values. + * The total is the same as the l1Norm, since we don't allow negative values. * - * Total is one of the few non-approximate statistics that - * DecayingCMS supports. We expect the total to be exact (except - * for floating-point error). + * Total is one of the few non-approximate statistics that DecayingCMS supports. We expect the total to be + * exact (except for floating-point error). */ def total: DoubleAt = { val n = cells(0).sum @@ -661,25 +627,21 @@ object DecayingCMS { /** * Construct a DecayingCMS module. * - * The seed is used to initialize the hash families used by the - * count-min sketch. Using the same seed will always produce the - * same hash family. + * The seed is used to initialize the hash families used by the count-min sketch. Using the same seed will + * always produce the same hash family. * - * Half-life determines the rate at which values in the CMS decay. - * If a key was counted once at time t, by time (t + halfLife), the - * value for that key will be 0.5. After enough half lives the value - * will decay to zero. + * Half-life determines the rate at which values in the CMS decay. If a key was counted once at time t, by + * time (t + halfLife), the value for that key will be 0.5. After enough half lives the value will decay to + * zero. * * The size of the CMS in bytes is O(depth * width). * - * Width controls the relative error due to over-counting - * (approximately 1/width). For 1% error, use width=100, for 0.1% - * error, use width=1000, etc. + * Width controls the relative error due to over-counting (approximately 1/width). For 1% error, use + * width=100, for 0.1% error, use width=1000, etc. 
* - * Depth controls the probability the error bounds are broken and - * that probability scales with exp(-alpha * depth) so, a small depth - * (e.g. 5-10) is fine. Each update requires O(depth) work so you - * want to keep this as small as possible. + * Depth controls the probability the error bounds are broken and that probability scales with exp(-alpha * + * depth) so, a small depth (e.g. 5-10) is fine. Each update requires O(depth) work so you want to keep this + * as small as possible. */ def apply[K](seed: Long, halfLife: Duration, depth: Int, width: Int)(implicit hasher: CMSHasher[K] diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala b/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala index d9392a208..78548d55f 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala @@ -19,23 +19,20 @@ package com.twitter.algebird import scala.collection.compat._ /** - * Classes that support algebraic structures with dynamic switching between - * two representations, the original type O and the eventual type E. - * In the case of Semigroup, we specify - * - Two Semigroups eventualSemigroup and originalSemigroup - * - A Semigroup homomorphism convert: O => E - * - A conditional mustConvert: O => Boolean - * Then we get a Semigroup[Either[E,O]], where: - * Left(x) + Left(y) = Left(x+y) - * Left(x) + Right(y) = Left(x+convert(y)) - * Right(x) + Left(y) = Left(convert(x)+y) - * Right(x) + Right(y) = Left(convert(x+y)) if mustConvert(x+y) - * Right(x+y) otherwise. - * EventuallyMonoid, EventuallyGroup, and EventuallyRing are defined analogously, - * with the contract that convert respect the appropriate structure. + * Classes that support algebraic structures with dynamic switching between two representations, the original + * type O and the eventual type E. In the case of Semigroup, we specify + * - Two Semigroups eventualSemigroup and originalSemigroup + * - A Semigroup homomorphism convert: O => E + * - A conditional mustConvert: O => Boolean Then we get a Semigroup[Either[E,O]], where: Left(x) + Left(y) + * = Left(x+y) Left(x) + Right(y) = Left(x+convert(y)) Right(x) + Left(y) = Left(convert(x)+y) Right(x) + + * Right(y) = Left(convert(x+y)) if mustConvert(x+y) Right(x+y) otherwise. EventuallyMonoid, EventuallyGroup, + * and EventuallyRing are defined analogously, with the contract that convert respect the appropriate + * structure. 
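A small sketch of the switching behavior described above, using an exact `List[Int]` as the original type `O` and a running `Long` sum as the eventual type `E`; the size threshold is arbitrary.

{{{
import com.twitter.algebird._

// Keep small results exact (Right: List[Int]); convert to a summed Long (Left)
// once a combined list grows past the threshold.
val sg: Semigroup[Either[Long, List[Int]]] =
  new EventuallySemigroup[Long, List[Int]](xs => xs.map(_.toLong).sum)(_.size > 1000)

sg.plus(Right(List(1, 2)), Right(List(3))) // stays Right(List(1, 2, 3)) while small
}}}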
* - * @param E eventual type - * @param O original type + * @param E + * eventual type + * @param O + * original type */ class EventuallySemigroup[E, O](convert: O => E)(mustConvert: O => Boolean)(implicit eventualSemigroup: Semigroup[E], @@ -79,7 +76,7 @@ class EventuallySemigroup[E, O](convert: O => E)(mustConvert: O => Boolean)(impl Left(newBuffer) } - (iter.iterator.foldLeft[Either[Buffer[E], Buffer[O]]](Right(Buffer[O]())) { + iter.iterator.foldLeft[Either[Buffer[E], Buffer[O]]](Right(Buffer[O]())) { case (buffer @ Left(be), v) => // turns the list of either into an either of lists checkSize(be) @@ -98,7 +95,7 @@ class EventuallySemigroup[E, O](convert: O => E)(mustConvert: O => Boolean)(impl bo += vo buffer } - }) match { // finally apply sumOption accordingly + } match { // finally apply sumOption accordingly case Left(be) => Semigroup.sumOption(be).map(left(_)) case Right(bo) => if (bo.lengthCompare(1) <= 0) bo.headOption.map(Right(_)) @@ -122,7 +119,8 @@ class EventuallySemigroup[E, O](convert: O => E)(mustConvert: O => Boolean)(impl } /** - * @see EventuallySemigroup + * @see + * EventuallySemigroup */ class EventuallyMonoid[E, O](convert: O => E)(mustConvert: O => Boolean)(implicit lSemigroup: Semigroup[E], @@ -135,7 +133,8 @@ class EventuallyMonoid[E, O](convert: O => E)(mustConvert: O => Boolean)(implici } /** - * @see EventuallySemigroup + * @see + * EventuallySemigroup */ class EventuallyGroup[E, O](convert: O => E)(mustConvert: O => Boolean)(implicit lGroup: Group[E], @@ -154,7 +153,8 @@ class EventuallyGroup[E, O](convert: O => E)(mustConvert: O => Boolean)(implicit } /** - * @see EventuallySemigroup + * @see + * EventuallySemigroup */ class EventuallyRing[E, O](convert: O => E)(mustConvert: O => Boolean)(implicit lRing: Ring[E], diff --git a/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala b/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala index 4eb630e3c..567580fd0 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala @@ -5,37 +5,35 @@ import scala.annotation.tailrec import scala.collection.mutable.Builder /** - * Exponential Histogram algorithm from - * http://www-cs-students.stanford.edu/~datar/papers/sicomp_streams.pdf + * Exponential Histogram algorithm from http://www-cs-students.stanford.edu/~datar/papers/sicomp_streams.pdf * - * An Exponential Histogram is a sliding window counter that can - * guarantee a bounded relative error. You configure the data - * structure with + * An Exponential Histogram is a sliding window counter that can guarantee a bounded relative error. You + * configure the data structure with * - * - epsilon, the relative error you're willing to tolerate - * - windowSize, the number of time ticks that you want to track + * - epsilon, the relative error you're willing to tolerate + * - windowSize, the number of time ticks that you want to track * - * You interact with the data structure by adding (number, timestamp) - * pairs into the exponential histogram. querying it for an - * approximate counts with `guess`. + * You interact with the data structure by adding (number, timestamp) pairs into the exponential histogram. + * querying it for an approximate counts with `guess`. * - * The approximate count is guaranteed to be within conf.epsilon - * relative error of the true count seen across the supplied - * `windowSize`. 
+ * The approximate count is guaranteed to be within conf.epsilon relative error of the true count seen across + * the supplied `windowSize`. * * Next steps: * - * - efficient serialization - * - Query EH with a shorter window than the configured window - * - Discussion of epsilon vs memory tradeoffs + * - efficient serialization + * - Query EH with a shorter window than the configured window + * - Discussion of epsilon vs memory tradeoffs * - * @param conf the config values for this instance. - * @param buckets Vector of timestamps of each (powers of 2) - * ticks. This is the key to the exponential histogram - * representation. See [[ExpHist.Canonical]] for more - * info. - * @param total total ticks tracked. `total == buckets.map(_.size).sum` - * @param time current timestamp of this instance. + * @param conf + * the config values for this instance. + * @param buckets + * Vector of timestamps of each (powers of 2) ticks. This is the key to the exponential histogram + * representation. See [[ExpHist.Canonical]] for more info. + * @param total + * total ticks tracked. `total == buckets.map(_.size).sum` + * @param time + * current timestamp of this instance. */ case class ExpHist( conf: ExpHist.Config, @@ -46,12 +44,13 @@ case class ExpHist( import ExpHist.{Bucket, Canonical, Timestamp} /** - * Steps this instance forward to the new supplied time. Any - * buckets with a timestamp <= (newTime - conf.windowSize) will be - * evicted. + * Steps this instance forward to the new supplied time. Any buckets with a timestamp <= (newTime - + * conf.windowSize) will be evicted. * - * @param newTime the new current time. - * @return ExpHist instance stepped forward to newTime. + * @param newTime + * the new current time. + * @return + * ExpHist instance stepped forward to newTime. */ def step(newTime: Timestamp): ExpHist = if (newTime <= time) this @@ -77,9 +76,10 @@ case class ExpHist( /** * Efficiently add many buckets at once. * - * @param unsorted vector of buckets. All timestamps must be >= this.time. - * @return ExpHist instance with all buckets added, stepped - * forward to the max timestamp in `unsorted`. + * @param unsorted + * vector of buckets. All timestamps must be >= this.time. + * @return + * ExpHist instance with all buckets added, stepped forward to the max timestamp in `unsorted`. */ def addAll(unsorted: Vector[Bucket]): ExpHist = if (unsorted.isEmpty) this @@ -96,8 +96,8 @@ case class ExpHist( } /** - * Returns a [[Fold]] instance that uses `add` to accumulate deltas - * into this exponential histogram instance. + * Returns a [[Fold]] instance that uses `add` to accumulate deltas into this exponential histogram + * instance. */ def fold: Fold[Bucket, ExpHist] = Fold.foldMutable[Builder[Bucket, Vector[Bucket]], Bucket, ExpHist]( @@ -121,29 +121,26 @@ case class ExpHist( def oldestBucketSize: Long = if (total == 0) 0L else buckets.last.size /** - * Smallest possible count seen in the last conf.windowSize - * timestamps. + * Smallest possible count seen in the last conf.windowSize timestamps. */ def lowerBoundSum: Long = total - oldestBucketSize /** - * Largest possible count seen in the last conf.windowSize - * timestamps. + * Largest possible count seen in the last conf.windowSize timestamps. */ def upperBoundSum: Long = total /** - * Estimate of the count seen across the last conf.windowSize - * timestamps. Guaranteed to be within conf.epsilon of the true - * count. + * Estimate of the count seen across the last conf.windowSize timestamps. 
Guaranteed to be within + * conf.epsilon of the true count. */ def guess: Double = if (total == 0) 0.0 else (total - (oldestBucketSize - 1) / 2.0) /** - * Returns an Approximate instance encoding the bounds and the - * closest long to the estimated sum tracked by this instance. + * Returns an Approximate instance encoding the bounds and the closest long to the estimated sum tracked by + * this instance. */ def approximateSum: Approximate[Long] = Approximate(lowerBoundSum, math.round(guess), upperBoundSum, 1.0) @@ -177,8 +174,10 @@ object ExpHist { } /** - * @param size number of items tracked by this bucket. - * @param timestamp timestamp of the most recent item tracked by this bucket. + * @param size + * number of items tracked by this bucket. + * @param timestamp + * timestamp of the most recent item tracked by this bucket. */ case class Bucket(size: Long, timestamp: Timestamp) @@ -187,11 +186,12 @@ object ExpHist { } /** - * ExpHist guarantees that the returned guess will be within - * `epsilon` relative error of the true count across a sliding - * window of size `windowSize`. - * @param epsilon relative error, from [0, 0.5] - * @param windowSize number of time ticks to track + * ExpHist guarantees that the returned guess will be within `epsilon` relative error of the true count + * across a sliding window of size `windowSize`. + * @param epsilon + * relative error, from [0, 0.5] + * @param windowSize + * number of time ticks to track */ case class Config(epsilon: Double, windowSize: Long) { val k: Int = math.ceil(1 / epsilon).toInt @@ -208,9 +208,8 @@ object ExpHist { ExpHist.dropExpired(buckets, expiration(currTime)) /** - * Returns a [[Fold]] instance that uses `add` to accumulate deltas - * into an empty exponential histogram instance configured with - * this Config. + * Returns a [[Fold]] instance that uses `add` to accumulate deltas into an empty exponential histogram + * instance configured with this Config. */ def fold: Fold[Bucket, ExpHist] = ExpHist.empty(this).fold } @@ -222,9 +221,8 @@ object ExpHist { ExpHist(conf, Vector.empty, 0L, Timestamp(0L)) /** - * Returns an instance directly from a number `i`. All buckets in - * the returned ExpHist will have the same timestamp, equal to - * `ts`. + * Returns an instance directly from a number `i`. All buckets in the returned ExpHist will have the same + * timestamp, equal to `ts`. 
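A small end-to-end sketch using the pieces defined in this companion object (`Config`, `from`, `Bucket`, `addAll`); the epsilon, window size, and timestamps are illustrative.

{{{
import com.twitter.algebird._
import ExpHist.{Bucket, Config, Timestamp}

val conf = Config(epsilon = 0.01, windowSize = 100L)

// Start from 5 ticks at time 1, then add 3 ticks at time 2 and 2 ticks at time 10.
val eh = ExpHist
  .from(5L, Timestamp(1L), conf)
  .addAll(Vector(Bucket(3L, Timestamp(2L)), Bucket(2L, Timestamp(10L))))

eh.guess           // approximate count over the last `windowSize` ticks
eh.approximateSum  // Approximate[Long] carrying lower/upper bounds
}}}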
*/ def from(i: Long, ts: Timestamp, conf: Config): ExpHist = { val buckets = Canonical.bucketsFromLong(i, conf.l).map(Bucket(_, ts)) @@ -232,9 +230,12 @@ object ExpHist { } /** - * @param buckets [buckets] sorted in DESCENDING order (recent first) - * @param cutoff buckets with ts <= cutoff are expired - * @return the sum of evicted bucket sizes and the unexpired buckets + * @param buckets + * [buckets] sorted in DESCENDING order (recent first) + * @param cutoff + * buckets with ts <= cutoff are expired + * @return + * the sum of evicted bucket sizes and the unexpired buckets */ private[algebird] def dropExpired(buckets: Vector[Bucket], cutoff: Timestamp): (Long, Vector[Bucket]) = { val (dropped, remaining) = buckets.reverse.span(_.timestamp <= cutoff) @@ -242,8 +243,7 @@ object ExpHist { } /** - * Converts the supplied buckets into a NEW vector of buckets - * satisfying this law: + * Converts the supplied buckets into a NEW vector of buckets satisfying this law: * * {{{ * rebucket(buckets, desired).map(_.size).sum == desired @@ -251,8 +251,10 @@ object ExpHist { * * (rebucket only works if desired.sum == buckets.map(_.size).sum) * - * @param buckets vector of buckets sorted in DESCENDING order (recent first) - * @param desired bucket sizes to rebucket `buckets` into. + * @param buckets + * vector of buckets sorted in DESCENDING order (recent first) + * @param desired + * bucket sizes to rebucket `buckets` into. */ private[algebird] def rebucket(buckets: Vector[Bucket], desired: Vector[Long]): Vector[Bucket] = if (desired.isEmpty) Vector.empty @@ -264,12 +266,13 @@ object ExpHist { } /** - * @param toDrop total count to remove from the left of `input`. - * @param input buckets - * @return Vector with buckets, or pieces of buckets, with sizes - * totalling `toDrop` items removed from the head. If an - * element wasn't fully consumed, the remainder will be - * stuck back onto the head. + * @param toDrop + * total count to remove from the left of `input`. + * @param input + * buckets + * @return + * Vector with buckets, or pieces of buckets, with sizes totalling `toDrop` items removed from the head. + * If an element wasn't fully consumed, the remainder will be stuck back onto the head. */ @tailrec private[this] def drop(toDrop: Long, input: Vector[Bucket]): Vector[Bucket] = { val (b @ Bucket(count, _)) +: tail = input @@ -281,9 +284,8 @@ object ExpHist { } /** - * The paper that introduces the exponential histogram proves that, - * given a positive number `l`, every integer s can be uniquely - * represented as the sum of + * The paper that introduces the exponential histogram proves that, given a positive number `l`, every + * integer s can be uniquely represented as the sum of * * (l or (l + 1)) * 2^i + (# from 1 to (l + 1)) 2^j * @@ -291,27 +293,23 @@ object ExpHist { * * The paper calls this the "l-canonical" representation of s. * - * It turns out that if you follow the exponential histogram - * bucket-merging algorithm, you end up with the invariant that the - * number of buckets with size 2^i exactly matches that power of 2's - * coefficient in s's l-canonical representation. + * It turns out that if you follow the exponential histogram bucket-merging algorithm, you end up with the + * invariant that the number of buckets with size 2^i exactly matches that power of 2's coefficient in s's + * l-canonical representation. * - * Put another way - only sequences of buckets with sizes matching - * the l-canonical representation of some number s are valid - * exponential histograms. 
+ * Put another way - only sequences of buckets with sizes matching the l-canonical representation of some + * number s are valid exponential histograms. * - * (We use this idea in `ExpHist.rebucket` to take a sequence of - * buckets of any size and rebucket them into a sequence where the - * above invariant holds.) + * (We use this idea in `ExpHist.rebucket` to take a sequence of buckets of any size and rebucket them into + * a sequence where the above invariant holds.) * * This is huge. This means that you can implement `addAll(newBuckets)` by * - * - calculating newS = s + delta contributed by newBuckets - * - generating the l-canonical sequence of bucket sizes for newS - * - rebucketing newBuckets ++ oldBuckets into those bucket sizes + * - calculating newS = s + delta contributed by newBuckets + * - generating the l-canonical sequence of bucket sizes for newS + * - rebucketing newBuckets ++ oldBuckets into those bucket sizes * - * The resulting sequence of buckets is a valid exponential - * histogram. + * The resulting sequence of buckets is a valid exponential histogram. */ object Canonical { @inline private[this] def floorPowerOfTwo(x: Long): Int = @@ -326,10 +324,12 @@ object ExpHist { (0 until bits).map(idx => offset + bit(i, idx)).toVector /** - * @param s the number to convert to l-canonical form - * @param l the "l" in l-canonical form - * @return vector of the coefficients of 2^i in the - * l-canonical representation of s. + * @param s + * the number to convert to l-canonical form + * @param l + * the "l" in l-canonical form + * @return + * vector of the coefficients of 2^i in the l-canonical representation of s. * * For example: * @@ -341,17 +341,17 @@ object ExpHist { * * the "l" in l-canonical means that * - * - all return vector entries but the last one == `l` or `l + 1` - * - 1 <= `returnVector.last` <= l + 1 + * - all return vector entries but the last one == `l` or `l + 1` + * - 1 <= `returnVector.last` <= l + 1 * * ## L-Canonical Representation Procedure: * - * - Find the largest j s.t. 2^j <= (s + l) / (1 + l) - * - let s' = 2^j(1 + l) - l + * - Find the largest j s.t. 2^j <= (s + l) / (1 + l) + * - let s' = 2^j(1 + l) - l * - * - let diff = (s - s') is the position of s within that group. - * - let b = the little-endian binary rep of diff % (2^j - 1) - * - let ret = return vector of length j: + * - let diff = (s - s') is the position of s within that group. + * - let b = the little-endian binary rep of diff % (2^j - 1) + * - let ret = return vector of length j: * * {{{ * (0 until j).map { i => ret(i) = b(i) + l } @@ -369,12 +369,13 @@ object ExpHist { } /** - * @param s the number to convert to l-canonical form - * @param l the "l" in l-canonical form - * @return vector of numbers that sum to s. Each - * entry is a power of 2, and the number of entries of - * each power of 2 matches the l-canonical - * representation of s. + * @param s + * the number to convert to l-canonical form + * @param l + * the "l" in l-canonical form + * @return + * vector of numbers that sum to s. Each entry is a power of 2, and the number of entries of each power + * of 2 matches the l-canonical representation of s. * * Note that: * @@ -405,8 +406,10 @@ object ExpHist { /** * Expands out an l-canonical representation into the original number. 
* - * @param rep l-canonical representation of some number s for some l - * @return The original s + * @param rep + * l-canonical representation of some number s for some l + * @return + * The original s */ def toLong: Long = Monoid.sum( @@ -415,11 +418,13 @@ object ExpHist { ) /** - * Expands out the l-canonical representation of some number s into - * a list of bucket sizes in ascending order. + * Expands out the l-canonical representation of some number s into a list of bucket sizes in ascending + * order. * - * @param rep l-canonical representation of some number s for some l - * @return vector of powers of 2 (where ret.sum == the original s) + * @param rep + * l-canonical representation of some number s for some l + * @return + * vector of powers of 2 (where ret.sum == the original s) */ def toBuckets: Vector[Long] = rep.iterator.zipWithIndex.flatMap { case (i, exp) => diff --git a/algebird-core/src/main/scala/com/twitter/algebird/First.scala b/algebird-core/src/main/scala/com/twitter/algebird/First.scala index 44d7c58f8..a1238b4f8 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/First.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/First.scala @@ -19,17 +19,18 @@ import algebra.Band import scala.collection.compat._ /** - * Tracks the "least recent", or earliest, wrapped instance of `T` by - * the order in which items are seen. + * Tracks the "least recent", or earliest, wrapped instance of `T` by the order in which items are seen. * - * @param get wrapped instance of `T` + * @param get + * wrapped instance of `T` */ case class First[@specialized(Int, Long, Float, Double) +T](get: T) { /** * Returns this instance, always. * - * @param r ignored instance of `First[U]` + * @param r + * ignored instance of `First[U]` */ def +[U >: T](r: First[U]): First[T] = { val _ = r //suppressing unused `r` @@ -38,14 +39,12 @@ case class First[@specialized(Int, Long, Float, Double) +T](get: T) { } /** - * Provides a set of operations and typeclass instances needed to use - * [[First]] instances. + * Provides a set of operations and typeclass instances needed to use [[First]] instances. */ object First extends FirstInstances { /** - * Returns an [[Aggregator]] that selects the first instance of `T` - * in the aggregated stream. + * Returns an [[Aggregator]] that selects the first instance of `T` in the aggregated stream. */ def aggregator[T]: FirstAggregator[T] = FirstAggregator() } @@ -53,12 +52,11 @@ object First extends FirstInstances { private[algebird] sealed abstract class FirstInstances { /** - * Returns a [[Semigroup]] instance with a `plus` implementation - * that always returns the first (ie, the left) `T` argument. + * Returns a [[Semigroup]] instance with a `plus` implementation that always returns the first (ie, the + * left) `T` argument. * - * This semigroup's `sumOption` is efficient; it only selects the - * head of the `TraversableOnce` instance, leaving the rest - * untouched. + * This semigroup's `sumOption` is efficient; it only selects the head of the `TraversableOnce` instance, + * leaving the rest untouched. */ def firstSemigroup[T]: Semigroup[T] with Band[T] = new Semigroup[T] with Band[T] { @@ -69,17 +67,15 @@ private[algebird] sealed abstract class FirstInstances { } /** - * Returns a [[Semigroup]] instance for [[First]][T]. The `plus` - * implementation always returns the first (ie, the left) `First[T]` - * argument. + * Returns a [[Semigroup]] instance for [[First]] [T]. 
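For illustration, a couple of checks of the behaviour just described; Semigroup.plus resolves the implicit instance defined here.

  import com.twitter.algebird.{First, Semigroup}

  // plus keeps the left-most value
  assert(Semigroup.plus(First(3), First(5)) == First(3))
  // the aggregator keeps the first element of the stream
  assert(First.aggregator[Int](List(3, 1, 4)) == 3)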
The `plus` implementation always returns the first + * (ie, the left) `First[T]` argument. */ implicit def semigroup[T]: Semigroup[First[T]] with Band[First[T]] = firstSemigroup[First[T]] } /** - * [[Aggregator]] that selects the first instance of `T` in the - * aggregated stream. + * [[Aggregator]] that selects the first instance of `T` in the aggregated stream. */ case class FirstAggregator[T]() extends Aggregator[T, T, T] { override def prepare(v: T): T = v diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala b/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala index 69d2d832e..ed95d61f5 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala @@ -19,20 +19,20 @@ import java.io.Serializable import scala.collection.compat._ /** - * Folds are first-class representations of "Traversable.foldLeft." They have the nice property that - * they can be fused to work in parallel over an input sequence. + * Folds are first-class representations of "Traversable.foldLeft." They have the nice property that they can + * be fused to work in parallel over an input sequence. * - * A Fold accumulates inputs (I) into some internal type (X), converting to a defined output type - * (O) when done. We use existential types to hide internal details and to allow for internal and - * external (X and O) types to differ for "map" and "join." + * A Fold accumulates inputs (I) into some internal type (X), converting to a defined output type (O) when + * done. We use existential types to hide internal details and to allow for internal and external (X and O) + * types to differ for "map" and "join." * - * In discussing this type we draw parallels to Function1 and related types. You can think of a - * fold as a function "Seq[I] => O" but in reality we do not have to materialize the input sequence - * at once to "run" the fold. + * In discussing this type we draw parallels to Function1 and related types. You can think of a fold as a + * function "Seq[I] => O" but in reality we do not have to materialize the input sequence at once to "run" the + * fold. * - * The traversal of the input data structure is NOT done by Fold itself. Instead we expose some - * methods like "overTraversable" that know how to iterate through various sequence types and drive - * the fold. We also expose some internal state so library authors can fold over their own types. + * The traversal of the input data structure is NOT done by Fold itself. Instead we expose some methods like + * "overTraversable" that know how to iterate through various sequence types and drive the fold. We also + * expose some internal state so library authors can fold over their own types. * * See the companion object for constructors. */ @@ -41,27 +41,26 @@ sealed trait Fold[-I, +O] extends Serializable { /** * Users can ignore this type. * - * The internal accumulator type. No one outside this Fold needs to know what this is, and that's - * a good thing. It keeps type signatures sane and makes this easy to use for the amount of - * flexibility it provides. + * The internal accumulator type. No one outside this Fold needs to know what this is, and that's a good + * thing. It keeps type signatures sane and makes this easy to use for the amount of flexibility it + * provides. */ type X /** - * Users can ignore this method. It is exposed so library authors can run folds over their own - * sequence types. + * Users can ignore this method. 
It is exposed so library authors can run folds over their own sequence + * types. * - * "build" constructs a FoldState, which tells us how to run the fold. It is expected that we can - * run the same Fold many times over different data structures, but we must build a new FoldState - * every time. + * "build" constructs a FoldState, which tells us how to run the fold. It is expected that we can run the + * same Fold many times over different data structures, but we must build a new FoldState every time. * * See FoldState for information on how to use this for your own sequence types. */ def build(): FoldState[X, I, O] /** - * Transforms the output of the Fold after iteration is complete. This is analogous to - * "Future.map" or "Function1.compose." + * Transforms the output of the Fold after iteration is complete. This is analogous to "Future.map" or + * "Function1.compose." */ def map[P](f: O => P): Fold[I, P] = { val self = this @@ -73,8 +72,8 @@ sealed trait Fold[-I, +O] extends Serializable { } /** - * Joins two folds into one and combines the results. The fused fold accumulates with both at the - * same time and combines at the end. + * Joins two folds into one and combines the results. The fused fold accumulates with both at the same time + * and combines at the end. */ def joinWith[I2 <: I, P, Q](other: Fold[I2, P])(f: (O, P) => Q): Fold[I2, Q] = { val self = this @@ -99,8 +98,8 @@ sealed trait Fold[-I, +O] extends Serializable { joinWith(other) { case (o, p) => (o, p) } /** - * Transforms the input of the fold before every accumulation. (The name comes from "contravariant - * map.") This is analogous to "Function1.andThen." + * Transforms the input of the fold before every accumulation. (The name comes from "contravariant map.") + * This is analogous to "Function1.andThen." */ def contramap[H](f: H => I): Fold[H, O] = { val self = this @@ -138,21 +137,17 @@ sealed trait Fold[-I, +O] extends Serializable { } /** - * A FoldState defines a left fold with a "hidden" accumulator type. It is exposed so - * library authors can run Folds over their own sequence types. + * A FoldState defines a left fold with a "hidden" accumulator type. It is exposed so library authors can run + * Folds over their own sequence types. * - * The fold can be executed correctly according to the properties of "add" and your traversed - * data structure. For example, the "add" function of a monoidal fold will be associative. A - * FoldState is valid for only one iteration because the accumulator (seeded by "start" may be - * mutable. + * The fold can be executed correctly according to the properties of "add" and your traversed data structure. + * For example, the "add" function of a monoidal fold will be associative. A FoldState is valid for only one + * iteration because the accumulator (seeded by "start" may be mutable. 
* - * The three components of a fold are - * add: (X, I) => X - updates and returns internal state for every input I - * start: X - the initial state - * end: X => O - transforms internal state to a final result + * The three components of a fold are add: (X, I) => X - updates and returns internal state for every input I + * start: X - the initial state end: X => O - transforms internal state to a final result * - * Folding over Seq(x, y) would produce the result - * end(add(add(start, x), y)) + * Folding over Seq(x, y) would produce the result end(add(add(start, x), y)) */ final class FoldState[X, -I, +O] private[algebird] (val add: (X, I) => X, val start: X, val end: X => O) extends Serializable { @@ -173,10 +168,10 @@ final class FoldState[X, -I, +O] private[algebird] (val add: (X, I) => X, val st /** * Methods to create and run Folds. * - * The Folds defined here are immutable and serializable, which we expect by default. It is - * important that you as a user indicate mutability or non-serializability when defining new Folds. - * Additionally, it is recommended that "end" functions not mutate the accumulator in order to - * support scans (producing a stream of intermediate outputs by calling "end" at each step). + * The Folds defined here are immutable and serializable, which we expect by default. It is important that you + * as a user indicate mutability or non-serializability when defining new Folds. Additionally, it is + * recommended that "end" functions not mutate the accumulator in order to support scans (producing a stream + * of intermediate outputs by calling "end" at each step). */ object Fold extends CompatFold { @@ -187,15 +182,14 @@ object Fold extends CompatFold { new FoldApplicative[I] /** - * Turn a common Scala foldLeft into a Fold. - * The accumulator MUST be immutable and serializable. + * Turn a common Scala foldLeft into a Fold. The accumulator MUST be immutable and serializable. */ def foldLeft[I, O](o: O)(add: (O, I) => O): Fold[I, O] = fold[O, I, O](add, o, o => o) /** - * A general way of defining Folds that supports a separate accumulator type. - * The accumulator MUST be immutable and serializable. + * A general way of defining Folds that supports a separate accumulator type. The accumulator MUST be + * immutable and serializable. */ def fold[M, I, O](add: (M, I) => M, start: M, end: M => O): Fold[I, O] = new Fold[I, O] { @@ -205,8 +199,7 @@ object Fold extends CompatFold { } /** - * A general way of defining Folds that supports constructing mutable or non-serializable - * accumulators. + * A general way of defining Folds that supports constructing mutable or non-serializable accumulators. */ def foldMutable[M, I, O](add: (M, I) => M, start: Unit => M, end: M => O): Fold[I, O] = new Fold[I, O] { @@ -237,15 +230,15 @@ object Fold extends CompatFold { } /** - * An even simpler Fold that collects into a Seq. Shorthand for "container[I, Seq];" fewer type - * arguments, better type inferrence. + * An even simpler Fold that collects into a Seq. Shorthand for "container[I, Seq];" fewer type arguments, + * better type inferrence. */ def seq[I]: Fold[I, Seq[I]] = container[I, Seq] /** - * A Fold that does no work and returns a constant. Analogous to Function1 const: - * def const[A, B](b: B): (A => B) = { _ => b } + * A Fold that does no work and returns a constant. 
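As a small sketch of that fusion, two folds combined with joinWith run in a single pass over the input; this assumes the overTraversable runner mentioned in the class comment above.

  import com.twitter.algebird.Fold

  val sum: Fold[Int, Int] = Fold.foldLeft[Int, Int](0)(_ + _)
  val count: Fold[Int, Long] = Fold.foldLeft[Int, Long](0L)((n, _) => n + 1)

  // one traversal of the input, both accumulators updated side by side
  val mean: Fold[Int, Double] = sum.joinWith(count)((s, n) => s.toDouble / n)

  assert(mean.overTraversable(List(1, 2, 3, 4)) == 2.5)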
Analogous to Function1 const: def const[A, B](b: B): (A + * => B) = { _ => b } */ def const[O](value: O): Fold[Any, O] = Fold.foldLeft(value) { case (u, _) => u } @@ -319,15 +312,15 @@ object Fold extends CompatFold { Fold.foldLeft(0L) { case (x, _) => x + 1 } /** - * A Fold that returns "true" if all elements of the sequence statisfy the predicate. - * Note this does not short-circuit enumeration of the sequence. + * A Fold that returns "true" if all elements of the sequence statisfy the predicate. Note this does not + * short-circuit enumeration of the sequence. */ def forall[I](pred: I => Boolean): Fold[I, Boolean] = foldLeft(true)((b, i) => b && pred(i)) /** - * A Fold that returns "true" if any element of the sequence statisfies the predicate. - * Note this does not short-circuit enumeration of the sequence. + * A Fold that returns "true" if any element of the sequence statisfies the predicate. Note this does not + * short-circuit enumeration of the sequence. */ def exists[I](pred: I => Boolean): Fold[I, Boolean] = foldLeft(false)((b, i) => b || pred(i)) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Functor.scala b/algebird-core/src/main/scala/com/twitter/algebird/Functor.scala index e2ee3e1ca..d4d70cc4b 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Functor.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Functor.scala @@ -20,9 +20,7 @@ import scala.annotation.implicitNotFound /** * Simple implementation of a Functor type-class. * - * Laws Functors must follow: - * map(m)(id) == m - * map(m)(f andThen g) == map(map(m)(f))(g) + * Laws Functors must follow: map(m)(id) == m map(m)(f andThen g) == map(map(m)(f))(g) */ @implicitNotFound(msg = "Cannot find Functor type class for ${M}") trait Functor[M[_]] { @@ -49,8 +47,8 @@ object Functor { } /** - * This enrichment allows us to use our Functor instances in for expressions: - * if (import Functor._) has been done + * This enrichment allows us to use our Functor instances in for expressions: if (import Functor._) has been + * done */ class FunctorOperators[A, M[_]](m: M[A])(implicit functor: Functor[M]) { // This is called fmap in haskell diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Group.scala b/algebird-core/src/main/scala/com/twitter/algebird/Group.scala index b82b77cdc..0ae546d45 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Group.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Group.scala @@ -18,12 +18,12 @@ package com.twitter.algebird import algebra.{Group => AGroup} import algebra.ring.AdditiveGroup import java.lang.{ + Boolean => JBool, + Double => JDouble, + Float => JFloat, Integer => JInt, - Short => JShort, Long => JLong, - Float => JFloat, - Double => JDouble, - Boolean => JBool + Short => JShort } import scala.reflect.ClassTag @@ -32,8 +32,8 @@ import scala.annotation.implicitNotFound import scala.math.Equiv /** - * Group: this is a monoid that also has subtraction (and negation): - * So, you can do (a-b), or -a (which is equal to 0 - a). + * Group: this is a monoid that also has subtraction (and negation): So, you can do (a-b), or -a (which is + * equal to 0 - a). 
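A quick sketch of the subtraction and negation this describes, assuming the implicit Group[Int] instance that algebird provides:

  import com.twitter.algebird.Group

  assert(Group.minus(5, 3) == 2) // a - b
  assert(Group.negate(5) == -5)  // -a, i.e. 0 - a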
*/ @implicitNotFound(msg = "Cannot find Group type class for ${T}") @@ -68,10 +68,8 @@ object UnitGroup extends ConstantGroup[Unit](()) object NullGroup extends ConstantGroup[Null](null) /** - * Some(5) - Some(3) == Some(2) - * Some(5) - Some(5) == None - * negate Some(5) == Some(-5) - * Note: Some(0) and None are equivalent under this Group + * Some(5) - Some(3) == Some(2) Some(5) - Some(5) == None negate Some(5) == Some(-5) Note: Some(0) and None + * are equivalent under this Group */ class OptionGroup[T](implicit group: Group[T]) extends OptionMonoid[T] with Group[Option[T]] { @@ -83,8 +81,8 @@ class OptionGroup[T](implicit group: Group[T]) extends OptionMonoid[T] with Grou } /** - * Extends pair-wise sum Array monoid into a Group - * negate is defined as the negation of each element of the array. + * Extends pair-wise sum Array monoid into a Group negate is defined as the negation of each element of the + * array. */ class ArrayGroup[T: ClassTag](implicit grp: Group[T]) extends ArrayMonoid[T]() with Group[Array[T]] { override def negate(g: Array[T]): Array[T] = { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Hash128.scala b/algebird-core/src/main/scala/com/twitter/algebird/Hash128.scala index 64a97fd69..d507c82bb 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Hash128.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Hash128.scala @@ -17,8 +17,7 @@ limitations under the License. package com.twitter.algebird /** - * A typeclass to represent hashing to 128 bits. - * Used for HLL, but possibly other applications + * A typeclass to represent hashing to 128 bits. Used for HLL, but possibly other applications */ trait Hash128[-K] extends java.io.Serializable { @@ -38,9 +37,8 @@ trait Hash128[-K] extends java.io.Serializable { } /** - * This gives default hashes using Murmur128 with a seed - * of 12345678 (for no good reason, but it should not be changed - * lest we break serialized HLLs) + * This gives default hashes using Murmur128 with a seed of 12345678 (for no good reason, but it should not be + * changed lest we break serialized HLLs) */ object Hash128 extends java.io.Serializable { def hash[K](k: K)(implicit h: Hash128[K]): (Long, Long) = h.hash(k) @@ -64,9 +62,8 @@ object Hash128 extends java.io.Serializable { } /** - * This serializes the string into UTF-8, then hashes. This is different - * than the UTF-16 based approach in Murmur128.apply(CharSequence), - * but has been more commonly used in HLL. + * This serializes the string into UTF-8, then hashes. This is different than the UTF-16 based approach in + * Murmur128.apply(CharSequence), but has been more commonly used in HLL. 
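For example, building the UTF-8 string hasher described here with the documented default seed:

  import com.twitter.algebird.Hash128

  val stringHash: Hash128[String] = Hash128.murmur128Utf8String(12345678L)
  val (h1, h2) = stringHash.hash("algebird") // the two 64-bit halves of the 128-bit hash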
*/ def murmur128Utf8String(defaultSeed: Long): Hash128[String] = murmur128ArrayByte(defaultSeed).contramap(_.getBytes("UTF-8")) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala index c2eab81f5..9bebfd22a 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala @@ -38,10 +38,11 @@ case class BitSetLite(in: Array[Byte]) { /** * Implementation of the HyperLogLog approximate counting as a Monoid * - * @see [[http://algo.inria.fr/flajolet/Publications/FlFuGaMe07.pdf]] + * @see + * [[http://algo.inria.fr/flajolet/Publications/FlFuGaMe07.pdf]] * - * HyperLogLog: the analysis of a near-optimal cardinality estimation algorithm - * Philippe Flajolet and Éric Fusy and Olivier Gandouet and Frédéric Meunier + * HyperLogLog: the analysis of a near-optimal cardinality estimation algorithm Philippe Flajolet and Éric + * Fusy and Olivier Gandouet and Frédéric Meunier */ object HyperLogLog { @@ -113,15 +114,13 @@ object HyperLogLog { rhoW(bsl.in, bits) /** - * The value 'w' is represented as a bitset (encoding in - * `bytes`). This function counts the number of leading zeros in 'w'. + * The value 'w' is represented as a bitset (encoding in `bytes`). This function counts the number of + * leading zeros in 'w'. * - * Each byte is treated as a set of bits (little-endian). That is, - * the one bit represents the first value, then the two bit, then - * four, and so on. + * Each byte is treated as a set of bits (little-endian). That is, the one bit represents the first value, + * then the two bit, then four, and so on. * - * We treat the leading `bits` bits as if they were instead a single - * zero bit. + * We treat the leading `bits` bits as if they were instead a single zero bit. */ def rhoW(bytes: Array[Byte], bits: Int): Byte = { var i = bits / 8 // tracks the position in bytes @@ -147,12 +146,11 @@ object HyperLogLog { } /** - * We are computing j and \rho(w) from the paper, - * sorry for the name, but it allows someone to compare to the paper extremely low probability - * rhow (position of the leftmost one bit) is > 127, so we use a Byte to store it - * Given a hash the value 'j' is equal to and - * the value 'w' is equal to . The function rho counts the number of leading - * zeroes in 'w'. We can calculate rho(w) at once with the method rhoW. + * We are computing j and \rho(w) from the paper, sorry for the name, but it allows someone to compare to + * the paper extremely low probability rhow (position of the leftmost one bit) is > 127, so we use a Byte to + * store it Given a hash the value 'j' is equal to and the + * value 'w' is equal to . The function rho counts the number of leading zeroes in 'w'. We + * can calculate rho(w) at once with the method rhoW. */ def jRhoW(in: Array[Byte], bits: Int): (Int, Byte) = (j(in, bits), rhoW(in, bits)) @@ -227,36 +225,22 @@ object HyperLogLog { } /** - * The true error is distributed like a Gaussian with - * this standard deviation. - * let m = 2^bits. The size of the HLL is m bytes. + * The true error is distributed like a Gaussian with this standard deviation. let m = 2^bits. The size of + * the HLL is m bytes. 
* - * bits | size | error - * 9 512 0.0460 - * 10 1024 0.0325 - * 11 2048 0.0230 - * 12 4096 0.0163 - * 13 8192 0.0115 - * 14 16384 0.0081 - * 15 32768 0.0057 - * 16 65536 0.0041 - * 17 131072 0.0029 - * 18 262144 0.0020 - * 19 524288 0.0014 - * 20 1048576 0.0010 + * bits | size | error 9 512 0.0460 10 1024 0.0325 11 2048 0.0230 12 4096 0.0163 13 8192 0.0115 14 16384 + * 0.0081 15 32768 0.0057 16 65536 0.0041 17 131072 0.0029 18 262144 0.0020 19 524288 0.0014 20 1048576 + * 0.0010 * - * Keep in mind, to store N distinct longs, you only need 8N bytes. - * See SetSizeAggregator for an approach that uses an exact set - * when the cardinality is small, and switches to HLL after we have - * enough items. Ideally, you would keep an exact set until it is - * smaller to store the HLL (but actually since we use sparse vectors - * to store the HLL, a small HLL takes a lot less than the size above). + * Keep in mind, to store N distinct longs, you only need 8N bytes. See SetSizeAggregator for an approach + * that uses an exact set when the cardinality is small, and switches to HLL after we have enough items. + * Ideally, you would keep an exact set until it is smaller to store the HLL (but actually since we use + * sparse vectors to store the HLL, a small HLL takes a lot less than the size above). */ def error(bits: Int): Double = 1.04 / scala.math.sqrt(twopow(bits)) /** - * This gives you a number of bits to use to have a given standard - * error + * This gives you a number of bits to use to have a given standard error */ def bitsForError(err: Double): Int = { // If the error is less than 0.00003, the HLL needs more than 1 << 31 bytes @@ -321,17 +305,24 @@ sealed abstract class HLL extends java.io.Serializable { def updateInto(buffer: Array[Byte]): Unit /** - * Returns the modified value of rhoW at j, taking into account the - * extra run of bits added to rho due to reduction in the length of j. + * Returns the modified value of rhoW at j, taking into account the extra run of bits added to rho due to + * reduction in the length of j. 
* - * @param currentJ j for which modified rhoW is needed - * @param currentRhoW Current rhoW value for j - * @param reducedBits New length of j - * @param reducedSize New size (passed in to avoid repeated computation) - * @param bitMask Mask to force early termination of HyperLogLog.rhoW (passed in to avoid repeated computation) - * @param buf Byte array (passed in to avoid repeated allocation) + * @param currentJ + * j for which modified rhoW is needed + * @param currentRhoW + * Current rhoW value for j + * @param reducedBits + * New length of j + * @param reducedSize + * New size (passed in to avoid repeated computation) + * @param bitMask + * Mask to force early termination of HyperLogLog.rhoW (passed in to avoid repeated computation) + * @param buf + * Byte array (passed in to avoid repeated allocation) * - * @return New value of rhoW + * @return + * New value of rhoW */ protected def getModifiedRhoW( currentJ: Int, @@ -361,9 +352,11 @@ sealed abstract class HLL extends java.io.Serializable { * * [[http://research.neustar.biz/2013/03/25/hyperloglog-engineering-choosing-the-right-bits/]] * - * @param reducedBits The new number of bits to use + * @param reducedBits + * The new number of bits to use * - * @return New HLL instance with reduced size + * @return + * New HLL instance with reduced size */ def downsize(reducedBits: Int): HLL = { require(reducedBits > 3 && reducedBits <= bits, s"Use at least 4, and at most $bits bits") @@ -382,12 +375,17 @@ sealed abstract class HLL extends java.io.Serializable { /** * Returns a new HLL instance with reduced size * - * @param reducedBits The new number of bits to use (for the length of j) - * @param reducedSize New size (passed in to avoid repeated computation) - * @param bitMask Mask to force early termination of HyperLogLog.rhoW (passed in to avoid repeated computation) - * @param buf Byte array (passed in to avoid repeated allocation) + * @param reducedBits + * The new number of bits to use (for the length of j) + * @param reducedSize + * New size (passed in to avoid repeated computation) + * @param bitMask + * Mask to force early termination of HyperLogLog.rhoW (passed in to avoid repeated computation) + * @param buf + * Byte array (passed in to avoid repeated allocation) * - * @return New HLL instance with reduced size + * @return + * New HLL instance with reduced size */ protected def downsize(reducedBits: Int, reducedSize: Int, bitMask: Int, buf: Array[Byte]): HLL } @@ -656,24 +654,20 @@ object HyperLogLogAggregator { } /** - * Create an Aggregator that returns the estimate size, not the HLL - * approximate data structure itself. This is convenient, but cannot - * be combined later with another unique count like an HLL could. + * Create an Aggregator that returns the estimate size, not the HLL approximate data structure itself. This + * is convenient, but cannot be combined later with another unique count like an HLL could. * - * @param bits is the log of the size the HLL. + * @param bits + * is the log of the size the HLL. */ def sizeAggregator(bits: Int): MonoidAggregator[Array[Byte], HLL, Double] = apply(bits).andThenPresent(_.estimatedSize) /** - * Give a HyperLogLog Aggregator that have the given error. - * It is up to you, using bitsForError, to see if the size is - * still practical for your application. + * Give a HyperLogLog Aggregator that have the given error. It is up to you, using bitsForError, to see if + * the size is still practical for your application. 
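A worked check of the error formula: error(bits) = 1.04 / sqrt(2^bits), so 12 bits (an HLL of 4096 bytes) gives 1.04 / 64 ≈ 0.0163, matching the table, and bitsForError inverts that relationship.

  import com.twitter.algebird.HyperLogLog

  val err = HyperLogLog.error(12)            // ≈ 0.0163, the 12-bit row of the table above
  val bits = HyperLogLog.bitsForError(0.016) // bits needed for roughly 1.6% standard error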
* - * 0.016 (1.6%), 4 KB - * 0.006 (0.6%), 32 KB - * 0.002 (0.2%), 256 KB - * 0.001 (0.1%), 1024 KB + * 0.016 (1.6%), 4 KB 0.006 (0.6%), 32 KB 0.002 (0.2%), 256 KB 0.001 (0.1%), 1024 KB * * Cutting the error in half takes 4x the size. */ @@ -692,9 +686,9 @@ object HyperLogLogAggregator { } /** - * Give an approximate set size (not the HLL) based on inputs of Array[Byte] - * see HyperLogLog.bitsForError for a size table based on the error - * see SetSizeHashAggregator for a version that uses exact sets up to a given size + * Give an approximate set size (not the HLL) based on inputs of Array[Byte] see HyperLogLog.bitsForError + * for a size table based on the error see SetSizeHashAggregator for a version that uses exact sets up to a + * given size */ def sizeWithError(err: Double): MonoidAggregator[Array[Byte], HLL, Double] = withError(err).andThenPresent(_.estimatedSize) @@ -740,9 +734,9 @@ case class SetSizeAggregator[A](hllBits: Int, maxSetSize: Int = 10)(implicit toB } /** - * Use a Hash128 when converting to HLL, rather than an implicit conversion to Array[Byte] - * Unifying with SetSizeAggregator would be nice, but since they only differ in an implicit - * parameter, scala seems to be giving me errors. + * Use a Hash128 when converting to HLL, rather than an implicit conversion to Array[Byte] Unifying with + * SetSizeAggregator would be nice, but since they only differ in an implicit parameter, scala seems to be + * giving me errors. */ case class SetSizeHashAggregator[A](hllBits: Int, maxSetSize: Int = 10)(implicit hash: Hash128[A]) extends SetSizeAggregatorBase[A](hllBits, maxSetSize) { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala index 443b00be2..f795b1a4c 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala @@ -17,18 +17,20 @@ limitations under the License. package com.twitter.algebird /** - * HLLSeries can produce a HyperLogLog counter for any window into the past, - * using a constant factor more space than HyperLogLog. + * HLLSeries can produce a HyperLogLog counter for any window into the past, using a constant factor more + * space than HyperLogLog. * - * For each hash bucket, rather than keeping a single max RhoW value, it keeps - * every RhoW value it has seen, and the max timestamp where it saw that value. - * This allows it to reconstruct an HLL as it would be had it started at zero at - * any given point in the past, and seen the same updates this structure has seen. + * For each hash bucket, rather than keeping a single max RhoW value, it keeps every RhoW value it has seen, + * and the max timestamp where it saw that value. This allows it to reconstruct an HLL as it would be had it + * started at zero at any given point in the past, and seen the same updates this structure has seen. 
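A minimal sketch of the size aggregator described above, assuming the inputs have already been serialized to Array[Byte]:

  import com.twitter.algebird.HyperLogLogAggregator

  // approximate distinct count targeting roughly 1% standard error
  val distinct = HyperLogLogAggregator.sizeWithError(0.01)
  val estimate: Double = distinct(List("a", "b", "a", "c").map(_.getBytes("UTF-8")))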
* - * @param bits The number of bits to use - * @param rows Vector of maps of RhoW -> max timestamp where it was seen + * @param bits + * The number of bits to use + * @param rows + * Vector of maps of RhoW -> max timestamp where it was seen * - * @return New HLLSeries + * @return + * New HLLSeries */ case class HLLSeries(bits: Int, rows: Vector[Map[Int, Long]]) { @@ -79,9 +81,11 @@ case class HLLSeries(bits: Int, rows: Vector[Map[Int, Long]]) { } /** - * @param since Timestamp from which to reconstruct the HLL + * @param since + * Timestamp from which to reconstruct the HLL * - * @return New HLLSeries only including RhoWs for values seen at or after the given timestamp + * @return + * New HLLSeries only including RhoWs for values seen at or after the given timestamp */ def since(threshold: Long): HLLSeries = HLLSeries(bits, rows.map(_.filter { case (_, ts) => ts >= threshold })) @@ -107,15 +111,11 @@ case class HLLSeries(bits: Int, rows: Vector[Map[Int, Long]]) { * * val hllSeriesMonoid = new HyperLogLogSeriesMonoid(bits) * - * val examples: Seq[Array[Byte], Long] - * val series = examples - * .map { case (bytes, timestamp) => - * hllSeriesMonoid.create(bytes, timestamp) - * } - * .reduce { hllSeriesMonoid.plus(_,_) } + * val examples: Seq[Array[Byte], Long] val series = examples .map { case (bytes, timestamp) => + * hllSeriesMonoid.create(bytes, timestamp) } .reduce { hllSeriesMonoid.plus(_,_) } * - * val estimate1 = series.since(timestamp1.toLong).toHLL.estimatedSize - * val estimate2 = series.since(timestamp2.toLong).toHLL.estimatedSize + * val estimate1 = series.since(timestamp1.toLong).toHLL.estimatedSize val estimate2 = + * series.since(timestamp2.toLong).toHLL.estimatedSize */ class HyperLogLogSeriesMonoid(val bits: Int) extends Monoid[HLLSeries] { import HyperLogLog._ diff --git a/algebird-core/src/main/scala/com/twitter/algebird/IndexedSeq.scala b/algebird-core/src/main/scala/com/twitter/algebird/IndexedSeq.scala index cbbafdb78..8b7f4a8ef 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/IndexedSeq.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/IndexedSeq.scala @@ -17,12 +17,11 @@ limitations under the License. package com.twitter.algebird /** - * Note that this works similar to Semigroup[Map[Int,T]] not like Semigroup[List[T]] - * This does element-wise operations, like standard vector math, not concatenation, - * like Semigroup[String] or Semigroup[List[T]] + * Note that this works similar to Semigroup[Map[Int,T]] not like Semigroup[List[T]] This does element-wise + * operations, like standard vector math, not concatenation, like Semigroup[String] or Semigroup[List[T]] * - * If l.size != r.size, then only sums the elements up to the index min(l.size, r.size); appends - * the remainder to the result. + * If l.size != r.size, then only sums the elements up to the index min(l.size, r.size); appends the remainder + * to the result. 
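Spelled out, the usage sketched in that comment looks roughly like this; the timestamps are illustrative, and since keeps only RhoWs seen at or after the given time, as described above.

  import com.twitter.algebird.HyperLogLogSeriesMonoid

  val bits = 12
  val seriesMonoid = new HyperLogLogSeriesMonoid(bits)

  val t0 = 1000L
  val events: Seq[(Array[Byte], Long)] =
    Seq("a" -> t0, "b" -> (t0 + 1), "a" -> (t0 + 2)).map { case (s, ts) => (s.getBytes("UTF-8"), ts) }

  val series = events
    .map { case (bytes, ts) => seriesMonoid.create(bytes, ts) }
    .reduce(seriesMonoid.plus(_, _))

  // distinct-count estimate restricted to events at or after t0 + 1
  val estimate = series.since(t0 + 1).toHLL.estimatedSize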
*/ class IndexedSeqSemigroup[T](implicit semi: Semigroup[T]) extends Semigroup[IndexedSeq[T]] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala b/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala index 5f00f6732..ea17861c7 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala @@ -29,10 +29,8 @@ sealed trait Interval[T] extends java.io.Serializable { final def &&(that: Interval[T])(implicit ord: Ordering[T]): Interval[T] = intersect(that) /** - * Map the Interval with a non-decreasing function. - * If you use a non-monotonic function (like x^2) - * then the result is meaningless. - * TODO: It might be good to have types for these properties in algebird. + * Map the Interval with a non-decreasing function. If you use a non-monotonic function (like x^2) then the + * result is meaningless. TODO: It might be good to have types for these properties in algebird. */ def mapNonDecreasing[U](fn: T => U): Interval[U] } @@ -54,11 +52,9 @@ case class Empty[T]() extends Interval[T] { object Interval extends java.io.Serializable { /** - * Class that only exists so that [[leftClosedRightOpen]] and - * [[leftOpenRightClosed]] can retain the type information of the - * returned interval. The compiler doesn't know anything about - * ordering, so without [[MaybeEmpty]] the only valid return type - * is Interval[T]. + * Class that only exists so that [[leftClosedRightOpen]] and [[leftOpenRightClosed]] can retain the type + * information of the returned interval. The compiler doesn't know anything about ordering, so without + * [[MaybeEmpty]] the only valid return type is Interval[T]. */ sealed abstract class MaybeEmpty[T, NonEmpty[t] <: Interval[t]] { def isEmpty: Boolean @@ -117,9 +113,8 @@ object Interval extends java.io.Serializable { else MaybeEmpty.SoEmpty[T, ExLowExUp]() /** - * This is here for binary compatibility reasons. These methods should - * be moved to Interval, which should also be an abstract class for - * better binary compatibility at the next incompatible change + * This is here for binary compatibility reasons. These methods should be moved to Interval, which should + * also be an abstract class for better binary compatibility at the next incompatible change */ implicit final class IntervalMethods[T](val intr: Interval[T]) extends AnyVal { def isEmpty(implicit succ: Successible[T], pred: Predecessible[T]): Boolean = intr match { @@ -142,8 +137,7 @@ object Interval extends java.io.Serializable { } /** - * If this returns Some(t), then intr.contains(t) and there - * is no s less than t such that intr.contains(s) + * If this returns Some(t), then intr.contains(t) and there is no s less than t such that intr.contains(s) * * if this returns None, it may be Empty, Upper or Universe */ @@ -156,8 +150,8 @@ object Interval extends java.io.Serializable { } /** - * If this returns Some(t), then intr.contains(t) and there - * is no s greater than t such that intr.contains(s) + * If this returns Some(t), then intr.contains(t) and there is no s greater than t such that + * intr.contains(s) * * if this returns None, it may be Empty, Lower, or Universe */ @@ -176,19 +170,16 @@ object Interval extends java.io.Serializable { sealed trait Lower[T] extends Interval[T] { /** - * This may give a false positive (but should try not to). - * Note the case of (0,1) for the integers. 
If they were doubles, - * this would intersect, but since there are no members of the - * set Int that are bigger than 0 and less than 1, they don't really - * intersect. So, ordering is not enough here. You need a stronger + * This may give a false positive (but should try not to). Note the case of (0,1) for the integers. If they + * were doubles, this would intersect, but since there are no members of the set Int that are bigger than 0 + * and less than 1, they don't really intersect. So, ordering is not enough here. You need a stronger * notion, which we don't have a typeclass for. */ def intersects(u: Upper[T])(implicit ord: Ordering[T]): Boolean /** - * The smallest value that is contained here - * This is an Option, because of cases like ExclusiveLower(Int.MaxValue) - * which are pathological and equivalent to Empty + * The smallest value that is contained here This is an Option, because of cases like + * ExclusiveLower(Int.MaxValue) which are pathological and equivalent to Empty */ def least(implicit s: Successible[T]): Option[T] def strictLowerBound(implicit p: Predecessible[T]): Option[T] @@ -205,9 +196,8 @@ sealed trait Lower[T] extends Interval[T] { sealed trait Upper[T] extends Interval[T] { /** - * The smallest value that is contained here - * This is an Option, because of cases like ExclusiveUpper(Int.MinValue), - * which are pathological and equivalent to Empty + * The smallest value that is contained here This is an Option, because of cases like + * ExclusiveUpper(Int.MinValue), which are pathological and equivalent to Empty */ def greatest(implicit p: Predecessible[T]): Option[T] // The smallest value that is not present @@ -345,8 +335,7 @@ case class Intersection[L[t] <: Lower[t], U[t] <: Upper[t], T](lower: L[T], uppe lower.least.filter(upper.contains(_)(s.ordering)) /** - * Goes from lowest to highest for all items - * that are contained in this Intersection + * Goes from lowest to highest for all items that are contained in this Intersection */ def leastToGreatest(implicit s: Successible[T]): Iterable[T] = { val self = this @@ -362,8 +351,7 @@ case class Intersection[L[t] <: Lower[t], U[t] <: Upper[t], T](lower: L[T], uppe upper.greatest.filter(lower.contains(_)(p.ordering)) /** - * Goes from highest to lowest for all items - * that are contained in this Intersection + * Goes from highest to lowest for all items that are contained in this Intersection */ def greatestToLeast(implicit p: Predecessible[T]): Iterable[T] = { val self = this @@ -376,14 +364,11 @@ case class Intersection[L[t] <: Lower[t], U[t] <: Upper[t], T](lower: L[T], uppe } /** - * Some intervals can actually be synonyms for empty: - * (0,0) for instance, contains nothing. This cannot be normalized to - * [a, b) form, thus we return an option - * Also, there are cases like [Int.MinValue, Int.MaxValue] that cannot - * are actually equivalent to Universe. - * The bottom line: if this returns None, it just means you can't express - * it this way, it does not mean it is empty or universe, etc... (there - * are other cases). + * Some intervals can actually be synonyms for empty: (0,0) for instance, contains nothing. This cannot be + * normalized to [a, b) form, thus we return an option Also, there are cases like [Int.MinValue, + * Int.MaxValue] that cannot are actually equivalent to Universe. The bottom line: if this returns None, it + * just means you can't express it this way, it does not mean it is empty or universe, etc... (there are + * other cases). 
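For instance, using the bound types referenced above and the && intersection defined on Interval:

  import com.twitter.algebird.{ExclusiveLower, ExclusiveUpper}

  // the open interval (2, 10): contains 3 through 9, but neither endpoint
  val intr = ExclusiveLower(2) && ExclusiveUpper(10)

  assert(intr.contains(3) && intr.contains(9))
  assert(!intr.contains(2) && !intr.contains(10))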
*/ def toLeftClosedRightOpen(implicit s: Successible[T] diff --git a/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala b/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala index f4f43f88b..b9b829e75 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala @@ -23,7 +23,7 @@ import java.lang.{ Long => JLong, Short => JShort } -import java.util.{List => JList, Map => JMap, ArrayList => JArrayList, HashMap => JHashMap} +import java.util.{ArrayList => JArrayList, HashMap => JHashMap, List => JList, Map => JMap} import scala.collection.JavaConverters._ @@ -84,8 +84,8 @@ object JBoolRing extends Ring[JBool] { } /** - * Since Lists are mutable, this always makes a full copy. Prefer scala immutable Lists - * if you use scala immutable lists, the tail of the result of plus is always the right argument + * Since Lists are mutable, this always makes a full copy. Prefer scala immutable Lists if you use scala + * immutable lists, the tail of the result of plus is always the right argument */ class JListMonoid[T] extends Monoid[JList[T]] { override def isNonZero(x: JList[T]): Boolean = !x.isEmpty @@ -99,9 +99,8 @@ class JListMonoid[T] extends Monoid[JList[T]] { } /** - * Since maps are mutable, this always makes a full copy. Prefer scala immutable maps - * if you use scala immutable maps, this operation is much faster - * TODO extend this to Group, Ring + * Since maps are mutable, this always makes a full copy. Prefer scala immutable maps if you use scala + * immutable maps, this operation is much faster TODO extend this to Group, Ring */ class JMapMonoid[K, V: Semigroup] extends Monoid[JMap[K, V]] { override lazy val zero: JHashMap[K, V] = new JHashMap[K, V](0) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Last.scala b/algebird-core/src/main/scala/com/twitter/algebird/Last.scala index edfc4786a..5dcc32b89 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Last.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Last.scala @@ -18,30 +18,29 @@ package com.twitter.algebird import algebra.Band /** - * Tracks the "most recent", or last, wrapped instance of `T` by the - * order in which items are seen. + * Tracks the "most recent", or last, wrapped instance of `T` by the order in which items are seen. * - * @param get wrapped instance of `T` + * @param get + * wrapped instance of `T` */ case class Last[@specialized(Int, Long, Float, Double) +T](get: T) { /** * Returns the argument `r`, always. * - * @param r returned of `Last[U]` + * @param r + * returned of `Last[U]` */ def +[U >: T](r: Last[U]): Last[U] = r } /** - * Provides a set of operations and typeclass instances needed to use - * [[Last]] instances. + * Provides a set of operations and typeclass instances needed to use [[Last]] instances. */ object Last extends LastInstances { /** - * Returns an [[Aggregator]] that selects the last instance of `T` - * in the aggregated stream. + * Returns an [[Aggregator]] that selects the last instance of `T` in the aggregated stream. */ def aggregator[T]: LastAggregator[T] = LastAggregator() } @@ -49,8 +48,8 @@ object Last extends LastInstances { private[algebird] sealed abstract class LastInstances { /** - * Returns a [[Semigroup]] instance with a `plus` implementation - * that always returns the last (ie, the right) `T` argument. 
+ * Returns a [[Semigroup]] instance with a `plus` implementation that always returns the last (ie, the + * right) `T` argument. */ def lastSemigroup[T]: Semigroup[T] with Band[T] = new Semigroup[T] with Band[T] { @@ -63,17 +62,15 @@ private[algebird] sealed abstract class LastInstances { } /** - * Returns a [[Semigroup]] instance for [[Last]][T]. The `plus` - * implementation always returns the last (ie, the right) `Last[T]` - * argument. + * Returns a [[Semigroup]] instance for [[Last]] [T]. The `plus` implementation always returns the last (ie, + * the right) `Last[T]` argument. */ implicit def semigroup[T]: Semigroup[Last[T]] with Band[Last[T]] = lastSemigroup[Last[T]] } /** - * [[Aggregator]] that selects the last instance of `T` in the - * aggregated stream. + * [[Aggregator]] that selects the last instance of `T` in the aggregated stream. */ case class LastAggregator[T]() extends Aggregator[T, T, T] { override def prepare(v: T): T = v diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala b/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala index e1543e79c..17cba9f59 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala @@ -192,15 +192,15 @@ object MapAlgebra { m.filter { case (_, v) => Monoid.isNonZero(v) } /** - * For each key, sum all the values. Note that if V is a Monoid, the current - * implementation will drop from the output any key where the values are all - * Monoid.zero. If the Semigroup is a Monoid, This function is equivalent to: + * For each key, sum all the values. Note that if V is a Monoid, the current implementation will drop from + * the output any key where the values are all Monoid.zero. If the Semigroup is a Monoid, This function is + * equivalent to: * - * pairs.filter(_._2 != Monoid.zero).groupBy(_._1).mapValues(_.map(_._2).sum) + * pairs.filter(_._2 != Monoid.zero).groupBy(_._1).mapValues(_.map(_._2).sum) * * Otherwise, the function is equivalent to: * - * pairs.groupBy(_._1).mapValues(_.map(_._2).sum) + * pairs.groupBy(_._1).mapValues(_.map(_._2).sum) */ def sumByKey[K, V: Semigroup](pairs: TraversableOnce[(K, V)]): Map[K, V] = Monoid.sum(pairs.iterator.map(Map(_))) @@ -208,7 +208,7 @@ object MapAlgebra { /** * For each key, creates a list of all values. This function is equivalent to: * - * pairs.groupBy(_._1).mapValues(_.map(_._2)) + * pairs.groupBy(_._1).mapValues(_.map(_._2)) */ def group[K, V](pairs: TraversableOnce[(K, V)]): Map[K, List[V]] = if (pairs.iterator.isEmpty) Map.empty @@ -245,8 +245,7 @@ object MapAlgebra { .transform { case (_, (v, w)) => (v.headOption, w.headOption) } /** - * Reverses a graph losslessly - * None key is for v's with no sources. + * Reverses a graph losslessly None key is for v's with no sources. */ def invertExact[K, V](m: Map[Option[K], Set[V]]): Map[Option[V], Set[K]] = { def nonEmptyIter[T](i: Iterable[T]): Iterable[Option[T]] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Max.scala b/algebird-core/src/main/scala/com/twitter/algebird/Max.scala index 86b6bd805..df95c4691 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Max.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Max.scala @@ -22,19 +22,19 @@ import algebra.{BoundedSemilattice, Semilattice} /** * Tracks the maximum wrapped instance of some ordered type `T`. * - * [[Max]][T] is a [[Semigroup]] for all types `T`. 
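Two quick sketches of the behaviours described above: Last keeps the right-most value, and sumByKey sums values per key.

  import com.twitter.algebird.{Last, MapAlgebra, Semigroup}

  // plus keeps the right-most value
  assert(Semigroup.plus(Last(1), Last(2)) == Last(2))
  assert(Last.aggregator[Int](List(3, 1, 4)) == 4)

  // per-key sums; keys whose values sum to Monoid.zero would be dropped from the result
  assert(MapAlgebra.sumByKey(List("a" -> 1, "b" -> 2, "a" -> 3)) == Map("a" -> 4, "b" -> 2))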
If `T` has some - * minimum element (`Long` has `Long.MinValue`, for example), then - * [[Max]][T] is a [[Monoid]]. + * [[Max]] [T] is a [[Semigroup]] for all types `T`. If `T` has some minimum element (`Long` has + * `Long.MinValue`, for example), then [[Max]] [T] is a [[Monoid]]. * - * @param get wrapped instance of `T` + * @param get + * wrapped instance of `T` */ case class Max[@specialized(Int, Long, Float, Double) +T](get: T) { /** - * If this instance wraps a larger `T` than `r`, returns this - * instance, else returns `r`. + * If this instance wraps a larger `T` than `r`, returns this instance, else returns `r`. * - * @param r instance of `Max[U]` for comparison + * @param r + * instance of `Max[U]` for comparison */ def max[U >: T](r: Max[U])(implicit ord: Ordering[U]): Max[U] = Max.ordering.max(this, r) @@ -42,27 +42,27 @@ case class Max[@specialized(Int, Long, Float, Double) +T](get: T) { /** * Identical to [[max]]. * - * @param r instance of `Max[U]` for comparison + * @param r + * instance of `Max[U]` for comparison */ def +[U >: T](r: Max[U])(implicit ord: Ordering[U]): Max[U] = max(r) } /** - * Provides a set of operations and typeclass instances needed to use - * [[Max]] instances. + * Provides a set of operations and typeclass instances needed to use [[Max]] instances. */ object Max extends MaxInstances { /** - * Returns an [[Aggregator]] that selects the maximum instance of an - * ordered type `T` in the aggregated stream. + * Returns an [[Aggregator]] that selects the maximum instance of an ordered type `T` in the aggregated + * stream. */ def aggregator[T](implicit ord: Ordering[T]): MaxAggregator[T] = MaxAggregator()(ord) /** - * Returns a [[Semigroup]] instance with a `plus` implementation - * that always returns the maximum `T` argument. + * Returns a [[Semigroup]] instance with a `plus` implementation that always returns the maximum `T` + * argument. */ def maxSemigroup[T](implicit ord: Ordering[T]): Semigroup[T] with Semilattice[T] = new Semigroup[T] with Semilattice[T] { @@ -81,17 +81,15 @@ private[algebird] sealed abstract class MaxInstances extends LowPriorityMaxInsta monoid(Long.MinValue) /** - * [[Monoid]] for [[Max]][Double] with `zero == Double.MinValue` - * Note: MinValue > NegativeInfinity, but people may - * be relying on this emitting a non-infinite number. Sadness + * [[Monoid]] for [[Max]] [Double] with `zero == Double.MinValue` Note: MinValue > NegativeInfinity, but + * people may be relying on this emitting a non-infinite number. Sadness */ implicit def doubleMonoid: Monoid[Max[Double]] with BoundedSemilattice[Max[Double]] = monoid(Double.MinValue) /** - * [[Monoid]] for [[Max]][Float] with `zero == Float.MinValue` - * Note: MinValue > NegativeInfinity, but people may - * be relying on this emitting a non-infinite number. Sadness + * [[Monoid]] for [[Max]] [Float] with `zero == Float.MinValue` Note: MinValue > NegativeInfinity, but + * people may be relying on this emitting a non-infinite number. Sadness */ implicit def floatMonoid: Monoid[Max[Float]] with BoundedSemilattice[Max[Float]] = monoid(Float.MinValue) @@ -105,11 +103,13 @@ private[algebird] sealed abstract class LowPriorityMaxInstances { implicit def ordering[T: Ordering]: Ordering[Max[T]] = Ordering.by(_.get) /** - * Returns a [[Monoid]] instance for [[Max]][T] that combines - * instances using [[Max.max]] and uses `zero` for its identity. + * Returns a [[Monoid]] instance for [[Max]] [T] that combines instances using [[Max.max]] and uses `zero` + * for its identity. 
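Illustrating the monoid described here, using the Max[Long] instance whose zero is Long.MinValue:

  import com.twitter.algebird.{Max, Monoid, Semigroup}

  assert(Semigroup.plus(Max(3L), Max(7L)) == Max(7L))
  // an empty sum falls back to the monoid identity, Max(Long.MinValue)
  assert(Monoid.sum(List.empty[Max[Long]]) == Max(Long.MinValue))
  assert(Max.aggregator[Long](List(3L, 1L, 4L)) == 4L)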
* - * @param zero identity of the returned [[Monoid]] instance - * @note `zero` must be `<=` every element of `T` for the returned instance to be lawful. + * @param zero + * identity of the returned [[Monoid]] instance + * @note + * `zero` must be `<=` every element of `T` for the returned instance to be lawful. */ def monoid[T: Ordering](zero: => T): Monoid[Max[T]] with BoundedSemilattice[Max[T]] = { val z = zero // avoid confusion below when overriding zero @@ -122,8 +122,8 @@ private[algebird] sealed abstract class LowPriorityMaxInstances { } /** - * Returns a [[Semigroup]] instance for [[Max]][T]. The `plus` - * implementation always returns the maximum `Max[T]` argument. + * Returns a [[Semigroup]] instance for [[Max]] [T]. The `plus` implementation always returns the maximum + * `Max[T]` argument. */ implicit def semigroup[T: Ordering]: Semigroup[Max[T]] with Semilattice[Max[T]] = // There's no need to override `sumOption`, since the default @@ -136,9 +136,8 @@ private[algebird] sealed abstract class LowPriorityMaxInstances { } /** - * Returns a [[Monoid]] instance for `Max[List[T]]` that compares - * lists first by length and then element-wise by `T`, and returns - * the maximum value. + * Returns a [[Monoid]] instance for `Max[List[T]]` that compares lists first by length and then + * element-wise by `T`, and returns the maximum value. */ implicit def listMonoid[T: Ordering]: Monoid[Max[List[T]]] with BoundedSemilattice[Max[List[T]]] = monoid[List[T]](Nil)(new Ordering[List[T]] { @@ -176,9 +175,8 @@ private[algebird] sealed abstract class LowPriorityMaxInstances { } /** - * Returns a [[Monoid]] instance for `Max[Vector[T]]` that compares - * lists first by length and then element-wise by `T`, and returns - * the maximum value. + * Returns a [[Monoid]] instance for `Max[Vector[T]]` that compares lists first by length and then + * element-wise by `T`, and returns the maximum value. */ implicit def vectorMonoid[T: Ordering]: Monoid[Max[Vector[T]]] with BoundedSemilattice[Max[Vector[T]]] = monoid[Vector[T]](Vector.empty[T])(new Ordering[Vector[T]] { @@ -188,9 +186,8 @@ private[algebird] sealed abstract class LowPriorityMaxInstances { }) /** - * Returns a [[Monoid]] instance for `Max[Stream[T]]` that compares - * lists first by length and then element-wise by `T`, and returns - * the maximum value. + * Returns a [[Monoid]] instance for `Max[Stream[T]]` that compares lists first by length and then + * element-wise by `T`, and returns the maximum value. */ implicit def streamMonoid[T: Ordering]: Monoid[Max[Stream[T]]] with BoundedSemilattice[Max[Stream[T]]] = monoid[Stream[T]](Stream.empty[T])(new Ordering[Stream[T]] { @@ -201,8 +198,7 @@ private[algebird] sealed abstract class LowPriorityMaxInstances { } /** - * [[Aggregator]] that selects the maximum instance of `T` in the - * aggregated stream. + * [[Aggregator]] that selects the maximum instance of `T` in the aggregated stream. */ case class MaxAggregator[T]()(implicit val ord: Ordering[T]) extends Aggregator[T, T, T] { override def prepare(v: T): T = v diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala b/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala index 6e621c7ce..4fb32e05d 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala @@ -17,12 +17,12 @@ limitations under the License. 
package com.twitter.algebird import java.lang.{ + Boolean => JBool, + Double => JDouble, + Float => JFloat, Integer => JInt, - Short => JShort, Long => JLong, - Float => JFloat, - Double => JDouble, - Boolean => JBool + Short => JShort } import scala.annotation.implicitNotFound @@ -30,10 +30,8 @@ import scala.annotation.implicitNotFound /** * A Metric[V] m is a function (V, V) => Double that satisfies the following properties: * - * 1. m(v1, v2) >= 0 - * 2. m(v1, v2) == 0 iff v1 == v2 - * 3. m(v1, v2) == m(v2, v1) - * 4. m(v1, v3) <= m(v1, v2) + m(v2, v3) + * 1. m(v1, v2) >= 0 2. m(v1, v2) == 0 iff v1 == v2 3. m(v1, v2) == m(v2, v1) 4. m(v1, v3) <= m(v1, v2) + + * m(v2, v3) * * If you implement this trait, make sure that you follow these rules. */ diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Min.scala b/algebird-core/src/main/scala/com/twitter/algebird/Min.scala index 69325f53b..5f41698e8 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Min.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Min.scala @@ -20,19 +20,19 @@ import algebra.{BoundedSemilattice, Semilattice} /** * Tracks the minimum wrapped instance of some ordered type `T`. * - * [[Min]][T] is a [[Semigroup]] for all types `T`. If `T` has some - * maximum element (`Long` has `Long.MaxValue`, for example), then - * [[Min]][T] is a [[Monoid]]. + * [[Min]] [T] is a [[Semigroup]] for all types `T`. If `T` has some maximum element (`Long` has + * `Long.MaxValue`, for example), then [[Min]] [T] is a [[Monoid]]. * - * @param get wrapped instance of `T` + * @param get + * wrapped instance of `T` */ case class Min[@specialized(Int, Long, Float, Double) +T](get: T) { /** - * If this instance wraps a smaller `T` than `r`, returns this - * instance, else returns `r`. + * If this instance wraps a smaller `T` than `r`, returns this instance, else returns `r`. * - * @param r instance of `Min[U]` for comparison + * @param r + * instance of `Min[U]` for comparison */ def min[U >: T](r: Min[U])(implicit ord: Ordering[U]): Min[U] = Min.ordering.min(this, r) @@ -40,27 +40,27 @@ case class Min[@specialized(Int, Long, Float, Double) +T](get: T) { /** * Identical to [[min]]. * - * @param r instance of `Min[U]` for comparison + * @param r + * instance of `Min[U]` for comparison */ def +[U >: T](r: Min[U])(implicit ord: Ordering[U]): Min[U] = min(r) } /** - * Provides a set of operations and typeclass instances needed to use - * [[Min]] instances. + * Provides a set of operations and typeclass instances needed to use [[Min]] instances. */ object Min extends MinInstances { /** - * Returns an [[Aggregator]] that selects the minimum instance of an - * ordered type `T` in the aggregated stream. + * Returns an [[Aggregator]] that selects the minimum instance of an ordered type `T` in the aggregated + * stream. */ def aggregator[T](implicit ord: Ordering[T]): MinAggregator[T] = MinAggregator()(ord) /** - * Returns a [[Semigroup]] instance with a `plus` implementation - * that always returns the minimum `T` argument. + * Returns a [[Semigroup]] instance with a `plus` implementation that always returns the minimum `T` + * argument. 
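Min is the dual of Max; its aggregator also composes with Max's into a single pass over the data (assuming Aggregator's join combinator):

  import com.twitter.algebird.{Max, Min, Semigroup}

  assert(Semigroup.plus(Min(3), Min(7)) == Min(3))

  // min and max computed in one traversal by joining the two aggregators
  val minAndMax = Min.aggregator[Int].join(Max.aggregator[Int])
  assert(minAndMax(List(3, 1, 4)) == ((1, 4)))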
*/ def minSemigroup[T](implicit ord: Ordering[T]): Semigroup[T] with Semilattice[T] = new Semigroup[T] with Semilattice[T] { @@ -73,11 +73,13 @@ private[algebird] sealed abstract class MinInstances { implicit def ordering[T: Ordering]: Ordering[Min[T]] = Ordering.by(_.get) /** - * Returns a [[Monoid]] instance for [[Min]][T] that combines - * instances using [[Min.min]] and uses `zero` for its identity. + * Returns a [[Monoid]] instance for [[Min]] [T] that combines instances using [[Min.min]] and uses `zero` + * for its identity. * - * @param zero identity of the returned [[Monoid]] instance - * @note `zero` must be `>=` every element of `T` for the returned instance to be lawful. + * @param zero + * identity of the returned [[Monoid]] instance + * @note + * `zero` must be `>=` every element of `T` for the returned instance to be lawful. */ def monoid[T: Ordering](zero: => T): Monoid[Min[T]] with BoundedSemilattice[Min[T]] = { val z = zero // avoid confusion below when overriding zero @@ -90,8 +92,8 @@ private[algebird] sealed abstract class MinInstances { } /** - * Returns a [[Semigroup]] instance for [[Min]][T]. The `plus` - * implementation always returns the minimum `Min[T]` argument. + * Returns a [[Semigroup]] instance for [[Min]] [T]. The `plus` implementation always returns the minimum + * `Min[T]` argument. */ implicit def semigroup[T: Ordering]: Semigroup[Min[T]] with Semilattice[Min[T]] = new Semigroup[Min[T]] with Semilattice[Min[T]] { @@ -109,25 +111,22 @@ private[algebird] sealed abstract class MinInstances { monoid(Long.MaxValue) /** - * [[Monoid]] for [[Min]][Double] with `zero == Double.MaxValue` - * Note: MaxValue < PositiveInfinity, but people may - * be relying on this emitting a non-infinite number. Sadness + * [[Monoid]] for [[Min]] [Double] with `zero == Double.MaxValue` Note: MaxValue < PositiveInfinity, but + * people may be relying on this emitting a non-infinite number. Sadness */ implicit def doubleMonoid: Monoid[Min[Double]] with BoundedSemilattice[Min[Double]] = monoid(Double.MaxValue) /** - * [[Monoid]] for [[Min]][Float] with `zero == Float.MaxValue` - * Note: MaxValue < PositiveInfinity, but people may - * be relying on this emitting a non-infinite number. Sadness + * [[Monoid]] for [[Min]] [Float] with `zero == Float.MaxValue` Note: MaxValue < PositiveInfinity, but + * people may be relying on this emitting a non-infinite number. Sadness */ implicit def floatMonoid: Monoid[Min[Float]] with BoundedSemilattice[Min[Float]] = monoid(Float.MaxValue) } /** - * [[Aggregator]] that selects the minimum instance of `T` in the - * aggregated stream. + * [[Aggregator]] that selects the minimum instance of `T` in the aggregated stream. */ case class MinAggregator[T]()(implicit val ord: Ordering[T]) extends Aggregator[T, T, T] { override def prepare(v: T): T = v diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala b/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala index 3fc0b7cde..ada06450b 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala @@ -3,9 +3,9 @@ package com.twitter.algebird import java.nio._ /** - * MinHasher as a Monoid operates on this class to avoid the too generic Array[Byte]. - * The bytes are assumed to be never modified. The only reason we did not use IndexedSeq[Byte] instead of Array[Byte] is - * because a ByteBuffer is used internally in MinHasher and it can wrap Array[Byte]. 
+ * MinHasher as a Monoid operates on this class to avoid the too generic Array[Byte]. The bytes are assumed to + * be never modified. The only reason we did not use IndexedSeq[Byte] instead of Array[Byte] is because a + * ByteBuffer is used internally in MinHasher and it can wrap Array[Byte]. */ case class MinHashSignature(bytes: Array[Byte]) extends AnyVal @@ -28,27 +28,23 @@ object MinHasher { } /** - * Instances of MinHasher can create, combine, and compare fixed-sized signatures of - * arbitrarily sized sets. + * Instances of MinHasher can create, combine, and compare fixed-sized signatures of arbitrarily sized sets. * - * A signature is represented by a byte array of approx maxBytes size. - * You can initialize a signature with a single element, usually a Long or String. - * You can combine any two set's signatures to produce the signature of their union. - * You can compare any two set's signatures to estimate their Jaccard similarity. - * You can use a set's signature to estimate the number of distinct values in the set. - * You can also use a combination of the above to estimate the size of the intersection of - * two sets from their signatures. - * The more bytes in the signature, the more accurate all of the above will be. + * A signature is represented by a byte array of approx maxBytes size. You can initialize a signature with a + * single element, usually a Long or String. You can combine any two set's signatures to produce the signature + * of their union. You can compare any two set's signatures to estimate their Jaccard similarity. You can use + * a set's signature to estimate the number of distinct values in the set. You can also use a combination of + * the above to estimate the size of the intersection of two sets from their signatures. The more bytes in the + * signature, the more accurate all of the above will be. * - * You can also use these signatures to quickly find similar sets without doing - * n^2 comparisons. Each signature is assigned to several buckets; sets whose signatures - * end up in the same bucket are likely to be similar. The targetThreshold controls - * the desired level of similarity - the higher the threshold, the more efficiently - * you can find all the similar sets. + * You can also use these signatures to quickly find similar sets without doing n^2 comparisons. Each + * signature is assigned to several buckets; sets whose signatures end up in the same bucket are likely to be + * similar. The targetThreshold controls the desired level of similarity - the higher the threshold, the more + * efficiently you can find all the similar sets. * - * This abstract superclass is generic with regards to the size of the hash used. - * Depending on the number of unique values in the domain of the sets, you may want - * a MinHasher16, a MinHasher32, or a new custom subclass. + * This abstract superclass is generic with regards to the size of the hash used. Depending on the number of + * unique values in the domain of the sets, you may want a MinHasher16, a MinHasher32, or a new custom + * subclass. * * This implementation is modeled after Chapter 3 of Ullman and Rajaraman's Mining of Massive Datasets: * http://infolab.stanford.edu/~ullman/mmds/ch3a.pdf @@ -67,8 +63,8 @@ abstract class MinHasher[H](val numHashes: Int, val numBands: Int)(implicit n: N private val seed = 123456789 /** - * We always use a 128 bit hash function, so the number of hash functions is different - * (and usually smaller) than the number of hashes in the signature. 
+ * We always use a 128 bit hash function, so the number of hash functions is different (and usually smaller) + * than the number of hashes in the signature. */ private val hashFunctions = { val r = new scala.util.Random(seed) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala index 49989bffb..1aab47871 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala @@ -19,16 +19,14 @@ package com.twitter.algebird import algebra.{CommutativeGroup, CommutativeMonoid} /** - * A class to calculate the first five central moments over a sequence of Doubles. - * Given the first five central moments, we can then calculate metrics like skewness - * and kurtosis. + * A class to calculate the first five central moments over a sequence of Doubles. Given the first five + * central moments, we can then calculate metrics like skewness and kurtosis. * * m{i} denotes the ith central moment. * - * This code manually inlines code to make it look like a case class. This is done - * because we changed the count from a Long to a Double to enable the scale method, - * which allows exponential decays of moments, but we didn't want to break backwards - * binary compatibility. + * This code manually inlines code to make it look like a case class. This is done because we changed the + * count from a Long to a Double to enable the scale method, which allows exponential decays of moments, but + * we didn't want to break backwards binary compatibility. */ sealed class Moments(val m0D: Double, val m1: Double, val m2: Double, val m3: Double, val m4: Double) extends Product @@ -118,8 +116,8 @@ object Moments { Aggregator.prepareMonoid { n: N => Moments(num.toDouble(n)) } /** - * Create a Moments object given a single value. - * This is useful for initializing moment calculations at the start of a stream. + * Create a Moments object given a single value. This is useful for initializing moment calculations at the + * start of a stream. */ def apply[V: Numeric](value: V)(implicit num: Numeric[V]): Moments = new Moments(1.0, num.toDouble(value), 0, 0, 0) @@ -140,22 +138,20 @@ object Moments { Some((m.m0, m.m1, m.m2, m.m3, m.m4)) /** - * When combining averages, if the counts sizes are too close we - * should use a different algorithm. This constant defines how - * close the ratio of the smaller to the total count can be: + * When combining averages, if the counts sizes are too close we should use a different algorithm. This + * constant defines how close the ratio of the smaller to the total count can be: */ private[this] val STABILITY_CONSTANT = 0.1 /** - * Given two streams of doubles (weightN, an) and (weightK, ak) of form (weighted count, - * mean), calculates the mean of the combined stream. + * Given two streams of doubles (weightN, an) and (weightK, ak) of form (weighted count, mean), calculates + * the mean of the combined stream. 
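As a rough sketch of how Moments is used downstream of these helpers: each observation becomes a count-1 Moments and the monoid merges them. Only the constructor and MomentsMonoid appear in this diff; the accessor names mean and variance are assumed here.

```scala
import com.twitter.algebird._

// Each data point becomes a Moments of count 1; the monoid merges them.
val points = List(1.0, 2.0, 3.0, 4.0)
val summed: Moments = Monoid.sum(points.map(Moments(_)))(new MomentsMonoid)

// Assumed accessor names for the derived statistics.
val mean = summed.mean         // 2.5
val variance = summed.variance // 1.25 (population variance of the four points)
```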
* - * Uses a more stable online algorithm which should be suitable for - * large numbers of records similar to: + * Uses a more stable online algorithm which should be suitable for large numbers of records similar to: * http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm * - * This differs from the implementation in MomentsGroup.scala only in that here, the counts are weighted, and are - * thus doubles instead of longs + * This differs from the implementation in MomentsGroup.scala only in that here, the counts are weighted, + * and are thus doubles instead of longs */ def getCombinedMeanDouble(weightN: Double, an: Double, weightK: Double, ak: Double): Double = if (weightN < weightK) getCombinedMeanDouble(weightK, ak, weightN, an) @@ -175,18 +171,16 @@ object Moments { class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { /** - * When combining averages, if the counts sizes are too close we - * should use a different algorithm. This constant defines how - * close the ratio of the smaller to the total count can be: + * When combining averages, if the counts sizes are too close we should use a different algorithm. This + * constant defines how close the ratio of the smaller to the total count can be: */ private val STABILITY_CONSTANT = 0.1 /** - * Given two streams of doubles (n, an) and (k, ak) of form (count, - * mean), calculates the mean of the combined stream. + * Given two streams of doubles (n, an) and (k, ak) of form (count, mean), calculates the mean of the + * combined stream. * - * Uses a more stable online algorithm which should be suitable for - * large numbers of records similar to: + * Uses a more stable online algorithm which should be suitable for large numbers of records similar to: * http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm * * we no longer use this, but we can't remove it due to binary compatibility @@ -297,12 +291,10 @@ class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { } /** - * This should not be used as a group (avoid negate and minus). It was wrongly - * believed that this was a group for several years in this code, however - * it was only being tested with positive counts (which is to say the generators - * were too weak). It isn't the case that minus and negate are totally wrong - * but (a - a) + b in general isn't associative: it won't equal a - (a - b) - * which it should. + * This should not be used as a group (avoid negate and minus). It was wrongly believed that this was a group + * for several years in this code, however it was only being tested with positive counts (which is to say the + * generators were too weak). It isn't the case that minus and negate are totally wrong but (a - a) + b in + * general isn't associative: it won't equal a - (a - b) which it should. */ @deprecated("use Moments.momentsMonoid, this isn't lawful for negative counts", "0.13.8") object MomentsGroup extends MomentsMonoid with Group[Moments] with CommutativeGroup[Moments] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Monad.scala b/algebird-core/src/main/scala/com/twitter/algebird/Monad.scala index 0a74804aa..de8c31a71 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Monad.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Monad.scala @@ -21,16 +21,12 @@ import scala.concurrent.{ExecutionContext, Future} import collection.GenTraversable /** - * Simple implementation of a Monad type-class. 
- * Subclasses only need to override apply and flatMap, but they should override map, - * join, joinWith, and sequence if there are better implementations. + * Simple implementation of a Monad type-class. Subclasses only need to override apply and flatMap, but they + * should override map, join, joinWith, and sequence if there are better implementations. * - * Laws Monads must follow: - * identities: - * flatMap(apply(x))(fn) == fn(x) - * flatMap(m)(apply _) == m - * associativity on flatMap (you can either flatMap f first, or f to g: - * flatMap(flatMap(m)(f))(g) == flatMap(m) { x => flatMap(f(x))(g) } + * Laws Monads must follow: identities: flatMap(apply(x))(fn) == fn(x) flatMap(m)(apply _) == m associativity + * on flatMap (you can either flatMap f first, or f to g: flatMap(flatMap(m)(f))(g) == flatMap(m) { x => + * flatMap(f(x))(g) } */ @implicitNotFound(msg = "Cannot find Monad type class for ${M}") trait Monad[M[_]] extends Applicative[M] { @@ -129,8 +125,7 @@ object Monad { } /** - * This enrichment allows us to use our Monad instances in for expressions: - * if (import Monad._) has been done + * This enrichment allows us to use our Monad instances in for expressions: if (import Monad._) has been done */ class MonadOperators[A, M[_]](m: M[A])(implicit monad: Monad[M]) extends ApplicativeOperators[A, M](m) { def flatMap[U](fn: (A) => M[U]): M[U] = monad.flatMap(m)(fn) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala index 844549401..b004d7b07 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala @@ -22,20 +22,20 @@ import scala.math.Equiv import scala.reflect.ClassTag import java.lang.{ + Boolean => JBool, + Double => JDouble, + Float => JFloat, Integer => JInt, - Short => JShort, Long => JLong, - Float => JFloat, - Double => JDouble, - Boolean => JBool + Short => JShort } import java.util.{List => JList, Map => JMap} import scala.collection.{Map => ScMap} /** - * Monoid (take a deep breath, and relax about the weird name): - * This is a semigroup that has an additive identity (called zero), such that a+0=a, 0+a=a, for every a + * Monoid (take a deep breath, and relax about the weird name): This is a semigroup that has an additive + * identity (called zero), such that a+0=a, 0+a=a, for every a */ @implicitNotFound(msg = "Cannot find Monoid type class for ${T}") trait Monoid[@specialized(Int, Long, Float, Double) T] @@ -72,8 +72,7 @@ trait Monoid[@specialized(Int, Long, Float, Double) T] abstract class AbstractMonoid[T] extends Monoid[T] /** - * Some(5) + Some(3) == Some(8) - * Some(5) + None == Some(5) + * Some(5) + Some(3) == Some(8) Some(5) + None == Some(5) */ class OptionMonoid[T](implicit semi: Semigroup[T]) extends Monoid[Option[T]] { override def zero: None.type = None @@ -105,8 +104,7 @@ object StringMonoid extends Monoid[String] { } /** - * List concatenation monoid. - * plus means concatenation, zero is empty list + * List concatenation monoid. plus means concatenation, zero is empty list */ class ListMonoid[T] extends Monoid[List[T]] { override def zero: List[T] = List[T]() @@ -138,9 +136,8 @@ class SeqMonoid[T] extends Monoid[Seq[T]] { /** * Pair-wise sum Array monoid. * - * plus returns left[i] + right[i] for all array elements. 
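The Monad laws quoted above can be restated as small boolean checks against any instance, using only the apply and flatMap methods of this type class; this is a sketch rather than code from this file:

```scala
import com.twitter.algebird._

// apply is the Applicative "pure"; flatMap comes from Monad itself.
def leftIdentity[M[_], A, B](x: A, fn: A => M[B])(implicit m: Monad[M]): Boolean =
  m.flatMap(m.apply(x))(fn) == fn(x)

def rightIdentity[M[_], A](ma: M[A])(implicit m: Monad[M]): Boolean =
  m.flatMap(ma)((a: A) => m.apply(a)) == ma

def flatMapAssoc[M[_], A, B, C](ma: M[A], f: A => M[B], g: B => M[C])(implicit m: Monad[M]): Boolean =
  m.flatMap(m.flatMap(ma)(f))(g) == m.flatMap(ma)((a: A) => m.flatMap(f(a))(g))
```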
- * The resulting array will be as long as the longest array (with its elements duplicated) - * zero is an empty array + * plus returns left[i] + right[i] for all array elements. The resulting array will be as long as the longest + * array (with its elements duplicated) zero is an empty array */ class ArrayMonoid[T: ClassTag](implicit semi: Semigroup[T]) extends Monoid[Array[T]] { @@ -163,8 +160,7 @@ class ArrayMonoid[T: ClassTag](implicit semi: Semigroup[T]) extends Monoid[Array } /** - * Set union monoid. - * plus means union, zero is empty set + * Set union monoid. plus means union, zero is empty set */ class SetMonoid[T] extends Monoid[Set[T]] { override def zero: Set[T] = Set[T]() @@ -184,8 +180,7 @@ class SetMonoid[T] extends Monoid[Set[T]] { } /** - * Function1 monoid. - * plus means function composition, zero is the identity function + * Function1 monoid. plus means function composition, zero is the identity function */ class Function1Monoid[T] extends Monoid[Function1[T, T]] { override def zero: T => T = identity[T] @@ -210,8 +205,7 @@ object OrVal { } /** - * Boolean OR monoid. - * plus means logical OR, zero is false. + * Boolean OR monoid. plus means logical OR, zero is false. */ object OrValMonoid extends Monoid[OrVal] { override def zero: OrVal = OrVal(false) @@ -236,8 +230,7 @@ object AndVal { } /** - * Boolean AND monoid. - * plus means logical AND, zero is true. + * Boolean AND monoid. plus means logical AND, zero is true. */ object AndValMonoid extends Monoid[AndVal] { override def zero: AndVal = AndVal(true) @@ -283,16 +276,15 @@ object Monoid extends GeneratedMonoidImplicits with ProductMonoids with FromAlge } /** - * Return an Equiv[T] that uses isNonZero to return equality for all zeros - * useful for Maps/Vectors that have many equivalent in memory representations of zero + * Return an Equiv[T] that uses isNonZero to return equality for all zeros useful for Maps/Vectors that have + * many equivalent in memory representations of zero */ def zeroEquiv[T: Equiv: Monoid]: Equiv[T] = Equiv.fromFunction { (a: T, b: T) => (!isNonZero(a) && !isNonZero(b)) || Equiv[T].equiv(a, b) } /** - * Same as v + v + v .. + v (i times in total) - * requires i >= 0, wish we had NonnegativeBigInt as a class + * Same as v + v + v .. + v (i times in total) requires i >= 0, wish we had NonnegativeBigInt as a class */ def intTimes[T](i: BigInt, v: T)(implicit mon: Monoid[T]): T = { require(i >= 0, "Cannot do negative products with a Monoid, try Group.intTimes") diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Predecessible.scala b/algebird-core/src/main/scala/com/twitter/algebird/Predecessible.scala index 880627177..9a46c54c7 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Predecessible.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Predecessible.scala @@ -16,10 +16,9 @@ limitations under the License. package com.twitter.algebird /** - * This is a typeclass to represent things which are countable down. Note that it is important - * that a value prev(t) is always less than t. Note - * that prev returns Option because this class comes with the notion that some items may reach a minimum - * key, which is None. + * This is a typeclass to represent things which are countable down. Note that it is important that a value + * prev(t) is always less than t. Note that prev returns Option because this class comes with the notion that + * some items may reach a minimum key, which is None. 
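A few concrete uses of the monoid instances and of intTimes described above; Monoid.plus and the implicit instances for Option, Set and String are assumed to be in scope from this object:

```scala
import com.twitter.algebird._

// OptionMonoid: None is the identity, Some values combine with the underlying Semigroup.
val a: Option[Int] = Monoid.plus(Option(5), Option(3))         // Some(8)
val b: Option[Int] = Monoid.plus(Option(5), Option.empty[Int]) // Some(5)

// SetMonoid: plus is union, zero is the empty set.
val s: Set[Int] = Monoid.plus(Set(1, 2), Set(2, 3)) // Set(1, 2, 3)

// intTimes repeats a value under the monoid; here, string concatenation.
val abab: String = Monoid.intTimes(BigInt(3), "ab") // "ababab"
```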
*/ trait Predecessible[T] extends java.io.Serializable { def prev(old: T): Option[T] @@ -54,8 +53,8 @@ trait Predecessible[T] extends java.io.Serializable { object Predecessible extends java.io.Serializable { /** - * This makes it easy to construct from a function when T has an ordering, which is common - * Note, your function must respect the ordering + * This makes it easy to construct from a function when T has an ordering, which is common Note, your + * function must respect the ordering */ def fromPrevOrd[T](prevFn: T => Option[T])(implicit ord: Ordering[T]): Predecessible[T] = new Predecessible[T] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Preparer.scala b/algebird-core/src/main/scala/com/twitter/algebird/Preparer.scala index 83ef4dfa4..a10d6d8a8 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Preparer.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Preparer.scala @@ -3,13 +3,12 @@ package com.twitter.algebird import java.util.PriorityQueue /** - * Preparer is a way to build up an Aggregator through composition using a - * more natural API: it allows you to start with the input type and describe a series - * of transformations and aggregations from there, rather than starting from the aggregation - * and composing "outwards" in both directions. + * Preparer is a way to build up an Aggregator through composition using a more natural API: it allows you to + * start with the input type and describe a series of transformations and aggregations from there, rather than + * starting from the aggregation and composing "outwards" in both directions. * - * Uses of Preparer will always start with a call to Preparer[A], and end with a call to - * monoidAggregate or a related method, to produce an Aggregator instance. + * Uses of Preparer will always start with a call to Preparer[A], and end with a call to monoidAggregate or a + * related method, to produce an Aggregator instance. */ sealed trait Preparer[A, T] extends java.io.Serializable { @@ -19,9 +18,8 @@ sealed trait Preparer[A, T] extends java.io.Serializable { def monoidAggregate[B, C](aggregator: MonoidAggregator[T, B, C]): MonoidAggregator[A, B, C] /** - * Produce a new Preparer that chains this one-to-many transformation. - * Because "many" could include "none", this limits future aggregations - * to those done using monoids. + * Produce a new Preparer that chains this one-to-many transformation. Because "many" could include "none", + * this limits future aggregations to those done using monoids. */ def flatMap[U](fn: T => TraversableOnce[U]): FlatMapPreparer[A, U] @@ -31,8 +29,8 @@ sealed trait Preparer[A, T] extends java.io.Serializable { def flatten[U](implicit ev: <:<[T, TraversableOnce[U]]): FlatMapPreparer[A, U] = flatMap(ev) /** - * Filter out values that do not meet the predicate. - * Like flatMap, this limits future aggregations to MonoidAggregator. + * Filter out values that do not meet the predicate. Like flatMap, this limits future aggregations to + * MonoidAggregator. */ def filter(fn: T => Boolean): FlatMapPreparer[A, T] = flatMap(t => if (fn(t)) Some(t) else None) @@ -40,8 +38,8 @@ sealed trait Preparer[A, T] extends java.io.Serializable { flatMap(t => if (p.isDefinedAt(t)) Some(p(t)) else None) /** - * count and following methods all just call monoidAggregate with one of the standard Aggregators. - * see the Aggregator object for more docs. + * count and following methods all just call monoidAggregate with one of the standard Aggregators. 
see the + * Aggregator object for more docs. */ def count(pred: T => Boolean): MonoidAggregator[A, Long, Long] = monoidAggregate(Aggregator.count(pred)) def exists(pred: T => Boolean): MonoidAggregator[A, Boolean, Boolean] = @@ -63,15 +61,15 @@ sealed trait Preparer[A, T] extends java.io.Serializable { def uniqueCount: MonoidAggregator[A, Set[T], Int] = monoidAggregate(Aggregator.uniqueCount) /** - * transform a given Aggregator into a MonoidAggregator by lifting the reduce and present stages - * into Option space + * transform a given Aggregator into a MonoidAggregator by lifting the reduce and present stages into Option + * space */ def lift[B, C](aggregator: Aggregator[T, B, C]): MonoidAggregator[A, Option[B], Option[C]] = monoidAggregate(aggregator.lift) /** - * headOption and following methods are all just calling lift with standard Aggregators - * see the Aggregator object for more docs + * headOption and following methods are all just calling lift with standard Aggregators see the Aggregator + * object for more docs */ def headOption: MonoidAggregator[A, Option[T], Option[T]] = lift(Aggregator.head) def lastOption: MonoidAggregator[A, Option[T], Option[T]] = lift(Aggregator.last) @@ -102,8 +100,8 @@ object Preparer { } /** - * A Preparer that has had zero or more map transformations applied, but no flatMaps. - * This can produce any type of Aggregator. + * A Preparer that has had zero or more map transformations applied, but no flatMaps. This can produce any + * type of Aggregator. */ trait MapPreparer[A, T] extends Preparer[A, T] { @@ -125,10 +123,9 @@ trait MapPreparer[A, T] extends Preparer[A, T] { aggregator.composePrepare(prepareFn) /** - * Split the processing into two parallel aggregations. - * You provide a function which produces two different aggregators from this preparer, - * and it will return a single aggregator which does both aggregations in parallel. - * (See also Aggregator's join method.) + * Split the processing into two parallel aggregations. You provide a function which produces two different + * aggregators from this preparer, and it will return a single aggregator which does both aggregations in + * parallel. (See also Aggregator's join method.) * * We really need to generate N versions of this for 3-way, 4-way etc splits. */ @@ -140,8 +137,8 @@ trait MapPreparer[A, T] extends Preparer[A, T] { } /** - * head and following methods all just call aggregate with one of the standard Aggregators. - * see the Aggregator object for more docs. + * head and following methods all just call aggregate with one of the standard Aggregators. see the + * Aggregator object for more docs. */ def head: Aggregator[A, T, T] = aggregate(Aggregator.head) def last: Aggregator[A, T, T] = aggregate(Aggregator.last) @@ -169,8 +166,8 @@ object MapPreparer { def apply[A, T](fn: A => T): MapPreparer[A, T] = new MapPreparer[A, T] { val prepareFn: A => T = fn } /** - * This is purely an optimization for the case of mapping by identity. - * It overrides the key methods to not actually use the identity function. + * This is purely an optimization for the case of mapping by identity. It overrides the key methods to not + * actually use the identity function. */ def identity[A]: MapPreparer[A, A] = new MapPreparer[A, A] { override val prepareFn: A => A = (a: A) => a @@ -183,8 +180,7 @@ object MapPreparer { } /** - * A Preparer that has had one or more flatMap operations applied. - * It can only accept MonoidAggregators. + * A Preparer that has had one or more flatMap operations applied. 
It can only accept MonoidAggregators. */ trait FlatMapPreparer[A, T] extends Preparer[A, T] { @@ -200,8 +196,7 @@ trait FlatMapPreparer[A, T] extends Preparer[A, T] { aggregator.sumBefore.composePrepare(prepareFn) /** - * alias of monoidAggregate for convenience - * unlike MapPreparer's aggregate, can only take MonoidAggregator + * alias of monoidAggregate for convenience unlike MapPreparer's aggregate, can only take MonoidAggregator */ def aggregate[B, C](aggregator: MonoidAggregator[T, B, C]): MonoidAggregator[A, B, C] = monoidAggregate(aggregator) @@ -213,10 +208,9 @@ trait FlatMapPreparer[A, T] extends Preparer[A, T] { monoidAggregate(Aggregator.fromMonoid(monoid)) /** - * Split the processing into two parallel aggregations. - * You provide a function which produces two different aggregators from this preparer, - * and it will return a single aggregator which does both aggregations in parallel. - * (See also Aggregator's join method.) + * Split the processing into two parallel aggregations. You provide a function which produces two different + * aggregators from this preparer, and it will return a single aggregator which does both aggregations in + * parallel. (See also Aggregator's join method.) * * We really need to generate N versions of this for 3-way, 4-way etc splits. */ @@ -241,8 +235,8 @@ object FlatMapPreparer { } /** - * This is purely an optimization for the case of flatMapping by identity. - * It overrides the key methods to not actually use the identity function. + * This is purely an optimization for the case of flatMapping by identity. It overrides the key methods to + * not actually use the identity function. */ def identity[A]: FlatMapPreparer[TraversableOnce[A], A] = new FlatMapPreparer[TraversableOnce[A], A] { override val prepareFn: TraversableOnce[A] => TraversableOnce[A] = (a: TraversableOnce[A]) => a diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Priority.scala b/algebird-core/src/main/scala/com/twitter/algebird/Priority.scala index e7cdcd8da..fa9a21338 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Priority.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Priority.scala @@ -3,13 +3,12 @@ package com.twitter.algebird /** * Priority is a type class for prioritized implicit search. * - * This type class will attempt to provide an implicit instance of `P` - * (the preferred type). If that type is not available it will - * fallback to `F` (the fallback type). If neither type is available - * then a `Priority[P, F]` instance will not be available. + * This type class will attempt to provide an implicit instance of `P` (the preferred type). If that type is + * not available it will fallback to `F` (the fallback type). If neither type is available then a `Priority[P, + * F]` instance will not be available. * - * This type can be useful for problems where multiple algorithms can - * be used, depending on the type classes available. + * This type can be useful for problems where multiple algorithms can be used, depending on the type classes + * available. 
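Going back to the Preparer API documented above, a minimal sketch of the flow; the input data and the predicate are made up for illustration:

```scala
import com.twitter.algebird._

// Start from the input type, describe transformations, end with an aggregation.
// flatMap can drop elements, so only monoid-backed aggregations are allowed afterwards.
val tokenCount: MonoidAggregator[String, Long, Long] =
  Preparer[String]
    .flatMap(_.split(" ").toList)
    .count(_.nonEmpty)

val n: Long = tokenCount(List("a b", "", "c")) // 3
```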
* * taken from non/algebra until we make algebird depend on non/algebra */ diff --git a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala index 3441e7cf7..fd21ef7bf 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala @@ -19,22 +19,25 @@ package com.twitter.algebird import scala.util.hashing.MurmurHash3 /** - * A QTree provides an approximate Map[Double,A:Monoid] suitable for range queries, quantile queries, - * and combinations of these (for example, if you use a numeric A, you can derive the inter-quartile mean). + * A QTree provides an approximate Map[Double,A:Monoid] suitable for range queries, quantile queries, and + * combinations of these (for example, if you use a numeric A, you can derive the inter-quartile mean). * - * It is loosely related to the Q-Digest data structure from http://www.cs.virginia.edu/~son/cs851/papers/ucsb.sensys04.pdf, - * but using an immutable tree structure, and carrying a generalized sum (of type A) at each node instead of just a count. + * It is loosely related to the Q-Digest data structure from + * http://www.cs.virginia.edu/~son/cs851/papers/ucsb.sensys04.pdf, but using an immutable tree structure, and + * carrying a generalized sum (of type A) at each node instead of just a count. * - * The basic idea is to keep a binary tree, where the root represents the entire range of the input keys, - * and each child node represents either the lower or upper half of its parent's range. Ranges are constrained to be - * dyadic intervals (https://en.wikipedia.org/wiki/Interval_(mathematics)#Dyadic_intervals) for ease of merging. + * The basic idea is to keep a binary tree, where the root represents the entire range of the input keys, and + * each child node represents either the lower or upper half of its parent's range. Ranges are constrained to + * be dyadic intervals (https://en.wikipedia.org/wiki/Interval_(mathematics)#Dyadic_intervals) for ease of + * merging. * * To keep the size bounded, the total count carried by any sub-tree must be at least 1/(2^k) of the total - * count at the root. Any sub-trees that do not meet this criteria have their children pruned and become leaves. - * (It's important that they not be pruned away entirely, but that we keep a fringe of low-count leaves that can - * gain weight over time and ultimately split again when warranted). + * count at the root. Any sub-trees that do not meet this criteria have their children pruned and become + * leaves. (It's important that they not be pruned away entirely, but that we keep a fringe of low-count + * leaves that can gain weight over time and ultimately split again when warranted). * - * Quantile and range queries both give hard upper and lower bounds; the true result will be somewhere in the range given. + * Quantile and range queries both give hard upper and lower bounds; the true result will be somewhere in the + * range given. * * Keys must be >= 0. */ @@ -71,18 +74,16 @@ object QTree { } /** - * The common case of wanting an offset and sum for the same value - * This is useful if you want to query the mean inside a range later. - * If you truly just care about the counts/histogram, see the value method. + * The common case of wanting an offset and sum for the same value This is useful if you want to query the + * mean inside a range later. If you truly just care about the counts/histogram, see the value method. 
*/ def apply(k: Long): QTree[Long] = apply(k -> k) /** - * uses 1/65636 as the bin size, if you want to control that see other apply - * or value methods. + * uses 1/65636 as the bin size, if you want to control that see other apply or value methods. * - * This is useful if you want to query the mean inside a range later. - * If you truly just care about the counts/histogram, see the value method. + * This is useful if you want to query the mean inside a range later. If you truly just care about the + * counts/histogram, see the value method. */ def apply(k: Double): QTree[Double] = apply(k -> k) @@ -93,15 +94,14 @@ object QTree { Some((qtree.offset, qtree.level, qtree.count, qtree.sum, qtree.lowerChild, qtree.upperChild)) /** - * If you are sure you only care about the approximate histogram - * features of QTree, you can save some space by using QTree[Unit] + * If you are sure you only care about the approximate histogram features of QTree, you can save some space + * by using QTree[Unit] */ def value(v: Long): QTree[Unit] = apply(v -> (())) /** - * If you are sure you only care about the approximate histogram - * features of QTree, you can save some space by using QTree[Unit] - * level gives a bin size of 2^level. By default this is 1/65536 (level = -16) + * If you are sure you only care about the approximate histogram features of QTree, you can save some space + * by using QTree[Unit] level gives a bin size of 2^level. By default this is 1/65536 (level = -16) */ def value(v: Double, level: Int = DefaultLevel): QTree[Unit] = apply(v -> (()), level) @@ -250,9 +250,8 @@ class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( } /** - * Find the smallest dyadic interval that contains the dyadic interval - * for this tree's root and the other tree's root, and return its - * level (that is, the power of 2 for the interval). + * Find the smallest dyadic interval that contains the dyadic interval for this tree's root and the other + * tree's root, and return its level (that is, the power of 2 for the interval). */ private def commonAncestorLevel(other: QTree[A]) = { val minLevel = _level.min(other.level) @@ -268,11 +267,9 @@ class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( } /** - * This merges with another QTree but DOES NOT compress. - * You should probably never use this and instead use - * QTreeSemigroup.plus(a, b) or .sumOption. Strongly - * prefer sumOption if you can, as it is much more efficient - * due to compressing less frequently. + * This merges with another QTree but DOES NOT compress. You should probably never use this and instead use + * QTreeSemigroup.plus(a, b) or .sumOption. Strongly prefer sumOption if you can, as it is much more + * efficient due to compressing less frequently. */ def merge(other: QTree[A])(implicit monoid: Monoid[A]): QTree[A] = { val commonAncestor = commonAncestorLevel(other) @@ -282,9 +279,8 @@ class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( } /** - * give lower and upper bounds respectively of the percentile - * value given. For instance, quantileBounds(0.5) would give - * an estimate of the median. + * give lower and upper bounds respectively of the percentile value given. For instance, quantileBounds(0.5) + * would give an estimate of the median. 
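A minimal sketch of that quantile workflow, using the QTreeSemigroup mentioned above; the compression parameter 6 and the data are arbitrary, and an implicit Monoid[Unit] is assumed to be in scope:

```scala
import com.twitter.algebird._

// The compression parameter keeps at most about 2^6 nodes after compression.
val sg = new QTreeSemigroup[Unit](6)

// QTree.value keeps only counts, which is all that quantile queries need.
val tree: QTree[Unit] =
  List(1.0, 2.0, 2.5, 4.0, 10.0)
    .map(QTree.value(_))
    .reduce(sg.plus)

// Hard bounds on the median; the true value lies somewhere in between.
val (lower, upper) = tree.quantileBounds(0.5)
```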
*/ def quantileBounds(p: Double): (Double, Double) = { require(p >= 0.0 && p <= 1.0, "The given percentile must be of the form 0 <= p <= 1.0") @@ -319,8 +315,8 @@ class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( } /** - * Get the bounds on the sums within a range (not percentile) - * This along with the rangeCountBounds can tell you the mean over a range + * Get the bounds on the sums within a range (not percentile) This along with the rangeCountBounds can tell + * you the mean over a range */ def rangeSumBounds(from: Double, to: Double)(implicit monoid: Monoid[A]): (A, A) = if (from <= lowerBound && to >= upperBound) { @@ -352,10 +348,9 @@ class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( } /** - * Users should never need to call this if they are adding QTrees using the Semigroup - * This makes sure no element in the tree has count less than - * the total count / 2^k. That means after this call there - * are at most 2^k nodes, but usually fewer. + * Users should never need to call this if they are adding QTrees using the Semigroup This makes sure no + * element in the tree has count less than the total count / 2^k. That means after this call there are at + * most 2^k nodes, but usually fewer. */ def compress(k: Int)(implicit m: Monoid[A]): QTree[A] = { val minCount = _count >> k @@ -398,9 +393,8 @@ class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( } /** - * How many total nodes are there in the QTree. - * Not meaningful for learning statistics, but interesting - * to estimate serialization size. + * How many total nodes are there in the QTree. Not meaningful for learning statistics, but interesting to + * estimate serialization size. */ def size: Int = { val childSizes = mapChildrenWithDefault(0)(_.size) @@ -439,10 +433,8 @@ class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( } /** - * This gives you the mean for the middle 50%-ile. - * This probably only makes sense if the Monoid[A] is - * equivalent to addition in Numeric[A], which is only - * used to convert to Double at the end + * This gives you the mean for the middle 50%-ile. This probably only makes sense if the Monoid[A] is + * equivalent to addition in Numeric[A], which is only used to convert to Double at the end */ def interQuartileMean(implicit n: Numeric[A], m: Monoid[A]): (Double, Double) = { val (l25, u25) = quantileBounds(0.25) @@ -466,8 +458,7 @@ trait QTreeAggregatorLike[T] { def k: Int /** - * We convert T to a Double, then the Double is converted - * to a Long by using a 2^level bucket size. + * We convert T to a Double, then the Double is converted to a Long by using a 2^level bucket size. */ def level: Int = QTree.DefaultLevel implicit def num: Numeric[T] @@ -480,9 +471,9 @@ object QTreeAggregator { } /** - * QTree aggregator is an aggregator that can be used to find the approximate percentile bounds. - * The items that are iterated over to produce this approximation cannot be negative. - * Returns an Intersection which represents the bounded approximation. + * QTree aggregator is an aggregator that can be used to find the approximate percentile bounds. The items + * that are iterated over to produce this approximation cannot be negative. Returns an Intersection which + * represents the bounded approximation. 
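A sketch of this aggregator in use; the data and the choice of k = 9 are arbitrary, and the signature is only partially visible here, so the named parameters are taken on the assumption they match the case class below:

```scala
import com.twitter.algebird._

// Bounds on the 90th percentile of a stream of non-negative doubles.
val agg = QTreeAggregator[Double](percentile = 0.9, k = 9)

// The result is an Intersection describing the lower and upper bound.
val bounds = agg(List(1.0, 2.0, 3.0, 100.0))
```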
*/ case class QTreeAggregator[T]( override val percentile: Double, @@ -499,10 +490,10 @@ case class QTreeAggregator[T]( } /** - * QTreeAggregatorLowerBound is an aggregator that is used to find an appoximate percentile. - * This is similar to a QTreeAggregator, but is a convenience because instead of returning an Intersection, - * it instead returns the lower bound of the percentile. - * Like a QTreeAggregator, the items that are iterated over to produce this approximation cannot be negative. + * QTreeAggregatorLowerBound is an aggregator that is used to find an appoximate percentile. This is similar + * to a QTreeAggregator, but is a convenience because instead of returning an Intersection, it instead returns + * the lower bound of the percentile. Like a QTreeAggregator, the items that are iterated over to produce this + * approximation cannot be negative. */ case class QTreeAggregatorLowerBound[T]( override val percentile: Double, diff --git a/algebird-core/src/main/scala/com/twitter/algebird/ResetAlgebra.scala b/algebird-core/src/main/scala/com/twitter/algebird/ResetAlgebra.scala index f6bfc76b6..63273e575 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/ResetAlgebra.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/ResetAlgebra.scala @@ -16,10 +16,8 @@ limitations under the License. package com.twitter.algebird /** - * Used to represent cases where we need to periodically reset - * a + b = a + b - * |a + b = |(a + b) - * a + |b = |b + * Used to represent cases where we need to periodically reset a + b = a + b + * |a + b = |(a + b) a + |b = |b * |a + |b = |b */ sealed trait ResetState[+A] { def get: A } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/RightFolded.scala b/algebird-core/src/main/scala/com/twitter/algebird/RightFolded.scala index bc15bf44c..b5f20c328 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/RightFolded.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/RightFolded.scala @@ -17,12 +17,11 @@ limitations under the License. package com.twitter.algebird /** - * This is an associative, but not commutative monoid - * Also, you must start on the right, with a value, and all subsequent RightFolded must - * be RightFoldedToFold objects or zero + * This is an associative, but not commutative monoid Also, you must start on the right, with a value, and all + * subsequent RightFolded must be RightFoldedToFold objects or zero * - * If you add two Folded values together, you always get the one on the left, - * so this forms a kind of reset of the fold. + * If you add two Folded values together, you always get the one on the left, so this forms a kind of reset of + * the fold. */ object RightFolded { def monoid[In, Out](foldfn: (In, Out) => Out): Monoid[RightFolded[In, Out]] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/RightFolded2.scala b/algebird-core/src/main/scala/com/twitter/algebird/RightFolded2.scala index 8f906379f..a9aba0d98 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/RightFolded2.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/RightFolded2.scala @@ -17,20 +17,16 @@ limitations under the License. package com.twitter.algebird /** - * This monoid takes a list of values of type In or Out, - * and folds to the right all the Ins into Out values, leaving - * you with a list of Out values, then finally, maps those outs - * onto Acc, where there is a group, and adds all the Accs up. 
- * So, if you have a list: - * I I I O I O O I O I O - * the monoid is equivalent to the computation: + * This monoid takes a list of values of type In or Out, and folds to the right all the Ins into Out values, + * leaving you with a list of Out values, then finally, maps those outs onto Acc, where there is a group, and + * adds all the Accs up. So, if you have a list: I I I O I O O I O I O the monoid is equivalent to the + * computation: * - * map(fold(List(I,I,I),O)) + map(fold(List(I),O)) + map(fold(List(),O)) + - * map(fold(List(I),O)) + map(fold(List(I),O)) + * map(fold(List(I,I,I),O)) + map(fold(List(I),O)) + map(fold(List(),O)) + map(fold(List(I),O)) + + * map(fold(List(I),O)) * - * This models a version of the map/reduce paradigm, where the fold happens - * on the mappers for each group on Ins, and then they are mapped to Accs, - * sent to a single reducer and all the Accs are added up. + * This models a version of the map/reduce paradigm, where the fold happens on the mappers for each group on + * Ins, and then they are mapped to Accs, sent to a single reducer and all the Accs are added up. */ object RightFolded2 { def monoid[In, Out: Group](foldfn: (In, Out) => Out): RightFolded2Monoid[In, Out, Out] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala b/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala index 0092b92ee..3ebfc547c 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Ring.scala @@ -16,38 +16,31 @@ limitations under the License. package com.twitter.algebird import java.lang.{ + Boolean => JBool, + Double => JDouble, + Float => JFloat, Integer => JInt, - Short => JShort, Long => JLong, - Float => JFloat, - Double => JDouble, - Boolean => JBool + Short => JShort } -import algebra.ring.{Ring => ARing, Rig, Rng} +import algebra.ring.{Rig, Ring => ARing, Rng} import algebra.CommutativeGroup import scala.annotation.implicitNotFound /** - * Ring: Group + multiplication (see: http://en.wikipedia.org/wiki/Ring_%28mathematics%29) - * and the three elements it defines: - * - additive identity aka zero - * - addition - * - multiplication + * Ring: Group + multiplication (see: http://en.wikipedia.org/wiki/Ring_%28mathematics%29) and the three + * elements it defines: + * - additive identity aka zero + * - addition + * - multiplication * - * Note, if you have distributive property, additive inverses, and multiplicative identity you - * can prove you have a commutative group under the ring: + * Note, if you have distributive property, additive inverses, and multiplicative identity you can prove you + * have a commutative group under the ring: * - * 1. (a + 1)*(b + 1) = a(b + 1) + (b + 1) - * 2. = ab + a + b + 1 - * 3. or: - * 4. - * 5. = (a + 1)b + (a + 1) - * 6. = ab + b + a + 1 - * 7. - * 8. So: ab + a + b + 1 == ab + b + a + 1 - * 9. using the fact that -(ab) and -1 exist, we get: - * 10. a + b == b + a + * 1. (a + 1)*(b + 1) = a(b + 1) + (b + 1) 2. = ab + a + b + 1 3. or: 4. 5. = (a + 1)b + (a + 1) 6. = ab + b + * + a + 1 7. 8. So: ab + a + b + 1 == ab + b + a + 1 9. using the fact that -(ab) and -1 exist, we get: + * 10. a + b == b + a */ @implicitNotFound(msg = "Cannot find Ring type class for ${T}") @@ -221,10 +214,9 @@ class FromAlgebraRing[T](r: ARing[T]) extends Ring[T] { } /** - * In some legacy cases, we have implemented Rings where we lacked - * the full laws. 
This allows you to be precise (only implement - * the structure you have), but unsafely use it as a Ring in legacy code - * that is expecting a Ring. + * In some legacy cases, we have implemented Rings where we lacked the full laws. This allows you to be + * precise (only implement the structure you have), but unsafely use it as a Ring in legacy code that is + * expecting a Ring. */ class UnsafeFromAlgebraRig[T](r: Rig[T]) extends Ring[T] { override def zero: T = r.zero @@ -241,10 +233,9 @@ class UnsafeFromAlgebraRig[T](r: Rig[T]) extends Ring[T] { } /** - * In some legacy cases, we have implemented Rings where we lacked - * the full laws. This allows you to be precise (only implement - * the structure you have), but unsafely use it as a Ring in legacy code - * that is expecting a Ring. + * In some legacy cases, we have implemented Rings where we lacked the full laws. This allows you to be + * precise (only implement the structure you have), but unsafely use it as a Ring in legacy code that is + * expecting a Ring. */ class UnsafeFromAlgebraRng[T](r: Rng[T]) extends Ring[T] { override def zero: T = r.zero diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala index 643aa8ae2..5ec0b4a0d 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala @@ -18,10 +18,8 @@ package com.twitter.algebird object SGD { /** - * constructs the gradient for linear regression. - * the Pos type is (Double, IndexedSeq[Double]) - * note the LAST element in the weights is the constant term. - * and note that the length of the IndexedSeq in the tuple is + * constructs the gradient for linear regression. the Pos type is (Double, IndexedSeq[Double]) note the LAST + * element in the weights is the constant term. and note that the length of the IndexedSeq in the tuple is * one less than the weights (we don't carry the constant term) */ val linearGradient: (IndexedSeq[Double], (Double, IndexedSeq[Double])) => IndexedSeq[Double] = { (w, pos) => @@ -77,11 +75,9 @@ object SGDPos { case class SGDPos[+Pos](val pos: List[Pos]) extends SGD[Pos] /** - * Basically a specific implementation of the RightFoldedMonoid - * gradient is the gradient of the function to be minimized - * To use this, you need to insert an initial weight SGDWeights - * before you start adding SGDPos objects. Otherwise you will - * just be doing list concatenation. + * Basically a specific implementation of the RightFoldedMonoid gradient is the gradient of the function to be + * minimized To use this, you need to insert an initial weight SGDWeights before you start adding SGDPos + * objects. Otherwise you will just be doing list concatenation. */ class SGDMonoid[Pos]( stepfn: (Long, IndexedSeq[Double]) => Double, diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala b/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala index 7d3ef485b..ff0dce400 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala @@ -29,15 +29,20 @@ object Scan { } /** - * Scans take streams of inputs to streams of outputs, but some scans have trivial inputs and just produce a stream of - * outputs. Streams can be thought of as being a hidden state that is queryable for a head element, and another hidden - * state that represents the rest of the stream. 
- * @param initState The initial state of the scan; think of this as an infinite stream. - * @param destructor This function decomposes a stream into the its head-element and tail-stream. - * @tparam S The hidden state of the stream that we are turning into a Scan. - * @tparam O The type of the elments of the stream that we are turning into a Scan - * @return A Scan whose inputs are irrelevant, and whose outputs are those that we would get from implementing - * a stream using the information provided to this method. + * Scans take streams of inputs to streams of outputs, but some scans have trivial inputs and just produce a + * stream of outputs. Streams can be thought of as being a hidden state that is queryable for a head + * element, and another hidden state that represents the rest of the stream. + * @param initState + * The initial state of the scan; think of this as an infinite stream. + * @param destructor + * This function decomposes a stream into the its head-element and tail-stream. + * @tparam S + * The hidden state of the stream that we are turning into a Scan. + * @tparam O + * The type of the elments of the stream that we are turning into a Scan + * @return + * A Scan whose inputs are irrelevant, and whose outputs are those that we would get from implementing a + * stream using the information provided to this method. */ def iterate[S, O](initState: S)(destructor: S => (O, S)): Aux[Any, S, O] = new Scan[Any, O] { override type State = S @@ -54,12 +59,15 @@ object Scan { def identity[A]: Aux[A, Unit, A] = fromFunction[A, A](x => x) /** - * @param initStateCreator A call-by-name method that allocates new mutable state - * @param presentAndUpdateStateFn A function that both presents the output value, and has the side-effect of updating the mutable state + * @param initStateCreator + * A call-by-name method that allocates new mutable state + * @param presentAndUpdateStateFn + * A function that both presents the output value, and has the side-effect of updating the mutable state * @tparam I * @tparam S * @tparam O - * @return A Scan that safely encapsulates state while it's doing its thing. + * @return + * A Scan that safely encapsulates state while it's doing its thing. */ def mutable[I, S, O](initStateCreator: => S)(presentAndUpdateStateFn: (I, S) => O): Aux[I, S, O] = new Scan[I, O] { @@ -81,8 +89,9 @@ object Scan { * @tparam A * @tparam B * @tparam C - * @return A scan which, when given `[a_1, ..., a_n]` outputs `[c_1, ..., c_n]` where - * `c_i = initState + aggregator.prepare(a_1) + ... + aggregator.prepare(a_i)` + * @return + * A scan which, when given `[a_1, ..., a_n]` outputs `[c_1, ..., c_n]` where `c_i = initState + + * aggregator.prepare(a_1) + ... + aggregator.prepare(a_i)` */ def fromAggregator[A, B, C](aggregator: Aggregator[A, B, C], initState: B): Aux[A, B, C] = from(initState) { (a: A, stateBeforeProcessingI: B) => @@ -98,8 +107,9 @@ object Scan { * @tparam A * @tparam B * @tparam C - * @return A scan which, when given `[a_1, ..., a_n]` outputs `[c_1, ..., c_n]` where - * `c_i = monoidAggregator.monoid.zero + aggregator.prepare(a_1) + ... + aggregator.prepare(a_i)` + * @return + * A scan which, when given `[a_1, ..., a_n]` outputs `[c_1, ..., c_n]` where `c_i = + * monoidAggregator.monoid.zero + aggregator.prepare(a_1) + ... 
+ aggregator.prepare(a_i)` */ def fromMonoidAggregator[A, B, C](monoidAggregator: MonoidAggregator[A, B, C]): Aux[A, B, C] = fromAggregator(monoidAggregator, monoidAggregator.monoid.zero) @@ -107,20 +117,22 @@ object Scan { } /** - * The Scan trait is an alternative to the `scanLeft` method on iterators/other collections for a range of - * of use-cases where `scanLeft` is awkward to use. At a high level it provides some of the same functionality as - * `scanLeft`, but with a separation of "what is the state of the scan" from - * "what are the elements that I'm scanning over?". In particular, when scanning over an iterator with `N` elements, - * the output is an iterator with `N` elements (in contrast to scanLeft's `N+1`). + * The Scan trait is an alternative to the `scanLeft` method on iterators/other collections for a range of of + * use-cases where `scanLeft` is awkward to use. At a high level it provides some of the same functionality as + * `scanLeft`, but with a separation of "what is the state of the scan" from "what are the elements that I'm + * scanning over?". In particular, when scanning over an iterator with `N` elements, the output is an iterator + * with `N` elements (in contrast to scanLeft's `N+1`). * - * If you find yourself writing a `scanLeft` over pairs of elements, where you only use one element of the pair within - * the `scanLeft`, then throw that element away in a `map` immediately after the scanLeft is done, then this - * abstraction is for you. + * If you find yourself writing a `scanLeft` over pairs of elements, where you only use one element of the + * pair within the `scanLeft`, then throw that element away in a `map` immediately after the scanLeft is done, + * then this abstraction is for you. * * The canonical method to use a scan is `apply`. * - * @tparam I The type of elements that the computation is scanning over. - * @tparam O The output type of the scan (typically distinct from the hidden `State` of the scan). + * @tparam I + * The type of elements that the computation is scanning over. + * @tparam O + * The output type of the scan (typically distinct from the hidden `State` of the scan). */ sealed abstract class Scan[-I, +O] extends Serializable { @@ -138,19 +150,21 @@ sealed abstract class Scan[-I, +O] extends Serializable { def initialState: State /** - * @param i An element in the stream to process - * @param stateBeforeProcessingI The state of the scan before processing i - * @return The output of the scan corresponding to processing i with state stateBeforeProcessing, - * along with the result of updating stateBeforeProcessing with the information from i. + * @param i + * An element in the stream to process + * @param stateBeforeProcessingI + * The state of the scan before processing i + * @return + * The output of the scan corresponding to processing i with state stateBeforeProcessing, along with the + * result of updating stateBeforeProcessing with the information from i. 
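As a concrete sketch, a running-sum scan assembled from a MonoidAggregator; the data and expected output in the comments are illustrative:

```scala
import com.twitter.algebird._

// State is the running total; each output is the total after folding in the element.
val runningSum = Scan.fromMonoidAggregator(Aggregator.prepareMonoid { i: Int => i })

val sums: List[Int] = runningSum(List(1, 2, 3, 4)) // List(1, 3, 6, 10)
```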
*/ def presentAndNextState(i: I, stateBeforeProcessingI: State): (O, State) /** * @param iter - * @return If `iter = Iterator(a_1, ..., a_n)`, return:` - * `Iterator(o_1, ..., o_n)` where - * `(o_(i+1), state_(i+1)) = presentAndNextState(a_i, state_i)` - * and `state_0 = initialState` + * @return + * If `iter = Iterator(a_1, ..., a_n)`, return:` `Iterator(o_1, ..., o_n)` where `(o_(i+1), state_(i+1)) = + * presentAndNextState(a_i, state_i)` and `state_0 = initialState` */ def scanIterator(iter: Iterator[I]): Iterator[O] = new AbstractIterator[O] { override def hasNext: Boolean = iter.hasNext @@ -167,13 +181,14 @@ sealed abstract class Scan[-I, +O] extends Serializable { /** * @param inputs * @param bf - * @tparam In The type of the input collection - * @tparam Out The type of the output collection + * @tparam In + * The type of the input collection + * @tparam Out + * The type of the output collection * @return - * Given inputs as a collection of the form `[a_1, ..., a_n]` the output will be a collection of the form: - * `[o_1, ..., o_n]` where - * `(o_(i+1), state_(i+1)) = presentAndNextState(a_i, state_i)` - * and `state_0 = initialState`. + * Given inputs as a collection of the form `[a_1, ..., a_n]` the output will be a collection of the form: + * `[o_1, ..., o_n]` where `(o_(i+1), state_(i+1)) = presentAndNextState(a_i, state_i)` and `state_0 = + * initialState`. */ def apply[In <: TraversableOnce[I], Out]( inputs: In @@ -200,13 +215,13 @@ sealed abstract class Scan[-I, +O] extends Serializable { } /** - * Return a scan that is semantically identical to - * `this.join(Scan.identity[I1])`, but where we don't pollute the `State` by pairing it - * redundantly with `Unit`. + * Return a scan that is semantically identical to `this.join(Scan.identity[I1])`, but where we don't + * pollute the `State` by pairing it redundantly with `Unit`. * @tparam I1 - * @return If this Scan's `apply` method is given inputs `[a_1, ..., a_n]` resulting in outputs - * of the form `[o_1, ..., o_n`, then this results in a Scan whose `apply` method - * returns `[(o_1, a_1), ..., (o_n, a_n)]` when given the same input. + * @return + * If this Scan's `apply` method is given inputs `[a_1, ..., a_n]` resulting in outputs of the form `[o_1, + * ..., o_n`, then this results in a Scan whose `apply` method returns `[(o_1, a_1), ..., (o_n, a_n)]` + * when given the same input. */ def joinWithInput[I1 <: I]: Aux[I1, State, (O, I1)] = from(initialState) { (i, stateBeforeProcessingI) => val (o, stateAfterProcessingI) = presentAndNextState(i, stateBeforeProcessingI) @@ -215,11 +230,11 @@ sealed abstract class Scan[-I, +O] extends Serializable { /** * Return a scan whose output is paired with the state of the scan before each input updates the state. - * @return If this Scan's `apply` method is given inputs [a_1, ..., a_n] resulting in outputs - * of the form `[o_1, ..., o_n]`, where `(o_(i+1), state_(i+1)) = presentAndNextState(a_i, state_i)` - * and `state_0 = initialState`, - * return a scan that whose apply method, when given inputs `[a_1, ..., a_n]` will return - * `[(o_1, state_0), ..., (o_n, state_(n-1))]`. + * @return + * If this Scan's `apply` method is given inputs [a_1, ..., a_n] resulting in outputs of the form `[o_1, + * ..., o_n]`, where `(o_(i+1), state_(i+1)) = presentAndNextState(a_i, state_i)` and `state_0 = + * initialState`, return a scan that whose apply method, when given inputs `[a_1, ..., a_n]` will return + * `[(o_1, state_0), ..., (o_n, state_(n-1))]`. 
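   * An illustrative sketch (not taken from this diff) of the behaviour documented above: a running-sum
   * scan built with `Scan.from`, showing the N-in/N-out property and the `(prior state, output)` pairing of
   * `joinWithPriorState`. The exact output collection type depends on the implicit builder in scope.
   *
   * {{{
   * import com.twitter.algebird.Scan
   *
   * // state = sum so far; output = sum including the current element
   * val runningSum = Scan.from(0) { (i: Int, sumSoFar: Int) =>
   *   val next = sumSoFar + i
   *   (next, next)
   * }
   *
   * runningSum(List(1, 2, 3, 4))                  // List(1, 3, 6, 10): N inputs, N outputs
   * runningSum.joinWithPriorState(List(1, 2, 3))  // List((0, 1), (1, 3), (3, 6))
   * }}}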
*/ def joinWithPriorState: Aux[I, State, (State, O)] = from(initialState) { (i, stateBeforeProcessingI) => val (o, stateAfterProcessingA) = presentAndNextState(i, stateBeforeProcessingI) @@ -228,11 +243,11 @@ sealed abstract class Scan[-I, +O] extends Serializable { /** * Return a scan whose output is paired with the state of the scan after each input updates the state. - * @return If this Scan's `apply` method is given inputs `[a_1, ..., a_n]` resulting in outputs - * of the form `[o_1, ..., o_n]`, where `(o_(i+1), state_(i+1)) = presentAndNextState(a_i, state_i)`` - * and state_0 = initialState, - * return a scan that whose apply method, when given inputs `[a_1, ..., a_n]` will return - * `[(o_1, state_1), ..., (o_n, state_n]`. + * @return + * If this Scan's `apply` method is given inputs `[a_1, ..., a_n]` resulting in outputs of the form `[o_1, + * ..., o_n]`, where `(o_(i+1), state_(i+1)) = presentAndNextState(a_i, state_i)`` and state_0 = + * initialState, return a scan that whose apply method, when given inputs `[a_1, ..., a_n]` will return + * `[(o_1, state_1), ..., (o_n, state_n]`. */ def joinWithPosteriorState: Aux[I, State, (O, State)] = from(initialState) { (i, stateBeforeProcessingI) => val (c, stateAfterProcessingA) = presentAndNextState(i, stateBeforeProcessingI) @@ -242,23 +257,23 @@ sealed abstract class Scan[-I, +O] extends Serializable { /** * For every `foo`, `scan.joinWithIndex(foo) == scan(foo).zipWithIndex`. * @return - * If this Scan's `apply` method is given inputs `[a_1, ..., a_n]` resulting in outputs - * of the form `[o_1, ..., o_n]`, return a scan that whose apply method, when given the same input, will return - * `[(o_1, 1), ..., (o_n, n)]`. + * If this Scan's `apply` method is given inputs `[a_1, ..., a_n]` resulting in outputs of the form `[o_1, + * ..., o_n]`, return a scan that whose apply method, when given the same input, will return `[(o_1, 1), + * ..., (o_n, n)]`. */ def joinWithIndex: Aux[I, (State, Long), (O, Long)] = join(Scan.index) /** - * Compose two scans pairwise such that, when given pairwise zipped inputs, the resulting scan will output pairwise - * zipped outputs. + * Compose two scans pairwise such that, when given pairwise zipped inputs, the resulting scan will output + * pairwise zipped outputs. * @param scan2 * @tparam I2 * @tparam O2 - * @return If this Scan's apply method is given inputs `[a_1, ..., a_n]` resulting in outputs of - * the form `[o_1, ..., o_n]`, and `scan2.apply([b_1, ..., b_n] = [p_1, ..., p_n]` then - * `zip` will return a scan whose apply method, when given input - * `[(a_1, b_1), ..., (a_n, b_n)]` results in the output `[(o_1, p_1), ..., (o_2, p_2)]`. - * In other words: `scan.zip(scan2)(foo.zip(bar)) == scan(foo).zip(scan2(bar))` + * @return + * If this Scan's apply method is given inputs `[a_1, ..., a_n]` resulting in outputs of the form `[o_1, + * ..., o_n]`, and `scan2.apply([b_1, ..., b_n] = [p_1, ..., p_n]` then `zip` will return a scan whose + * apply method, when given input `[(a_1, b_1), ..., (a_n, b_n)]` results in the output `[(o_1, p_1), ..., + * (o_2, p_2)]`. 
In other words: `scan.zip(scan2)(foo.zip(bar)) == scan(foo).zip(scan2(bar))` */ def zip[I2, O2](scan2: Scan[I2, O2]): Aux[(I, I2), (State, scan2.State), (O, O2)] = from((initialState, scan2.initialState)) { (i1i2, stateBeforeProcessingI1I2) => @@ -275,10 +290,11 @@ sealed abstract class Scan[-I, +O] extends Serializable { * @param scan2 * @tparam I2 * @tparam O2 - * @return If this Scan's apply method is given inputs [a_1, ..., a_n] resulting in outputs of - * the form `[o_1, ..., o_n]`, and `scan2.apply([a_1, ..., a_n] = [p_1, ..., p_n]` then - * `join` will return a scan whose apply method returns `[(o_1, p_1), ..., (o_2, p_2)]`. - * In other words: `scan.join(scan2)(foo) == scan(foo).zip(scan2(foo))` + * @return + * If this Scan's apply method is given inputs [a_1, ..., a_n] resulting in outputs of the form `[o_1, + * ..., o_n]`, and `scan2.apply([a_1, ..., a_n] = [p_1, ..., p_n]` then `join` will return a scan whose + * apply method returns `[(o_1, p_1), ..., (o_2, p_2)]`. In other words: `scan.join(scan2)(foo) == + * scan(foo).zip(scan2(foo))` */ def join[I2 <: I, O2](scan2: Scan[I2, O2]): Aux[I2, (State, scan2.State), (O, O2)] = from((initialState, scan2.initialState)) { (i, stateBeforeProcessingI) => @@ -291,9 +307,10 @@ sealed abstract class Scan[-I, +O] extends Serializable { * Takes the output of this scan and feeds as input into scan2. * @param scan2 * @tparam P - * @return If this Scan's apply method is given inputs `[a_1, ..., a_n]` resulting in outputs of - * the form `[o_1, ..., o_n]`, and `scan2.apply([o_1, ..., o_n] = [p_1, ..., p_n]` then - * `compose` will return a scan which returns `[p_1, ..., p_n]`. + * @return + * If this Scan's apply method is given inputs `[a_1, ..., a_n]` resulting in outputs of the form `[o_1, + * ..., o_n]`, and `scan2.apply([o_1, ..., o_n] = [p_1, ..., p_n]` then `compose` will return a scan which + * returns `[p_1, ..., p_n]`. */ def compose[P](scan2: Scan[O, P]): Aux[I, (State, scan2.State), P] = from((initialState, scan2.initialState)) { (i, stateBeforeProcessingI) => diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala b/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala index 30a287749..c4a5550f2 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala @@ -18,12 +18,12 @@ package com.twitter.algebird import algebra.{Semigroup => ASemigroup} import algebra.ring.AdditiveSemigroup import java.lang.{ + Boolean => JBool, + Double => JDouble, + Float => JFloat, Integer => JInt, - Short => JShort, Long => JLong, - Float => JFloat, - Double => JDouble, - Boolean => JBool + Short => JShort } import java.util.{List => JList, Map => JMap} @@ -38,23 +38,25 @@ import scala.annotation.{implicitNotFound, tailrec} * }}} * * Example instances: - * - `Semigroup[Int]`: `plus` `Int#+` - * - `Semigroup[List[T]]`: `plus` is `List#++` + * - `Semigroup[Int]`: `plus` `Int#+` + * - `Semigroup[List[T]]`: `plus` is `List#++` * - * @define T T + * @define T + * T */ @implicitNotFound(msg = "Cannot find Semigroup type class for ${T}") trait Semigroup[@specialized(Int, Long, Float, Double) T] extends ASemigroup[T] with AdditiveSemigroup[T] { /** - * Returns an instance of `$T` calculated by summing all instances in - * `iter` in one pass. Returns `None` if `iter` is empty, else - * `Some[$T]`. + * Returns an instance of `$T` calculated by summing all instances in `iter` in one pass. Returns `None` if + * `iter` is empty, else `Some[$T]`. 
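   * A quick sketch of the behaviour described above, using the companion-object helpers (assuming the usual
   * implicit instances for `Int` and `Map` are in scope); this snippet is illustrative and not part of this
   * patch:
   *
   * {{{
   * import com.twitter.algebird.Semigroup
   *
   * Semigroup.sumOption(List(1, 2, 3))      // Some(6)
   * Semigroup.sumOption(List.empty[Int])    // None
   * Semigroup.plus(Map("a" -> 1), Map("a" -> 2, "b" -> 3))  // Map("a" -> 3, "b" -> 3)
   * }}}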
* - * @param iter instances of `$T` to be combined - * @return `None` if `iter` is empty, else an option value containing the summed `$T` - * @note Override if there is a faster way to compute this sum than - * `iter.reduceLeftOption` using [[plus]]. + * @param iter + * instances of `$T` to be combined + * @return + * `None` if `iter` is empty, else an option value containing the summed `$T` + * @note + * Override if there is a faster way to compute this sum than `iter.reduceLeftOption` using [[plus]]. */ def sumOption(iter: TraversableOnce[T]): Option[T] = iter.reduceLeftOption(plus(_, _)) @@ -74,13 +76,13 @@ trait Semigroup[@specialized(Int, Long, Float, Double) T] extends ASemigroup[T] abstract class AbstractSemigroup[T] extends Semigroup[T] /** - * Either semigroup is useful for error handling. - * if everything is correct, use Right (it's right, get it?), if something goes - * wrong, use Left. plus does the normal thing for plus(Right, Right), or plus(Left, Left), - * but if exactly one is Left, we return that value (to keep the error condition). - * Typically, the left value will be a string representing the errors. + * Either semigroup is useful for error handling. if everything is correct, use Right (it's right, get it?), + * if something goes wrong, use Left. plus does the normal thing for plus(Right, Right), or plus(Left, Left), + * but if exactly one is Left, we return that value (to keep the error condition). Typically, the left value + * will be a string representing the errors. * - * @define T Either[L, R] + * @define T + * Either[L, R] */ class EitherSemigroup[L, R](implicit semigroupl: Semigroup[L], semigroupr: Semigroup[R]) extends Semigroup[Either[L, R]] { @@ -147,8 +149,7 @@ object Semigroup new Semigroup[T] { override def plus(l: T, r: T): T = associativeFn(l, r) } /** - * Same as v + v + v .. + v (i times in total) - * requires i > 0, wish we had PositiveBigInt as a class + * Same as v + v + v .. + v (i times in total) requires i > 0, wish we had PositiveBigInt as a class */ def intTimes[T](i: BigInt, v: T)(implicit sg: Semigroup[T]): T = { require(i > 0, "Cannot do non-positive products with a Semigroup, try Monoid/Group.intTimes") diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SetDiff.scala b/algebird-core/src/main/scala/com/twitter/algebird/SetDiff.scala index c41aaa2c5..7dee70bf4 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SetDiff.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SetDiff.scala @@ -17,9 +17,8 @@ limitations under the License. package com.twitter.algebird /** - * `SetDiff` is a class that represents changes applied to a set. It - * is in fact a Set[T] => Set[T], but doesn't extend Function1 since - * that brings in a pack of methods that we don't necessarily want. + * `SetDiff` is a class that represents changes applied to a set. It is in fact a Set[T] => Set[T], but + * doesn't extend Function1 since that brings in a pack of methods that we don't necessarily want. */ sealed abstract case class SetDiff[T] private (add: Set[T], remove: Set[T]) { self => @@ -39,14 +38,13 @@ sealed abstract case class SetDiff[T] private (add: Set[T], remove: Set[T]) { def apply(previous: Set[T]): Set[T] = previous ++ add -- remove /** - * Returns a diff that, if applied to a set, undoes the effects of - * this diff. + * Returns a diff that, if applied to a set, undoes the effects of this diff. 
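   * An illustrative sketch of the `apply`/`invert` relationship described above (assuming `SetDiff.of`
   * builds the diff between two sets, as in the law quoted later in this file); not part of this patch:
   *
   * {{{
   * import com.twitter.algebird.SetDiff
   *
   * val before = Set(1, 2, 3)
   * val after  = Set(2, 3, 4)
   *
   * val diff = SetDiff.of(before, after)  // removes 1, adds 4
   * diff(before)        // Set(2, 3, 4)
   * diff.invert(after)  // Set(1, 2, 3)
   * }}}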
*/ def invert: SetDiff[T] = SetDiff(remove, add) /** - * Same as apply, but fails to None if the diff's removal set has - * any items that aren't present in `previous`. + * Same as apply, but fails to None if the diff's removal set has any items that aren't present in + * `previous`. * * Returns Some(_) if and only if invert will undo. */ @@ -59,9 +57,8 @@ sealed abstract case class SetDiff[T] private (add: Set[T], remove: Set[T]) { object SetDiff { /** - * Keeping this constructor private prevents creation of ad-hoc, - * invalid `SetDiff` instances. `SetDiff`s must be created by - * construction with the supplied helper methods below. + * Keeping this constructor private prevents creation of ad-hoc, invalid `SetDiff` instances. `SetDiff`s + * must be created by construction with the supplied helper methods below. */ private[SetDiff] def apply[T](add: Set[T], remove: Set[T]): SetDiff[T] = new SetDiff[T](add, remove) {} @@ -80,8 +77,7 @@ object SetDiff { def empty[T]: SetDiff[T] = SetDiff(Set.empty, Set.empty) /** - * Tracks the changes between the old and new set in a SetDiff[T] - * instance. The law that diffs preserve is: + * Tracks the changes between the old and new set in a SetDiff[T] instance. The law that diffs preserve is: * * {{{ * val diff = SetDiff.of(a, b) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala b/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala index 33fddf9c1..ae40402b4 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala @@ -20,9 +20,9 @@ import algebra.CommutativeMonoid import com.twitter.algebird.matrix.AdaptiveMatrix /** - * A Sketch Map is a generalized version of the Count-Min Sketch that is an - * approximation of Map[K, V] that stores reference to top heavy hitters. The - * Sketch Map can approximate the sums of any summable value that has a monoid. + * A Sketch Map is a generalized version of the Count-Min Sketch that is an approximation of Map[K, V] that + * stores reference to top heavy hitters. The Sketch Map can approximate the sums of any summable value that + * has a monoid. */ /** * Hashes an arbitrary key type to one that the Sketch Map can use. @@ -159,8 +159,7 @@ case class SketchMapParams[K](seed: Int, width: Int, depth: Int, heavyHittersCou }.min /** - * Returns a new set of sorted and concatenated heavy hitters given an - * arbitrary list of keys. + * Returns a new set of sorted and concatenated heavy hitters given an arbitrary list of keys. */ def updatedHeavyHitters[V: Ordering](hitters: Seq[K], table: AdaptiveMatrix[V]): List[K] = { val mapping: Map[K, V] = @@ -181,9 +180,8 @@ object SketchMapParams { SketchMapParams[K](seed, width(eps), depth(delta), heavyHittersCount)(serialization) /** - * Functions to translate between (eps, delta) and (depth, width). The translation is: - * depth = ceil(ln 1/delta) - * width = ceil(e / eps) + * Functions to translate between (eps, delta) and (depth, width). The translation is: depth = ceil(ln + * 1/delta) width = ceil(e / eps) */ def eps(width: Int): Double = scala.math.exp(1.0) / width def delta(depth: Int): Double = 1.0 / scala.math.exp(depth) @@ -193,26 +191,23 @@ object SketchMapParams { } /** - * Data structure representing an approximation of Map[K, V], where V has an - * implicit ordering and monoid. This is a more generic version of - * CountMinSketch. 
+ * Data structure representing an approximation of Map[K, V], where V has an implicit ordering and monoid. + * This is a more generic version of CountMinSketch. * - * Values are stored in valuesTable, a 2D vector containing aggregated sums of - * values inserted to the Sketch Map. + * Values are stored in valuesTable, a 2D vector containing aggregated sums of values inserted to the Sketch + * Map. * - * The data structure stores top non-zero values, called Heavy Hitters. The - * values are sorted by an implicit reverse ordering for the value, and the - * number of heavy hitters stored is based on the heavyHittersCount set in - * params. + * The data structure stores top non-zero values, called Heavy Hitters. The values are sorted by an implicit + * reverse ordering for the value, and the number of heavy hitters stored is based on the heavyHittersCount + * set in params. * * Use SketchMapMonoid to create instances of this class. */ object SketchMap { /** - * Generates a monoid used to create SketchMap instances. Requires a - * serialization from K to Array[Byte] for hashing, an ordering for V, and a - * monoid for V. + * Generates a monoid used to create SketchMap instances. Requires a serialization from K to Array[Byte] for + * hashing, an ordering for V, and a monoid for V. */ def monoid[K, V]( params: SketchMapParams[K] @@ -232,8 +227,7 @@ case class SketchMap[K, V]( ) extends java.io.Serializable /** - * An Aggregator for the SketchMap. - * Can be created using SketchMap.aggregator + * An Aggregator for the SketchMap. Can be created using SketchMap.aggregator */ case class SketchMapAggregator[K, V: Ordering: Monoid]( params: SketchMapParams[K], diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala index 08fbe1bec..0eca7c557 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala @@ -8,14 +8,14 @@ import scala.util.{Failure, Success, Try} object SpaceSaver { /** - * Construct SpaceSaver with given capacity containing a single item. - * This is the public api to create a new SpaceSaver. + * Construct SpaceSaver with given capacity containing a single item. This is the public api to create a new + * SpaceSaver. */ def apply[T](capacity: Int, item: T): SpaceSaver[T] = SSOne(capacity, item) /** - * Construct SpaceSaver with given capacity containing a single item with provided exact count. - * This is the public api to create a new SpaceSaver. + * Construct SpaceSaver with given capacity containing a single item with provided exact count. This is the + * public api to create a new SpaceSaver. */ def apply[T](capacity: Int, item: T, count: Long): SpaceSaver[T] = SSMany(capacity, Map(item -> ((count, 0L)))) @@ -30,9 +30,9 @@ object SpaceSaver { /** * Encodes the SpaceSaver as a sequence of bytes containing in order - * - 1 byte: 1/2 => 1 = SSOne, 2 = SSMany - * - 4 bytes: the capacity - * - N bytes: the item/counters (counters as length + N*(item size + item + 2 * counters) + * - 1 byte: 1/2 => 1 = SSOne, 2 = SSMany + * - 4 bytes: the capacity + * - N bytes: the item/counters (counters as length + N*(item size + item + 2 * counters) */ def toBytes[T](ss: SpaceSaver[T], tSerializer: T => Array[Byte]): Array[Byte] = ss match { @@ -124,11 +124,11 @@ object SpaceSaver { /** * Data structure used in the Space-Saving Algorithm to find the approximate most frequent and top-k elements. 
- * The algorithm is described in "Efficient Computation of Frequent and Top-k Elements in Data Streams". - * See here: www.cs.ucsb.edu/research/tech_reports/reports/2005-23.pdf - * In the paper the data structure is called StreamSummary but we chose to call it SpaceSaver instead. - * Note that the adaptation to hadoop and parallelization were not described in the article and have not been proven - * to be mathematically correct or preserve the guarantees or benefits of the algorithm. + * The algorithm is described in "Efficient Computation of Frequent and Top-k Elements in Data Streams". See + * here: www.cs.ucsb.edu/research/tech_reports/reports/2005-23.pdf In the paper the data structure is called + * StreamSummary but we chose to call it SpaceSaver instead. Note that the adaptation to hadoop and + * parallelization were not described in the article and have not been proven to be mathematically correct or + * preserve the guarantees or benefits of the algorithm. */ sealed abstract class SpaceSaver[T] { import SpaceSaver.ordering @@ -144,7 +144,8 @@ sealed abstract class SpaceSaver[T] { def min: Long /** - * Map of item to counter, where each counter consists of an observed count and possible over-estimation (error) + * Map of item to counter, where each counter consists of an observed count and possible over-estimation + * (error) */ def counters: Map[T, (Long, Long)] @@ -159,8 +160,8 @@ sealed abstract class SpaceSaver[T] { } /** - * Get the elements that show up more than thres times. - * Returns sorted in descending order: (item, Approximate[Long], guaranteed) + * Get the elements that show up more than thres times. Returns sorted in descending order: (item, + * Approximate[Long], guaranteed) */ def mostFrequent(thres: Int): Seq[(T, Approximate[Long], Boolean)] = counters.iterator @@ -172,8 +173,7 @@ sealed abstract class SpaceSaver[T] { } /** - * Get the top-k elements. - * Returns sorted in descending order: (item, Approximate[Long], guaranteed) + * Get the top-k elements. Returns sorted in descending order: (item, Approximate[Long], guaranteed) */ def topK(k: Int): Seq[(T, Approximate[Long], Boolean)] = { require(k < capacity) @@ -187,8 +187,8 @@ sealed abstract class SpaceSaver[T] { } /** - * Check consistency with other SpaceSaver, useful for testing. - * Returns boolean indicating if they are consistent + * Check consistency with other SpaceSaver, useful for testing. Returns boolean indicating if they are + * consistent */ def consistentWith(that: SpaceSaver[T]): Boolean = (counters.keys ++ that.counters.keys).forall(item => (frequency(item) - that.frequency(item)) ~ 0) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/StatefulSummer.scala b/algebird-core/src/main/scala/com/twitter/algebird/StatefulSummer.scala index c4812648e..9388ee255 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/StatefulSummer.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/StatefulSummer.scala @@ -17,21 +17,18 @@ limitations under the License. package com.twitter.algebird /** - * A Stateful summer is something that is potentially more efficient - * (a buffer, a cache, etc...) that has the same result as a sum: - * Law 1: Semigroup.sumOption(items) == - * (Monoid.plus(items.map { stateful.put(_) }.filter { _.isDefined }, stateful.flush) && - * stateful.isFlushed) - * Law 2: isFlushed == flush.isEmpty - * @author Oscar Boykin + * A Stateful summer is something that is potentially more efficient (a buffer, a cache, etc...) 
that has the + * same result as a sum: Law 1: Semigroup.sumOption(items) == (Monoid.plus(items.map { stateful.put(_) + * }.filter { _.isDefined }, stateful.flush) && stateful.isFlushed) Law 2: isFlushed == flush.isEmpty + * @author + * Oscar Boykin */ trait StatefulSummer[V] extends Buffered[V, V] { def semigroup: Semigroup[V] } /** - * Sum the entire iterator one item at a time. Only emits on flush - * you should probably prefer BufferedSumAll + * Sum the entire iterator one item at a time. Only emits on flush you should probably prefer BufferedSumAll */ class SumAll[V](implicit override val semigroup: Semigroup[V]) extends StatefulSummer[V] { var summed: Option[V] = None diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Successible.scala b/algebird-core/src/main/scala/com/twitter/algebird/Successible.scala index 0e38b51dd..38fac8209 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Successible.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Successible.scala @@ -16,12 +16,11 @@ limitations under the License. package com.twitter.algebird /** - * This is a typeclass to represent things which increase. Note that it is important - * that a value after being incremented is always larger than it was before. Note - * that next returns Option because this class comes with the notion of the "greatest" - * key, which is None. Ints, for example, will cycle if next(java.lang.Integer.MAX_VALUE) - * is called, therefore we need a notion of what happens when we hit the bounds at - * which our ordering is violating. This is also useful for closed sets which have a fixed + * This is a typeclass to represent things which increase. Note that it is important that a value after being + * incremented is always larger than it was before. Note that next returns Option because this class comes + * with the notion of the "greatest" key, which is None. Ints, for example, will cycle if + * next(java.lang.Integer.MAX_VALUE) is called, therefore we need a notion of what happens when we hit the + * bounds at which our ordering is violating. This is also useful for closed sets which have a fixed * progression. */ trait Successible[T] extends Serializable { @@ -56,8 +55,8 @@ trait Successible[T] extends Serializable { object Successible { /** - * This makes it easy to construct from a function when T has an ordering, which is common - * Note, your function must respect the ordering + * This makes it easy to construct from a function when T has an ordering, which is common Note, your + * function must respect the ordering */ def fromNextOrd[T](nextFn: T => Option[T])(implicit ord: Ordering[T]): Successible[T] = new Successible[T] { override def next(t: T): Option[T] = nextFn(t) @@ -74,8 +73,8 @@ object Successible { new IntegralSuccessible[N] /** - * The difference between this and the default ordering on Option[T] is that it treats None - * as the max value, instead of the minimum value. + * The difference between this and the default ordering on Option[T] is that it treats None as the max + * value, instead of the minimum value. 
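   * For example, a small sketch of the None-is-largest behaviour (the method signature appears just below);
   * illustrative only, not part of this patch:
   *
   * {{{
   * import com.twitter.algebird.Successible
   *
   * val ord: Ordering[Option[Int]] = Successible.optionOrdering[Int]
   * List(Some(3), None, Some(1)).sorted(ord)  // List(Some(1), Some(3), None): None sorts last
   * }}}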
*/ def optionOrdering[T](implicit ord: Ordering[T]): Ordering[Option[T]] = new Ordering[Option[T]] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala b/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala index 0ca62ebe2..4cd9a1505 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala @@ -17,7 +17,8 @@ limitations under the License. package com.twitter.algebird /** - * @author Oscar Boykin + * @author + * Oscar Boykin */ import java.util.{LinkedHashMap => JLinkedHashMap, Map => JMap} import scala.collection.mutable.{Map => MMap} diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SummingIterator.scala b/algebird-core/src/main/scala/com/twitter/algebird/SummingIterator.scala index 24afa0a0f..cd9e7deaf 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SummingIterator.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SummingIterator.scala @@ -17,15 +17,15 @@ limitations under the License. package com.twitter.algebird /** - * @author Oscar Boykin + * @author + * Oscar Boykin */ import scala.annotation.tailrec /** - * Creates an Iterator that emits partial sums of an input Iterator[V]. - * Generally this is useful to change from processing individual Vs to - * possibly blocks of V @see SummingQueue or a cache of recent Keys in - * a V=Map[K,W] case: @see SummingCache + * Creates an Iterator that emits partial sums of an input Iterator[V]. Generally this is useful to change + * from processing individual Vs to possibly blocks of V @see SummingQueue or a cache of recent Keys in a + * V=Map[K,W] case: @see SummingCache */ object SummingIterator { def apply[V](summer: StatefulSummer[V], it: Iterator[V]): SummingIterator[V] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SummingQueue.scala b/algebird-core/src/main/scala/com/twitter/algebird/SummingQueue.scala index ab1fbdacb..0717e54c1 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SummingQueue.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SummingQueue.scala @@ -17,18 +17,18 @@ limitations under the License. package com.twitter.algebird /** - * A useful utility for aggregation systems: you buffer up some number of items - * in a thread-safe way, and when you have at most K of them, you sum them all - * together. A good use-case of this is doing a limited preaggregation before - * sending on to a next phase (from mappers to reducers on Hadoop, or between - * storm bolts). + * A useful utility for aggregation systems: you buffer up some number of items in a thread-safe way, and when + * you have at most K of them, you sum them all together. A good use-case of this is doing a limited + * preaggregation before sending on to a next phase (from mappers to reducers on Hadoop, or between storm + * bolts). * - * Without this finite buffer history, an aggregated item could build up infinite - * history, and therefore it is unbounded in the error you could introduce by - * losing the buffer. + * Without this finite buffer history, an aggregated item could build up infinite history, and therefore it is + * unbounded in the error you could introduce by losing the buffer. 
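   * A hedged sketch of typical usage, not taken from this diff; the companion-object constructor
   * `SummingQueue[V](capacity)` is assumed here, and the exact point at which a partial sum is emitted
   * depends on when the internal queue spills:
   *
   * {{{
   * import com.twitter.algebird.SummingQueue
   *
   * val q = SummingQueue[Int](3)   // buffer at most 3 items
   * q.put(1)   // typically None while the buffer still has room
   * q.put(2)
   * q.put(3)
   * q.put(4)   // may return Some(partialSum) once the buffer spills
   * q.flush    // Some(sum of whatever is still buffered), or None if empty
   * }}}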
* - * @author Ashu Singhal - * @author Oscar Boykin + * @author + * Ashu Singhal + * @author + * Oscar Boykin */ import java.util.concurrent.ArrayBlockingQueue @@ -46,9 +46,8 @@ class SummingQueue[V] private (capacity: Int)(override implicit val semigroup: S if (capacity > 0) Some(new ArrayBlockingQueue[V](capacity, true)) else None /** - * puts an item to the queue, optionally sums up the queue and returns value - * This never blocks interally. It uses offer. If the queue is full, we drain, - * sum the queue. + * puts an item to the queue, optionally sums up the queue and returns value This never blocks interally. It + * uses offer. If the queue is full, we drain, sum the queue. */ override final def put(item: V): Option[V] = if (queueOption.isDefined) { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala index b67f8203f..4eca0b803 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala @@ -41,10 +41,8 @@ object TopKMonoid extends java.io.Serializable { } /** - * A top-k monoid that is much faster than SortedListTake - * equivalent to: (left ++ right).sorted.take(k) - * but doesn't do a total sort - * If you can handle the mutability, mutable.PriorityQueueMonoid is even faster. + * A top-k monoid that is much faster than SortedListTake equivalent to: (left ++ right).sorted.take(k) but + * doesn't do a total sort If you can handle the mutability, mutable.PriorityQueueMonoid is even faster. * * NOTE!!!! This assumes the inputs are already sorted! resorting each time kills speed */ diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Window.scala b/algebird-core/src/main/scala/com/twitter/algebird/Window.scala index 16704052c..59db253ff 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Window.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Window.scala @@ -21,39 +21,25 @@ import java.io.Serializable import Operators._ /** - * Convenience case class defined with a monoid for aggregating elements over - * a finite window. + * Convenience case class defined with a monoid for aggregating elements over a finite window. * - * @param total Known running total of `T` - * @param items queue of known trailing elements. + * @param total + * Known running total of `T` + * @param items + * queue of known trailing elements. 
* - * Example usage: + * Example usage: * - * case class W28[T](window: Window[T]) { - * def total = this.window.total - * def items = this.window.items - * def size = this.window.size - * } + * case class W28[T](window: Window[T]) { def total = this.window.total def items = this.window.items def size + * = this.window.size } * - * object W28 { - * val windowSize = 28 - * def apply[T](v: T): W28[T] = W28[T](Window(v)) + * object W28 { val windowSize = 28 def apply[T](v: T): W28[T] = W28[T](Window(v)) * - * implicit def w28Monoid[T](implicit p: Priority[Group[T], Monoid[T]]): Monoid[W28[T]] = - * new Monoid[W28[T]] { - * private val WT: Monoid[Window[T]] = Window.monoid[T](windowSize) - * def zero = W28[T](WT.zero) - * def plus(a: W28[T], b: W28[T]): W28[T] = - * W28[T](WT.plus(a.window, b.window)) - * } - * } - * val elements = getElements() + * implicit def w28Monoid[T](implicit p: Priority[Group[T], Monoid[T]]): Monoid[W28[T]] = new Monoid[W28[T]] { + * private val WT: Monoid[Window[T]] = Window.monoid[T](windowSize) def zero = W28[T](WT.zero) def plus(a: + * W28[T], b: W28[T]): W28[T] = W28[T](WT.plus(a.window, b.window)) } } val elements = getElements() * - * val trailing90Totals = - * elements - * .map{ W90 } - * .scanLeft(W90(0)) { (a, b) => a + b } - * .map{ _.total } + * val trailing90Totals = elements .map{ W90 } .scanLeft(W90(0)) { (a, b) => a + b } .map{ _.total } */ case class Window[T](total: T, items: Queue[T]) { def size: Int = items.size @@ -83,8 +69,7 @@ object Window extends Serializable { WindowMonoidFromMonoid[T](size) /** - * This is a faster way to combine two Windows if you - * have a group + * This is a faster way to combine two Windows if you have a group */ def combineWithGroup[T: Group](windowSize: Int, a: Window[T], b: Window[T]): Window[T] = if (b.items.size >= windowSize) { @@ -126,7 +111,8 @@ object Window extends Serializable { /** * Provides a natural monoid for combining windows truncated to some window size. * - * @param windowSize Upper limit of the number of items in a window. + * @param windowSize + * Upper limit of the number of items in a window. */ abstract class WindowMonoid[T](windowSize: Int) extends Monoid[Window[T]] { require(windowSize >= 1, s"Windows must have positive sizes, found $windowSize") diff --git a/algebird-core/src/main/scala/com/twitter/algebird/field.scala b/algebird-core/src/main/scala/com/twitter/algebird/field.scala index a34644fa3..fc0001775 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/field.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/field.scala @@ -1,16 +1,14 @@ package com.twitter.algebird -import java.lang.{Float => JFloat, Double => JDouble} +import java.lang.{Double => JDouble, Float => JFloat} /** - * This is here to ease transition to using algebra.Field as the field - * type. Intended use is to do: + * This is here to ease transition to using algebra.Field as the field type. Intended use is to do: * * {code} import com.twitter.algebird.field._ {/code} * - * Note, this are not strictly lawful since floating point - * arithmetic using IEEE-754 is only approximately associative - * and distributive. + * Note, this are not strictly lawful since floating point arithmetic using IEEE-754 is only approximately + * associative and distributive. 
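   * The caveat above is easy to see directly; this small check is illustrative and not part of the patch:
   *
   * {{{
   * val left  = (0.1 + 0.2) + 0.3   // 0.6000000000000001
   * val right = 0.1 + (0.2 + 0.3)   // 0.6
   * left == right                   // false: IEEE-754 addition is only approximately associative
   * }}}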
*/ object field { implicit object ForFloat extends Field[Float] { @@ -79,8 +77,7 @@ object field { } /** - * These methods were originally on algebird.Field, but are not present on - * algebra.Field + * These methods were originally on algebird.Field, but are not present on algebra.Field */ implicit class AlgebirdFieldEnrichments[T](val field: Field[T]) extends AnyVal { def assertNotZero(t: T): Unit = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/immutable/BitSet.scala b/algebird-core/src/main/scala/com/twitter/algebird/immutable/BitSet.scala index df25bb628..d58a6c9ab 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/immutable/BitSet.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/immutable/BitSet.scala @@ -31,26 +31,20 @@ import BitSet.{Branch, Empty, Leaf} * This implementation is taken from cats-collections. * https://github.com/typelevel/cats-collections/blob/0336992942aba9aba4a322b629447fcabe251920/core/src/main/scala/cats/collections/BitSet.scala * - * A Bitset is a specialized type of set that tracks the `Int` values - * it contains: for each integer value, a BitSet uses a single bit to - * track whether the value is present (1) or absent (0). Bitsets are - * often sparse, since "missing" bits can be assumed to be zero. + * A Bitset is a specialized type of set that tracks the `Int` values it contains: for each integer value, a + * BitSet uses a single bit to track whether the value is present (1) or absent (0). Bitsets are often sparse, + * since "missing" bits can be assumed to be zero. * - * Unlike scala's default immutable this BitSet does not do a full - * copy on each added value. + * Unlike scala's default immutable this BitSet does not do a full copy on each added value. * - * Internally the implementation is a tree. Each leaf uses an - * Array[Long] value to hold up to 2048 bits, and each branch uses an - * Array[BitSet] to hold up to 32 subtrees (null subtrees are treated - * as empty). + * Internally the implementation is a tree. Each leaf uses an Array[Long] value to hold up to 2048 bits, and + * each branch uses an Array[BitSet] to hold up to 32 subtrees (null subtrees are treated as empty). * - * Bitset treats the values it stores as 32-bit unsigned values, which - * is relevant to the internal addressing methods as well as the order - * used by `iterator`. + * Bitset treats the values it stores as 32-bit unsigned values, which is relevant to the internal addressing + * methods as well as the order used by `iterator`. * - * The benchmarks suggest this bitset is MUCH faster than Scala's - * built-in bitset for cases where you may need many modifications and - * merges, (for example in a BloomFilter). + * The benchmarks suggest this bitset is MUCH faster than Scala's built-in bitset for cases where you may need + * many modifications and merges, (for example in a BloomFilter). */ sealed abstract class BitSet { lhs => @@ -59,39 +53,34 @@ sealed abstract class BitSet { lhs => * * Offset will always be a multiple of 2048 (2^11). * - * The `offset` is interpreted as a 32-bit unsigned integer. In - * other words, `(offset & 0xffffffffL)` will return the equivalent - * value as a signed 64-bit integer (between 0 and 4294967295). + * The `offset` is interpreted as a 32-bit unsigned integer. In other words, `(offset & 0xffffffffL)` will + * return the equivalent value as a signed 64-bit integer (between 0 and 4294967295). */ private[algebird] def offset: Int /** - * Limit is the first value beyond the range this subtree - * supports. 
+ * Limit is the first value beyond the range this subtree supports. * - * In other words, the last value in the subtree's range is `limit - 1`. - * Like `offset`, `limit` will always be a multiple of 2048. + * In other words, the last value in the subtree's range is `limit - 1`. Like `offset`, `limit` will always + * be a multiple of 2048. * * Offset, limit, and height are related: * - * limit = offset + (32^height) * 2048 - * limit > offset (assuming both values are unsigned) + * limit = offset + (32^height) * 2048 limit > offset (assuming both values are unsigned) * - * Like `offset`, `limit` is interpreted as a 32-bit unsigned - * integer. + * Like `offset`, `limit` is interpreted as a 32-bit unsigned integer. */ private[algebird] def limit: Long /** * Height represents the number of "levels" this subtree contains. * - * For leaves, height is zero. For branches, height will always be - * between 1 and 5. This is because a branch with offset=0 and - * height=5 will have limit=68719476736, which exceeds the largest - * unsigned 32-bit value we might want to store (4294967295). + * For leaves, height is zero. For branches, height will always be between 1 and 5. This is because a branch + * with offset=0 and height=5 will have limit=68719476736, which exceeds the largest unsigned 32-bit value + * we might want to store (4294967295). * - * The calculation `(32^height) * 2048` tells you how many values a - * subtree contains (i.e. how many bits it holds). + * The calculation `(32^height) * 2048` tells you how many values a subtree contains (i.e. how many bits it + * holds). */ private[algebird] def height: Int @@ -103,32 +92,28 @@ sealed abstract class BitSet { lhs => def apply(n: Int): Boolean /** - * Return a bitset that contains `n` and whose other values are - * identical to this one's. If this bitset already contains `n` then this - * method does nothing. + * Return a bitset that contains `n` and whose other values are identical to this one's. If this bitset + * already contains `n` then this method does nothing. */ def +(n: Int): BitSet /** - * Return a bitset that does not contain `n` and whose other values - * are identical to this one's. If this bitset does not contain `n` - * then this method does nothing. + * Return a bitset that does not contain `n` and whose other values are identical to this one's. If this + * bitset does not contain `n` then this method does nothing. */ def -(n: Int): BitSet /** * Return the union of two bitsets as a new immutable bitset. * - * If either bitset contains a given value, the resulting bitset - * will also contain it. + * If either bitset contains a given value, the resulting bitset will also contain it. */ def |(rhs: BitSet): BitSet /** * Return the intersection of two bitsets as a new immutable bitset. * - * The resulting bitset will only contain a value if that value is - * present in both input bitsets. + * The resulting bitset will only contain a value if that value is present in both input bitsets. */ def &(rhs: BitSet): BitSet @@ -145,15 +130,12 @@ sealed abstract class BitSet { lhs => def ^(rhs: BitSet): BitSet /** - * Return this bitset minus the bits contained in the other bitset - * as a new immutable bitset. + * Return this bitset minus the bits contained in the other bitset as a new immutable bitset. * - * The resulting bitset will contain exactly those values which do - * appear in the left-hand side but do not appear in the right-hand - * side. 
+ * The resulting bitset will contain exactly those values which do appear in the left-hand side but do not + * appear in the right-hand side. * - * If the bitsets do not intersect, the left-hand side will be - * returned. + * If the bitsets do not intersect, the left-hand side will be returned. */ def --(rhs: BitSet): BitSet @@ -171,29 +153,26 @@ sealed abstract class BitSet { lhs => /** * Add a single value `n` to this bitset. * - * This method modifies this bitset. We require that the value `n` - * is in this node's range (i.e. `offset <= n < limit`). + * This method modifies this bitset. We require that the value `n` is in this node's range (i.e. `offset <= + * n < limit`). */ private[algebird] def +=(n: Int): Unit /** * Add all values from `rhs` to this bitset. * - * This method modifies this bitset. We require that `this` and - * `rhs` are aligned (i.e. they both must have the same `offset` and - * `height`). + * This method modifies this bitset. We require that `this` and `rhs` are aligned (i.e. they both must have + * the same `offset` and `height`). */ private[algebird] def |=(rhs: BitSet): Unit /** - * Add a single value `n` to this bitset to this bitset or to the - * smallest valid bitset that could contain it. + * Add a single value `n` to this bitset to this bitset or to the smallest valid bitset that could contain + * it. * - * Unlike `+=` this method can be called with `n` outside of this - * node's range. If the value is in range, the method is equivalent - * to `+=` (and returns `this`). Otherwise, it wraps `this` in new - * branches until the node's range is large enough to contain `n`, - * then adds the value to that node, and returns it. + * Unlike `+=` this method can be called with `n` outside of this node's range. If the value is in range, + * the method is equivalent to `+=` (and returns `this`). Otherwise, it wraps `this` in new branches until + * the node's range is large enough to contain `n`, then adds the value to that node, and returns it. */ private[algebird] def mutableAdd(n: Int): BitSet @@ -210,15 +189,12 @@ sealed abstract class BitSet { lhs => /** * Return a compacted bitset containing the same values as this one. * - * This method is used to prune out "empty" branches that don't - * contain values. By default, bitset does not try to remove empty - * leaves when removing values (since repeatedly checking for this - * across many deletions would be expensive). + * This method is used to prune out "empty" branches that don't contain values. By default, bitset does not + * try to remove empty leaves when removing values (since repeatedly checking for this across many deletions + * would be expensive). * - * The bitset returned will have the same values as the current - * bitset, but is guaranteed not to contain any empty branches. - * Empty branches are not usually observable but would result in - * increased memory usage. + * The bitset returned will have the same values as the current bitset, but is guaranteed not to contain any + * empty branches. Empty branches are not usually observable but would result in increased memory usage. */ def compact: BitSet = { def recur(x: BitSet): BitSet = @@ -253,20 +229,18 @@ sealed abstract class BitSet { lhs => /** * Returns the number of distinct values in this bitset. * - * For branches, this method will return the sum of the sizes of all - * its subtrees. For leaves it returns the number of bits set in the - * leaf (i.e. the number of values the leaf contains). 
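   * An illustrative sketch of the set algebra documented above (assuming the varargs constructor
   * `BitSet(...)` carried over from the cats-collections original); not part of this patch:
   *
   * {{{
   * import com.twitter.algebird.immutable.BitSet
   *
   * val a = BitSet(1, 2, 3)
   * val b = BitSet(3, 4)
   *
   * (a | b).toSet    // Set(1, 2, 3, 4)
   * (a & b).toSet    // Set(3)
   * (a -- b).toSet   // Set(1, 2)
   * (a + 10).size    // 4L
   * }}}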
+ * For branches, this method will return the sum of the sizes of all its subtrees. For leaves it returns the + * number of bits set in the leaf (i.e. the number of values the leaf contains). */ def size: Long /** * Iterate across all values in the bitset. * - * Values in the iterator will be seen in "unsigned order" (e.g. if - * present, -1 will always come last). Here's an abbreviated view of - * this order in practice: + * Values in the iterator will be seen in "unsigned order" (e.g. if present, -1 will always come last). + * Here's an abbreviated view of this order in practice: * - * 0, 1, 2, ... 2147483646, 2147483647, -2147483648, -2147483647, ... -1 + * 0, 1, 2, ... 2147483646, 2147483647, -2147483648, -2147483647, ... -1 * * (This "unsigned order" is identical to the tree's internal order.) */ @@ -282,10 +256,9 @@ sealed abstract class BitSet { lhs => /** * Present a view of this bitset as a `scala.Set[Int]`. * - * This is provided for compatibility with Scala collections. Many - * of the set operations are implemented in terms of `BitSet`, but - * other operations (for example `map`) may copy these values into a - * different `Set` implementation. + * This is provided for compatibility with Scala collections. Many of the set operations are implemented in + * terms of `BitSet`, but other operations (for example `map`) may copy these values into a different `Set` + * implementation. */ def toSet: Set[Int] = new compat.BitSetWrapperSet(this) @@ -303,8 +276,8 @@ sealed abstract class BitSet { lhs => /** * Produce a string representation of this BitSet. * - * This representation will contain all the values in the bitset. - * For large bitsets, this operation may be very expensive. + * This representation will contain all the values in the bitset. For large bitsets, this operation may be + * very expensive. */ override def toString: String = iterator.map(_.toString).mkString("BitSet(", ", ", ")") @@ -312,8 +285,8 @@ sealed abstract class BitSet { lhs => /** * Produce a structured representation of this BitSet. * - * This representation is for internal-use only. It gives a view of - * how the bitset is encoded in a tree, showing leaves and branches. + * This representation is for internal-use only. It gives a view of how the bitset is encoded in a tree, + * showing leaves and branches. */ private[algebird] def structure: String = // This is for debugging, we don't care about coverage here @@ -338,13 +311,11 @@ sealed abstract class BitSet { lhs => /** * Universal equality. * - * This method will only return true if the right argument is also a - * `BitSet`. It does not attempt to coerce either argument in any - * way (unlike Scala collections, for example). + * This method will only return true if the right argument is also a `BitSet`. It does not attempt to coerce + * either argument in any way (unlike Scala collections, for example). * - * Two bitsets can be equal even if they have different underlying - * tree structure. (For example, one bitset's tree may have empty - * branches that the other lacks.) + * Two bitsets can be equal even if they have different underlying tree structure. (For example, one + * bitset's tree may have empty branches that the other lacks.) */ override def equals(that: Any): Boolean = that match { @@ -362,9 +333,8 @@ sealed abstract class BitSet { lhs => /** * Universal hash code. * - * Bitsets that are the equal will hash to the same value. As in - * `equals`, the values present determine the hash code, as opposed - * to the tree structure. 
+ * Bitsets that are the equal will hash to the same value. As in `equals`, the values present determine the + * hash code, as opposed to the tree structure. */ override def hashCode: Int = { var hash: Int = 1500450271 // prime number @@ -393,9 +363,8 @@ object BitSet { /** * Returns an empty leaf. * - * This is used internally with the assumption that it will be - * mutated to "add" values to it. In cases where no values need to - * be added, `empty` should be used instead. + * This is used internally with the assumption that it will be mutated to "add" values to it. In cases where + * no values need to be added, `empty` should be used instead. */ @inline private[algebird] def newEmpty(offset: Int): BitSet = Leaf(offset, new Array[Long](32)) @@ -427,8 +396,8 @@ object BitSet { } /** - * Given a value (`n`), and offset (`o`) and a height (`h`), compute - * the array index used to store the given value's bit. + * Given a value (`n`), and offset (`o`) and a height (`h`), compute the array index used to store the given + * value's bit. */ @inline private[algebird] def index(n: Int, o: Int, h: Int): Int = (n - o) >>> (h * 5 + 6) @@ -438,8 +407,7 @@ object BitSet { /** * Construct a parent for the given bitset. * - * The parent is guaranteed to be correctly aligned, and to have a - * height one greater than the given bitset. + * The parent is guaranteed to be correctly aligned, and to have a height one greater than the given bitset. */ private[algebird] def parentFor(b: BitSet): BitSet = { val h = b.height + 1 @@ -453,8 +421,8 @@ object BitSet { /** * Return a branch containing the given bitset `b` and value `n`. * - * This method assumes that `n` is outside of the range of `b`. It - * will return the smallest branch that contains both `b` and `n`. + * This method assumes that `n` is outside of the range of `b`. It will return the smallest branch that + * contains both `b` and `n`. */ @tailrec private def adoptedPlus(b: BitSet, n: Int): Branch = { @@ -478,9 +446,8 @@ object BitSet { /** * Return a branch containing the given bitsets `b` and `rhs`. * - * This method assumes that `rhs` is at least partially-outside of - * the range of `b`. It will return the smallest branch that - * contains both `b` and `rhs`. + * This method assumes that `rhs` is at least partially-outside of the range of `b`. It will return the + * smallest branch that contains both `b` and `rhs`. */ @tailrec private def adoptedUnion(b: BitSet, rhs: BitSet): BitSet = { @@ -1106,8 +1073,8 @@ object BitSet { /** * Efficient, low-level iterator for BitSet.Leaf values. * - * As mentioned in `BitSet.iterator`, this method will return values - * in unsigned order (e.g. Int.MaxValue comes before Int.MinValue). + * As mentioned in `BitSet.iterator`, this method will return values in unsigned order (e.g. Int.MaxValue + * comes before Int.MinValue). */ private class LeafIterator(offset: Int, values: Array[Long]) extends Iterator[Int] { var i: Int = 0 @@ -1144,8 +1111,7 @@ object BitSet { /** * Efficient, low-level reversed iterator for BitSet.Leaf values. * - * This class is very similar to LeafIterator but returns values in - * the reverse order. + * This class is very similar to LeafIterator but returns values in the reverse order. 
*/ private class LeafReverseIterator(offset: Int, values: Array[Long]) extends Iterator[Int] { var i: Int = 31 diff --git a/algebird-core/src/main/scala/com/twitter/algebird/immutable/BloomFilter.scala b/algebird-core/src/main/scala/com/twitter/algebird/immutable/BloomFilter.scala index af1c1fa57..71a861075 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/immutable/BloomFilter.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/immutable/BloomFilter.scala @@ -51,19 +51,17 @@ object BloomFilter { } /** - * Cardinality estimates are taken from Theorem 1 on page 15 of - * "Cardinality estimation and dynamic length adaptation for Bloom filters" - * by Papapetrou, Siberski, and Nejdl: + * Cardinality estimates are taken from Theorem 1 on page 15 of "Cardinality estimation and dynamic length + * adaptation for Bloom filters" by Papapetrou, Siberski, and Nejdl: * http://www.softnet.tuc.gr/~papapetrou/publications/Bloomfilters-DAPD.pdf * - * Roughly, by using bounds on the expected number of true bits after n elements - * have been inserted into the Bloom filter, we can go from the actual number of - * true bits (which is known) to an estimate of the cardinality. + * Roughly, by using bounds on the expected number of true bits after n elements have been inserted into the + * Bloom filter, we can go from the actual number of true bits (which is known) to an estimate of the + * cardinality. * - * approximationWidth defines an interval around the maximum-likelihood cardinality - * estimate. Namely, the approximation returned is of the form - * (min, estimate, max) = - * ((1 - approxWidth) * estimate, estimate, (1 + approxWidth) * estimate) + * approximationWidth defines an interval around the maximum-likelihood cardinality estimate. Namely, the + * approximation returned is of the form (min, estimate, max) = ((1 - approxWidth) * estimate, estimate, (1 + * + approxWidth) * estimate) */ def sizeEstimate( numBits: Int, @@ -74,17 +72,16 @@ object BloomFilter { assert(0 <= approximationWidth && approximationWidth < 1, "approximationWidth must lie in [0, 1)") /** - * s(n) is the expected number of bits that have been set to true after - * n elements have been inserted into the Bloom filter. - * This is \hat{S}(n) in the cardinality estimation paper used above. + * s(n) is the expected number of bits that have been set to true after n elements have been inserted into + * the Bloom filter. This is \hat{S}(n) in the cardinality estimation paper used above. */ def s(n: Int): Double = width * (1 - scala.math.pow(1 - 1.0 / width, numHashes * n)) /** - * sInverse(t) is the maximum likelihood value for the number of elements - * that have been inserted into the Bloom filter when it has t bits set to true. - * This is \hat{S}^{-1}(t) in the cardinality estimation paper used above. + * sInverse(t) is the maximum likelihood value for the number of elements that have been inserted into the + * Bloom filter when it has t bits set to true. This is \hat{S}^{-1}(t) in the cardinality estimation + * paper used above. */ def sInverse(t: Int): Double = scala.math.log1p(-t.toDouble / width) / (numHashes * scala.math.log1p(-1.0 / width)) @@ -111,9 +108,8 @@ object BloomFilter { /** * Bloom Filter - a probabilistic data structure to test presence of an element. * - * Operations - * 1) insert: hash the value k times, updating the bitfield at the index equal to each hashed value - * 2) query: hash the value k times. If there are k collisions, then return true; otherwise false. 
+ * Operations 1) insert: hash the value k times, updating the bitfield at the index equal to each hashed value + * 2) query: hash the value k times. If there are k collisions, then return true; otherwise false. * * http://en.wikipedia.org/wiki/Bloom_filter */ @@ -215,8 +211,7 @@ final case class BloomFilter[A](numHashes: Int, width: Int)(implicit val hash: H } /** - * This may be faster if you don't care about evaluating - * the false positive probability + * This may be faster if you don't care about evaluating the false positive probability */ def maybeContains(item: A): Boolean @@ -227,9 +222,8 @@ final case class BloomFilter[A](numHashes: Int, width: Int)(implicit val hash: H def toBitSet: BitSet /** - * Compute the Hamming distance between the two Bloom filters - * `a` and `b`. The distance is defined as the number of bits that - * need to change to in order to transform one filter into the other. + * Compute the Hamming distance between the two Bloom filters `a` and `b`. The distance is defined as the + * number of bits that need to change to in order to transform one filter into the other. */ def hammingDistance(that: Hash): Int = (this, that) match { @@ -350,8 +344,8 @@ final case class BloomFilter[A](numHashes: Int, width: Int)(implicit val hash: H override val zero: Hash = Empty /** - * Assume the bloom filters are compatible (same width and same hashing functions). This - * is the union of the 2 bloom filters. + * Assume the bloom filters are compatible (same width and same hashing functions). This is the union of + * the 2 bloom filters. */ override def plus(left: Hash, right: Hash): Hash = left ++ right @@ -413,11 +407,11 @@ final case class BloomFilter[A](numHashes: Int, width: Int)(implicit val hash: H val empty: Hash = Empty /** - * Attempts to create a new BloomFilter instance from a [[BitSet]]. Failure might occur - * if the BitSet has a maximum entry behond the BloomFilter expected size. + * Attempts to create a new BloomFilter instance from a [[BitSet]]. Failure might occur if the BitSet has a + * maximum entry behond the BloomFilter expected size. * - * This method will be helpfull on BloomFilter desirialization. Serialization is achieved - * through the serialization of the underlying [[BitSet]]. + * This method will be helpfull on BloomFilter desirialization. Serialization is achieved through the + * serialization of the underlying [[BitSet]]. */ def fromBitSet(bitSet: BitSet): Try[Hash] = if (bitSet.isEmpty) { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/macros/Cuber.scala b/algebird-core/src/main/scala/com/twitter/algebird/macros/Cuber.scala index 20ff16da7..198d4021f 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/macros/Cuber.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/macros/Cuber.scala @@ -4,26 +4,21 @@ import scala.language.experimental.{macros => sMacros} import com.twitter.algebird.macros.MacroCompat._ /** - * "Cubes" a case class or tuple, i.e. for a tuple of type - * (T1, T2, ... , TN) generates all 2^N possible combinations of type - * (Option[T1], Option[T2], ... , Option[TN]). + * "Cubes" a case class or tuple, i.e. for a tuple of type (T1, T2, ... , TN) generates all 2^N possible + * combinations of type (Option[T1], Option[T2], ... , Option[TN]). * - * This is useful for comparing some metric across all possible subsets. 
- * For example, suppose we have a set of people represented as - * case class Person(gender: String, age: Int, height: Double) - * and we want to know the average height of - * - people, grouped by gender and age - * - people, grouped by only gender - * - people, grouped by only age - * - all people + * This is useful for comparing some metric across all possible subsets. For example, suppose we have a set of + * people represented as case class Person(gender: String, age: Int, height: Double) and we want to know the + * average height of + * - people, grouped by gender and age + * - people, grouped by only gender + * - people, grouped by only age + * - all people * - * Then we could do - * > import com.twitter.algebird.macros.Cuber.cuber - * > val people: List[People] - * > val averageHeights: Map[(Option[String], Option[Int]), Double] = - * > people.flatMap { p => cuber((p.gender, p.age)).map((_,p)) } - * > .groupBy(_._1) - * > .mapValues { xs => val heights = xs.map(_.height); heights.sum / heights.length } + * Then we could do > import com.twitter.algebird.macros.Cuber.cuber > val people: List[People] > val + * averageHeights: Map[(Option[String], Option[Int]), Double] = > people.flatMap { p => cuber((p.gender, + * p.age)).map((_,p)) } > .groupBy(_._1) > .mapValues { xs => val heights = xs.map(_.height); heights.sum / + * heights.length } */ trait Cuber[I] { type K @@ -48,7 +43,7 @@ object Cuber { val tupleName = { val types = getParamTypes(c) val optionTypes = types.map(t => tq"_root_.scala.Option[$t]") - val tupleType = typeName(c)(s"Tuple${arity}") + val tupleType = typeName(c)(s"Tuple$arity") tq"_root_.scala.$tupleType[..$optionTypes]" } @@ -63,9 +58,9 @@ object Cuber { } val cuber = q""" - new _root_.com.twitter.algebird.macros.Cuber[${T}] { + new _root_.com.twitter.algebird.macros.Cuber[$T] { type K = $tupleName - def apply(in: ${T}): _root_.scala.Seq[K] = { + def apply(in: $T): _root_.scala.Seq[K] = { ..$somes (0 until (1 << $arity)).map { i => new K(..$options) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/macros/Roller.scala b/algebird-core/src/main/scala/com/twitter/algebird/macros/Roller.scala index 9ca6bc900..a328804b0 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/macros/Roller.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/macros/Roller.scala @@ -5,31 +5,24 @@ import scala.language.experimental.{macros => sMacros} import com.twitter.algebird.macros.MacroCompat._ /** - * Given a TupleN, produces a sequence of (N + 1) tuples each of arity N - * such that, for all k from 0 to N, there is a tuple with k Somes - * followed by (N - k) Nones. + * Given a TupleN, produces a sequence of (N + 1) tuples each of arity N such that, for all k from 0 to N, + * there is a tuple with k Somes followed by (N - k) Nones. * - * This is useful for comparing some metric across multiple layers of - * some hierarchy. - * For example, suppose we have some climate data represented as - * case class Data(continent: String, country: String, city: String, temperature: Double) - * and we want to know the average temperatures of + * This is useful for comparing some metric across multiple layers of some hierarchy. 
For example, suppose we + * have some climate data represented as case class Data(continent: String, country: String, city: String, + * temperature: Double) and we want to know the average temperatures of * - each continent * - each (continent, country) pair * - each (continent, country, city) triple * - * Here we desire the (continent, country) and (continent, country, city) - * pair because, for example, if we grouped by city instead of by - * (continent, country, city), we would accidentally combine the results for + * Here we desire the (continent, country) and (continent, country, city) pair because, for example, if we + * grouped by city instead of by (continent, country, city), we would accidentally combine the results for * Paris, Texas and Paris, France. * - * Then we could do - * > import com.twitter.algebird.macros.Roller.roller - * > val data: List[Data] - * > val averageTemps: Map[(Option[String], Option[String], Option[String]), Double] = - * > data.flatMap { d => roller((d.continent, d.country, d.city)).map((_, d)) } - * > .groupBy(_._1) - * > .mapValues { xs => val temps = xs.map(_.temperature); temps.sum / temps.length } + * Then we could do > import com.twitter.algebird.macros.Roller.roller > val data: List[Data] > val + * averageTemps: Map[(Option[String], Option[String], Option[String]), Double] = > data.flatMap { d => + * roller((d.continent, d.country, d.city)).map((_, d)) } > .groupBy(_._1) > .mapValues { xs => val temps = + * xs.map(_.temperature); temps.sum / temps.length } */ trait Roller[I] { type K @@ -54,7 +47,7 @@ object Roller { val tupleName = { val types = getParamTypes(c) val optionTypes = types.map(t => tq"_root_.scala.Option[$t]") - val tupleType = typeName(c)(s"Tuple${arity}") + val tupleType = typeName(c)(s"Tuple$arity") tq"_root_.scala.$tupleType[..$optionTypes]" } @@ -72,9 +65,9 @@ object Roller { } val roller = q""" - new _root_.com.twitter.algebird.macros.Roller[${T}] { + new _root_.com.twitter.algebird.macros.Roller[$T] { type K = $tupleName - def apply(in: ${T}): _root_.scala.Seq[K] = { + def apply(in: $T): _root_.scala.Seq[K] = { ..$somes Seq(..$items) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala b/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala index 29e60316d..f970c43f3 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala @@ -20,8 +20,8 @@ import scala.collection.mutable.{ArrayBuffer, Map => MMap} import com.twitter.algebird.{AdaptiveVector, Monoid} /** - * A Matrix structure that is designed to hide moving between sparse and dense representations - * Initial support here is focused on a dense row count with a sparse set of columns + * A Matrix structure that is designed to hide moving between sparse and dense representations Initial support + * here is focused on a dense row count with a sparse set of columns */ abstract class AdaptiveMatrix[V: Monoid] extends Serializable { def rows: Int diff --git a/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala b/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala index d502da156..1da14102c 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/monad/StateWithError.scala @@ -19,10 +19,8 @@ package com.twitter.algebird.monad import com.twitter.algebird.{Monad, 
Semigroup} /** - * Monad to handle mutating input state and possible failures. - * This is used to interact in the planning phase with existing - * mutable APIs (like storm or cascading), but retain the ability - * to compose carefully. + * Monad to handle mutating input state and possible failures. This is used to interact in the planning phase + * with existing mutable APIs (like storm or cascading), but retain the ability to compose carefully. */ sealed trait StateWithError[S, +F, +T] { def join[F1 >: F, U]( @@ -104,8 +102,7 @@ object StateWithError { StateFn(_ => Left(f)) /** - * Use like fromEither[Int](Right("good")) - * to get a constant Either in the monad + * Use like fromEither[Int](Right("good")) to get a constant Either in the monad */ def fromEither[S]: ConstantStateMaker[S] = new ConstantStateMaker[S] class ConstantStateMaker[S] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/monad/Trampoline.scala b/algebird-core/src/main/scala/com/twitter/algebird/monad/Trampoline.scala index 6e82042df..07e296c9b 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/monad/Trampoline.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/monad/Trampoline.scala @@ -46,9 +46,8 @@ object Trampoline { FlatMapped(unit, (_: Unit) => Done(a)) /** - * Use this to call to another trampoline returning function - * you break the effect of this if you directly recursively call a Trampoline - * returning function + * Use this to call to another trampoline returning function you break the effect of this if you directly + * recursively call a Trampoline returning function */ def call[A](layzee: => Trampoline[A]): Trampoline[A] = FlatMapped(unit, (_: Unit) => layzee) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueAggregator.scala b/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueAggregator.scala index 7326b3c24..3a5e212b8 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueAggregator.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueAggregator.scala @@ -21,9 +21,9 @@ import scala.collection.JavaConverters._ import java.util.PriorityQueue /** - * This gives you the `max` smallest items. If you want the biggest reverse the Ordering. - * Note that PriorityQueue is mutable so it is a good idea to copy this into - * an immutable view before using it, as is done in PriorityQueueToListAggregator + * This gives you the `max` smallest items. If you want the biggest reverse the Ordering. Note that + * PriorityQueue is mutable so it is a good idea to copy this into an immutable view before using it, as is + * done in PriorityQueueToListAggregator */ abstract class PriorityQueueAggregator[A, +C](max: Int)(implicit ord: Ordering[A]) extends MonoidAggregator[A, PriorityQueue[A], C] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueMonoid.scala index f49daf957..3d881eb22 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueMonoid.scala @@ -20,11 +20,9 @@ import com.twitter.algebird.Monoid import java.util.PriorityQueue /** - * for sort-with take and better performance over large values - * The priority queues should be MAX queues, i.e. 
the ones we want least - * should be in the .peek position - * This is MUCH Faster for Top-K algorithms - * Note this is MUTABLE. When you put something in plus, it is changed! + * for sort-with take and better performance over large values The priority queues should be MAX queues, i.e. + * the ones we want least should be in the .peek position This is MUCH Faster for Top-K algorithms Note this + * is MUTABLE. When you put something in plus, it is changed! */ class PriorityQueueMonoid[K](max: Int)(implicit ord: Ordering[K]) extends Monoid[PriorityQueue[K]] { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/package.scala b/algebird-core/src/main/scala/com/twitter/algebird/package.scala index 0591ff3ce..381b34e57 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/package.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/package.scala @@ -19,9 +19,8 @@ package com.twitter package object algebird { /** - * TODO remove these in scala 2.11 and use the standard there. - * these are here to avoid massive bloat around these classes - * https://github.com/twitter/algebird/issues/263 + * TODO remove these in scala 2.11 and use the standard there. these are here to avoid massive bloat around + * these classes https://github.com/twitter/algebird/issues/263 */ private[algebird] abstract class AbstractIterable[T] extends Iterable[T] private[algebird] abstract class AbstractIterator[T] extends Iterator[T] diff --git a/algebird-core/src/main/scala/com/twitter/algebird/statistics/Counter.scala b/algebird-core/src/main/scala/com/twitter/algebird/statistics/Counter.scala index 3b7d2df38..a135b771c 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/statistics/Counter.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/statistics/Counter.scala @@ -19,8 +19,9 @@ package com.twitter.algebird.statistics import java.util.concurrent.atomic.AtomicLong /** - * Counter abstraction that can optionally be thread safe - * @author Julien Le Dem + * Counter abstraction that can optionally be thread safe + * @author + * Julien Le Dem */ private object Counter { def apply(threadSafe: Boolean): Counter = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/statistics/GaussianDistributionMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/statistics/GaussianDistributionMonoid.scala index 6f754b861..d4ea8f9ea 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/statistics/GaussianDistributionMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/statistics/GaussianDistributionMonoid.scala @@ -3,9 +3,10 @@ package com.twitter.algebird.statistics import com.twitter.algebird.Monoid /** - * @param mean Mean - * @param sigma2 Variance, where sqrt(sigma2) is the standard deviation - * aka Normal distribution + * @param mean + * Mean + * @param sigma2 + * Variance, where sqrt(sigma2) is the standard deviation aka Normal distribution */ case class GaussianDistribution(mean: Double, sigma2: Double) { def stddev: Double = math.sqrt(sigma2) @@ -19,9 +20,8 @@ object GaussianDistribution { } /** - * This monoid stems from the fact that if X and Y are independent random variables - * that are normally distributed, then their sum is also - * normally distributed, with its new mean equal to the sum of two means + * This monoid stems from the fact that if X and Y are independent random variables that are normally + * distributed, then their sum is also normally distributed, with its new mean equal to the sum of two means * and variance 
equal to the sum of two variances. * [[http://en.wikipedia.org/wiki/Sum_of_normally_distributed_random_variables]] */ diff --git a/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala b/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala index 46a2f1bfc..5c3e4c37b 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala @@ -16,8 +16,9 @@ limitations under the License. package com.twitter.algebird.statistics /** - * used to keep track of stats and time spent processing iterators passed to the methods - * @author Julien Le Dem + * used to keep track of stats and time spent processing iterators passed to the methods + * @author + * Julien Le Dem */ private class IterCallStatistics(threadSafe: Boolean) { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/statistics/Statistics.scala b/algebird-core/src/main/scala/com/twitter/algebird/statistics/Statistics.scala index c0e20ab2f..ce166c250 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/statistics/Statistics.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/statistics/Statistics.scala @@ -18,10 +18,11 @@ package com.twitter.algebird.statistics import com.twitter.algebird.{Group, Monoid, Ring, Semigroup} /** - * These wrappers can be used to collect statistics around usage of monoids - * They are thread safe unless false is passed to the constructor (to remove overhead when threads are not used) + * These wrappers can be used to collect statistics around usage of monoids They are thread safe unless false + * is passed to the constructor (to remove overhead when threads are not used) * - * @author Julien Le Dem + * @author + * Julien Le Dem */ /** collect statistics about the calls to the wrapped Semigroup */ class StatisticsSemigroup[T](threadSafe: Boolean = true)(implicit wrappedSemigroup: Semigroup[T]) @@ -49,7 +50,8 @@ class StatisticsSemigroup[T](threadSafe: Boolean = true)(implicit wrappedSemigro } /** - * @see StatisticsSemigroup + * @see + * StatisticsSemigroup */ class StatisticsMonoid[T](threadSafe: Boolean = true)(implicit wrappedMonoid: Monoid[T]) extends StatisticsSemigroup[T](threadSafe) @@ -78,7 +80,8 @@ class StatisticsMonoid[T](threadSafe: Boolean = true)(implicit wrappedMonoid: Mo } /** - * @see StatisticsSemigroup + * @see + * StatisticsSemigroup */ class StatisticsGroup[T](threadSafe: Boolean = true)(implicit group: Group[T]) extends StatisticsMonoid[T](threadSafe) @@ -108,7 +111,8 @@ class StatisticsGroup[T](threadSafe: Boolean = true)(implicit group: Group[T]) } /** - * @see StatisticsSemigroup + * @see + * StatisticsSemigroup */ class StatisticsRing[T](threadSafe: Boolean = true)(implicit ring: Ring[T]) extends StatisticsGroup[T](threadSafe) diff --git a/algebird-core/src/test/scala/com/twitter/algebird/AlgebraResolutionTest.scala b/algebird-core/src/test/scala/com/twitter/algebird/AlgebraResolutionTest.scala index c6c3e6d7e..a812c4a07 100644 --- a/algebird-core/src/test/scala/com/twitter/algebird/AlgebraResolutionTest.scala +++ b/algebird-core/src/test/scala/com/twitter/algebird/AlgebraResolutionTest.scala @@ -3,8 +3,7 @@ package com.twitter.algebird import org.scalatest.funsuite.AnyFunSuite /** - * This is just a compilation test that we can resolve - * algebird types from implicit algebra instances. 
+ * This is just a compilation test that we can resolve algebird types from implicit algebra instances. */ class AlgebraResolutionTest extends AnyFunSuite { // A type with no built in algebird algebras diff --git a/algebird-generic/src/main/scala/com/twitter/algebird/generic/EquivOrdering.scala b/algebird-generic/src/main/scala/com/twitter/algebird/generic/EquivOrdering.scala index eadbcd899..70934be1a 100644 --- a/algebird-generic/src/main/scala/com/twitter/algebird/generic/EquivOrdering.scala +++ b/algebird-generic/src/main/scala/com/twitter/algebird/generic/EquivOrdering.scala @@ -22,8 +22,7 @@ object EquivOrdering extends EquivOrdering1 { } /** - * this is intentionally not implicit to avoid superceding the instance that may be - * set up in a companion + * this is intentionally not implicit to avoid superceding the instance that may be set up in a companion * * use it with implicit val myOrd: Ordering[MyType] = genericOrdering */ @@ -42,8 +41,7 @@ abstract class EquivOrdering1 { } /** - * this is intentionally not implicit to avoid superceding the instance that may be - * set up in a companion + * this is intentionally not implicit to avoid superceding the instance that may be set up in a companion * * use it with implicit val myEqv: Equiv[MyType] = genericEquiv */ diff --git a/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala b/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala index eb151aa14..34a67ceaa 100644 --- a/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala +++ b/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala @@ -25,8 +25,7 @@ abstract class Shapeless3 extends Shapeless2 { new HConsRing(a, lb.value) /** - * this is intentionally not implicit to avoid superceding the instance that may be - * set up in a companion + * this is intentionally not implicit to avoid superceding the instance that may be set up in a companion * * use it with implicit val myRing: Ring[MyType] = genericRing */ @@ -49,8 +48,7 @@ abstract class Shapeless2 extends Shapeless1 { new HConsGroup(a, lb.value) /** - * this is intentionally not implicit to avoid superceding the instance that may be - * set up in a companion + * this is intentionally not implicit to avoid superceding the instance that may be set up in a companion * * use it with implicit val myGroup: Group[MyType] = genericGroup */ @@ -73,8 +71,7 @@ abstract class Shapeless1 extends Shapeless0 { new HConsMonoid(a, lb.value) /** - * this is intentionally not implicit to avoid superceding the instance that may be - * set up in a companion + * this is intentionally not implicit to avoid superceding the instance that may be set up in a companion * * use it with implicit val myMonoid: Monoid[MyType] = genericMonoid */ @@ -94,8 +91,7 @@ abstract class Shapeless0 { new HConsSemigroup[A, B](a, lb.value) /** - * this is intentionally not implicit to avoid superceding the instance that may be - * set up in a companion + * this is intentionally not implicit to avoid superceding the instance that may be set up in a companion * * use it with implicit val mySemigroup: Semigroup[MyType] = genericSemigroup */ diff --git a/algebird-spark/src/main/scala/com/twitter/algebird/spark/AlgebirdRDD.scala b/algebird-spark/src/main/scala/com/twitter/algebird/spark/AlgebirdRDD.scala index a1e467f47..0a12a3856 100644 --- a/algebird-spark/src/main/scala/com/twitter/algebird/spark/AlgebirdRDD.scala +++ 
b/algebird-spark/src/main/scala/com/twitter/algebird/spark/AlgebirdRDD.scala @@ -5,19 +5,15 @@ import org.apache.spark.Partitioner import scala.reflect.ClassTag /** - * import com.twitter.algebird.spark.ToAlgebird - * to get the enrichment to do: - * myRdd.algebird: AlgebirdRDD[T] + * import com.twitter.algebird.spark.ToAlgebird to get the enrichment to do: myRdd.algebird: AlgebirdRDD[T] * * This adds methods to Spark RDDs to use Algebird */ class AlgebirdRDD[T](val rdd: RDD[T]) extends AnyVal { /** - * Apply an Aggregator to return a single value for the whole RDD. - * If the RDD is empty, None is returned - * requires a commutative Semigroup. To generalize to non-commutative, we need a sorted partition for - * T. + * Apply an Aggregator to return a single value for the whole RDD. If the RDD is empty, None is returned + * requires a commutative Semigroup. To generalize to non-commutative, we need a sorted partition for T. */ def aggregateOption[B: ClassTag, C](agg: Aggregator[T, B, C]): Option[C] = { val pr = rdd.mapPartitions( @@ -37,9 +33,8 @@ class AlgebirdRDD[T](val rdd: RDD[T]) extends AnyVal { } /** - * This will throw if you use a non-MonoidAggregator with an empty RDD - * requires a commutative Semigroup. To generalize to non-commutative, we need a sorted partition for - * T. + * This will throw if you use a non-MonoidAggregator with an empty RDD requires a commutative Semigroup. To + * generalize to non-commutative, we need a sorted partition for T. */ def aggregate[B: ClassTag, C](agg: Aggregator[T, B, C]): C = (aggregateOption[B, C](agg), agg.semigroup) match { @@ -49,9 +44,8 @@ class AlgebirdRDD[T](val rdd: RDD[T]) extends AnyVal { } /** - * Apply an Aggregator to the values for each key. - * requires a commutative Semigroup. To generalize to non-commutative, we need a sorted partition for - * T. + * Apply an Aggregator to the values for each key. requires a commutative Semigroup. To generalize to + * non-commutative, we need a sorted partition for T. */ def aggregateByKey[K: ClassTag, V1, U: ClassTag, V2]( agg: Aggregator[V1, U, V2] @@ -64,9 +58,8 @@ class AlgebirdRDD[T](val rdd: RDD[T]) extends AnyVal { new PairRDDFunctions(kv)(implicitly, implicitly, ordK.getPreferred.orNull) /** - * Apply an Aggregator to the values for each key with a custom Partitioner. - * requires a commutative Semigroup. To generalize to non-commutative, we need a sorted partition for - * T. + * Apply an Aggregator to the values for each key with a custom Partitioner. requires a commutative + * Semigroup. To generalize to non-commutative, we need a sorted partition for T. */ def aggregateByKey[K: ClassTag, V1, U: ClassTag, V2]( part: Partitioner, @@ -88,9 +81,8 @@ class AlgebirdRDD[T](val rdd: RDD[T]) extends AnyVal { private def keyed[K, V](implicit ev: T <:< (K, V)): RDD[(K, V)] = rdd.asInstanceOf[RDD[(K, V)]] /** - * Use the implicit semigroup to sum by keys - * requires a commutative Semigroup. To generalize to non-commutative, we need a sorted partition for - * T. + * Use the implicit semigroup to sum by keys requires a commutative Semigroup. To generalize to + * non-commutative, we need a sorted partition for T. */ def sumByKey[K: ClassTag, V: ClassTag: Semigroup](implicit ev: T <:< (K, V), @@ -99,10 +91,9 @@ class AlgebirdRDD[T](val rdd: RDD[T]) extends AnyVal { sumByKey(Partitioner.defaultPartitioner(rdd)) /** - * Use the implicit semigroup to sum by keys with a custom Partitioner. - * requires a commutative Semigroup. To generalize to non-commutative, we need a sorted partition for - * T. 
- * Unfortunately we need to use a different name than sumByKey in scala 2.11 + * Use the implicit semigroup to sum by keys with a custom Partitioner. requires a commutative Semigroup. To + * generalize to non-commutative, we need a sorted partition for T. Unfortunately we need to use a different + * name than sumByKey in scala 2.11 */ def sumByKey[K: ClassTag, V: ClassTag: Semigroup]( part: Partitioner @@ -110,17 +101,15 @@ class AlgebirdRDD[T](val rdd: RDD[T]) extends AnyVal { toPair(keyed).reduceByKey(part, implicitly[Semigroup[V]].plus _) /** - * Use the implicit Monoid to sum all items. If RDD is empty, Monoid.zero is returned - * requires a commutative Monoid. To generalize to non-commutative, we need a sorted partition for - * T. + * Use the implicit Monoid to sum all items. If RDD is empty, Monoid.zero is returned requires a commutative + * Monoid. To generalize to non-commutative, we need a sorted partition for T. */ def sum(implicit mon: Monoid[T], ct: ClassTag[T]): T = sumOption.getOrElse(mon.zero) /** - * Use the implicit Semigroup to sum all items. If there are no items, None is returned. - * requires a commutative Monoid. To generalize to non-commutative, we need a sorted partition for - * T. + * Use the implicit Semigroup to sum all items. If there are no items, None is returned. requires a + * commutative Monoid. To generalize to non-commutative, we need a sorted partition for T. */ def sumOption(implicit sg: Semigroup[T], ct: ClassTag[T]): Option[T] = { val partialReduce: RDD[T] = diff --git a/algebird-spark/src/main/scala/com/twitter/algebird/spark/package.scala b/algebird-spark/src/main/scala/com/twitter/algebird/spark/package.scala index f8fba3ee0..f7abb570e 100644 --- a/algebird-spark/src/main/scala/com/twitter/algebird/spark/package.scala +++ b/algebird-spark/src/main/scala/com/twitter/algebird/spark/package.scala @@ -5,8 +5,7 @@ import org.apache.spark.rdd.RDD import scala.reflect.ClassTag /** - * To use this, you probably want: - * import com.twitter.algebird.spark._ + * To use this, you probably want: import com.twitter.algebird.spark._ */ package object spark { diff --git a/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala b/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala index 13634d238..d61a929cb 100644 --- a/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala +++ b/algebird-spark/src/test/scala/com/twitter/algebird/spark/AlgebirdRDDTests.scala @@ -16,8 +16,7 @@ package test { } /** - * This test almost always times out on travis. - * Leaving at least a compilation test of using with spark + * This test almost always times out on travis. Leaving at least a compilation test of using with spark */ class AlgebirdRDDTest extends AnyFunSuite with BeforeAndAfter { @@ -73,8 +72,7 @@ class AlgebirdRDDTest extends AnyFunSuite with BeforeAndAfter { } /** - * These tests almost always timeout on Travis. Leaving the - * above to at least check compilation + * These tests almost always timeout on Travis. 
Leaving the above to at least check compilation */ // test("aggregate") { // aggregate(0 to 1000, AlgebirdAggregator.fromSemigroup[Int]) diff --git a/algebird-test/src/main/scala/com/twitter/algebird/ApproximateProperty.scala b/algebird-test/src/main/scala/com/twitter/algebird/ApproximateProperty.scala index d9a5e1f14..736623c4b 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/ApproximateProperty.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/ApproximateProperty.scala @@ -21,9 +21,8 @@ trait ApproximateProperty { object ApproximateProperty { /** - * Generates a list of exactly n Ts. - * Useful because `Gen.listOfN(n, gen).sample` gives us Option[List[T]], - * while we often want List[T]. + * Generates a list of exactly n Ts. Useful because `Gen.listOfN(n, gen).sample` gives us Option[List[T]], + * while we often want List[T]. */ @annotation.tailrec private def genListOf[T](n: Int, gen: Gen[T], trial: Int = 100): List[T] = @@ -94,7 +93,7 @@ object ApproximateProperty { List( ( "Omitted results", - s"${zeroProbTests}/${objectReps * inputReps} tests returned an Approximate with probability 0. These tests have been omitted from the calculation." + s"$zeroProbTests/${objectReps * inputReps} tests returned an Approximate with probability 0. These tests have been omitted from the calculation." ) ) } else List() @@ -110,10 +109,9 @@ object ApproximateProperty { } /** - * Converts a list of ApproximateProperties to a scalacheck Prop that - * fails if too many of the ApproximateProperties fail. - * TODO use `new Prop` like the above `toProp` method so that we can - * have useful error messages. + * Converts a list of ApproximateProperties to a scalacheck Prop that fails if too many of the + * ApproximateProperties fail. TODO use `new Prop` like the above `toProp` method so that we can have useful + * error messages. */ def toProp( a: Seq[ApproximateProperty], @@ -135,8 +133,8 @@ object ApproximateProperty { } /** - * All tests that use ApproximateProperty should extend from this class so that - * the scalacheck property is run exactly once. + * All tests that use ApproximateProperty should extend from this class so that the scalacheck property is run + * exactly once. */ abstract class ApproximateProperties(name: String) extends Properties(name) { override def overrideParameters(p: Test.Parameters): Test.Parameters = diff --git a/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala b/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala index fab1f4185..e96cb2c5f 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala @@ -27,10 +27,9 @@ import scala.math.Equiv object BaseProperties extends MetricProperties { /** - * We generate a restricted set of BigDecimals for our tests because if we use - * the full range then the way we lose precision in addition does not satisfy - * the distributive property perfectly. This means BigDecimal isn't truly - * a Ring under it's very strict Equiv. + * We generate a restricted set of BigDecimals for our tests because if we use the full range then the way + * we lose precision in addition does not satisfy the distributive property perfectly. This means BigDecimal + * isn't truly a Ring under it's very strict Equiv. 
*/ val arbReasonableBigDecimals: Arbitrary[BigDecimal] = Arbitrary(for { scale <- Gen.choose(-7, +7) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala index d7acbd291..dd195d161 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/AggregatorLaws.scala @@ -22,7 +22,7 @@ import org.scalacheck.Prop import org.scalatest.funsuite.AnyFunSuite /** - * Unit tests to highlight specific examples of the properties we guarantee. + * Unit tests to highlight specific examples of the properties we guarantee. */ class AggregatorTests extends AnyFunSuite { test("Kahan summation mitigates Double error accumulation") { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala index d72e87b14..46ebc6826 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/BloomFilterTest.scala @@ -132,7 +132,7 @@ class BFHashIndices extends CheckProperties { } /** - * This is the version of the BFHash as of before the "negative values fix" + * This is the version of the BFHash as of before the "negative values fix" */ case class NegativeBFHash(numHashes: Int, width: Int) { val size: Int = numHashes diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CheckProperties.scala b/algebird-test/src/test/scala/com/twitter/algebird/CheckProperties.scala index ea092aaff..6230ca562 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CheckProperties.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CheckProperties.scala @@ -4,7 +4,8 @@ import org.scalatestplus.scalacheck.Checkers import org.scalatest.propspec.AnyPropSpec /** - * @author Mansur Ashraf. + * @author + * Mansur Ashraf. 
*/ trait CheckProperties extends AnyPropSpec with Checkers { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala index 63e1c103c..ed54d0671 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala @@ -66,15 +66,14 @@ class CombinatorTest extends CheckProperties { // Make sure the sets start sorted: implicit def topKArb: Arbitrary[(Map[Int, Int], Set[Int])] = Arbitrary { - for ( - s <- Arbitrary.arbitrary[List[Int]]; - smallvals = s.map(_ % 31); + for { + s <- Arbitrary.arbitrary[List[Int]] + smallvals = s.map(_ % 31) m = smallvals .groupBy(s => s) .mapValues(_.size) - ) - yield monTopK.plus(monTopK.zero, (m.toMap, smallvals.toSet)) + } yield monTopK.plus(monTopK.zero, (m.toMap, smallvals.toSet)) } property("MonoidCombinator with top-K forms a Monoid") { monoidLaws[(Map[Int, Int], Set[Int])] diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala index e10418ffe..f302dbf2d 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala @@ -179,7 +179,8 @@ class CMSInstanceTest extends AnyWordSpec with Matchers with ScalaCheckDrivenPro } /** - * Verifies contramap functionality, which allows us to translate `CMSHasher[K]` into `CMSHasher[L]`, given `f: L => K`. + * Verifies contramap functionality, which allows us to translate `CMSHasher[K]` into `CMSHasher[L]`, given + * `f: L => K`. */ class CMSContraMapSpec extends AnyWordSpec with Matchers with ScalaCheckDrivenPropertyChecks { @@ -450,8 +451,10 @@ abstract class CMSTest[K: CMSHasher](toK: Int => K) /** * Creates a random data stream. * - * @param size Number of stream elements. - * @param range Elements are randomly drawn from [0, range). + * @param size + * Number of stream elements. + * @param range + * Elements are randomly drawn from [0, range). * @return */ def createRandomStream(size: Int, range: Int, rnd: Random = RAND): Seq[K] = { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala index a1560d707..fc5fa7015 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala @@ -186,10 +186,9 @@ class EventuallyAggregatorLaws extends AnyPropSpec with ScalaCheckPropertyChecks property("EventuallyAggregator converts correctly") { /** - * Note, not all middle functions with all mustConvert are lawful. - * here we are forcing a structure such that a + b >= a, b for the middle type, - * and the mustConvert is a threshold on some projection of that middle type. - * You should check the laws for each type you care about. + * Note, not all middle functions with all mustConvert are lawful. here we are forcing a structure such + * that a + b >= a, b for the middle type, and the mustConvert is a threshold on some projection of that + * middle type. You should check the laws for each type you care about. * * For HLL/Set, which is the common example, this is lawful. 
*/ diff --git a/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala index dc9f81c37..ae8023b2f 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/ExpHistLaws.scala @@ -25,13 +25,13 @@ class ExpHistLaws extends AnyPropSpec with ScalaCheckPropertyChecks { } /** - * An "exponential histogram" tracks the count of a sliding window - * with a fixed maximum relative error. The core guarantees are: + * An "exponential histogram" tracks the count of a sliding window with a fixed maximum relative error. The + * core guarantees are: * - * - The actual sum will always be within the tracked bounds - * - The EH's guess is always within epsilon the actual. - * - The relative error of the count is at most epsilon - * - the relative error is always between 0 and 0.5. + * - The actual sum will always be within the tracked bounds + * - The EH's guess is always within epsilon the actual. + * - The relative error of the count is at most epsilon + * - the relative error is always between 0 and 0.5. */ def checkCoreProperties(eh: ExpHist, actualSum: Long): Unit = { assert(eh.lowerBoundSum <= actualSum) @@ -48,8 +48,8 @@ class ExpHistLaws extends AnyPropSpec with ScalaCheckPropertyChecks { } /** - * Returns the ACTUAL sum of the supplied vector of buckets, - * filtering out any bucket with a timestamp <= exclusiveCutoff. + * Returns the ACTUAL sum of the supplied vector of buckets, filtering out any bucket with a timestamp <= + * exclusiveCutoff. */ def actualBucketSum(buckets: Vector[Bucket], exclusiveCutoff: Timestamp): Long = buckets.collect { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala index afd61692e..0e44cb8f1 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala @@ -70,8 +70,7 @@ class HyperLogLogLaws extends CheckProperties { } /** - * We can't change the way Array[Byte] was hashed without breaking - * serialized HLLs + * We can't change the way Array[Byte] was hashed without breaking serialized HLLs */ property("HyperLogLog.hash matches reference") { Prop.forAll { a: Array[Byte] => HyperLogLog.hash(a).toSeq == ReferenceHyperLogLog.hash(a).toSeq } @@ -161,8 +160,7 @@ class HLLIntersectionProperty[T: Hash128: Gen](bits: Int, numHlls: Int) extends } /** - * SetSizeAggregator should work as an aggregator and return - * approximate size when > maxSetSize + * SetSizeAggregator should work as an aggregator and return approximate size when > maxSetSize */ abstract class SetSizeAggregatorProperty[T] extends ApproximateProperty { type Exact = Set[T] diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala index 357688658..aff446ebd 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala @@ -97,8 +97,7 @@ class MomentsTest extends AnyWordSpec with Matchers { assert(approxEq(1e-10)(f1, f2)) /** - * Given a list of doubles, create a Moments object to hold - * the list's central moments. + * Given a list of doubles, create a Moments object to hold the list's central moments. 
*/ def getMoments(xs: List[Double]): Moments = MomentsAggregator(xs) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala b/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala index 05bbccc2e..66b37aa39 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/NumericSpecification.scala @@ -7,9 +7,8 @@ import org.scalatest.propspec.AnyPropSpec import org.scalatest.compatible.Assertion /** - * Tests abstract algebra against scala's Numeric trait - * Numeric is basically the ring trait with ordering, so we can use it - * below to test all the numeric traits. + * Tests abstract algebra against scala's Numeric trait Numeric is basically the ring trait with ordering, so + * we can use it below to test all the numeric traits. */ class NumericSpecification extends AnyPropSpec with ScalaCheckPropertyChecks with Matchers { def plusNumericProp[T: Monoid: Numeric: Arbitrary]: Assertion = forAll { (a: T, b: T) => diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala index 7fed100f9..7d95ba3ce 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala @@ -27,8 +27,7 @@ class SetDiffTest extends AnyWordSpec with Matchers with Checkers { } /** - * This is the core law, along with associativity that allows - * us to reason about set SetDiffs. + * This is the core law, along with associativity that allows us to reason about set SetDiffs. */ "SetDiffs are the same as updating the set" in { check { (init: Set[Int], items: List[Either[Int, Int]]) => diff --git a/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala b/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala index caffac678..f1ab5b373 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala @@ -48,8 +48,7 @@ class TopKTests extends CheckProperties { } /** - * The following were specific bugs that we failed some prior - * scalacheck (yay for randomized testing) + * The following were specific bugs that we failed some prior scalacheck (yay for randomized testing) */ val pqPriorBugs: Seq[List[List[Int]]] = Seq(List(List(1, 1, 1, 2), List(0, 0, 0, 0, 0, 0, 0))) property("Specific regressions are handled") { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala index bef08a706..aa0dd3e72 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala @@ -112,7 +112,7 @@ object BitSetTest extends Properties("BitSet") { property("(x + a)(a)") = forAll { (x: BitSet, a: Int) => val y = x + a - y(a) :| s"$y(${a})" + y(a) :| s"$y($a)" } property("!(x - a)(a)") = forAll { (x: BitSet, a: Int) => diff --git a/algebird-test/src/test/scala/com/twitter/algebird/immutable/BloomFilterTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/immutable/BloomFilterTest.scala index bbc0a74f7..56f314067 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/immutable/BloomFilterTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/immutable/BloomFilterTest.scala @@ -134,7 
+134,7 @@ class ImmutableBloomFilterHashIndices extends CheckProperties { } /** - * This is the version of the Hash as of before the "negative values fix" + * This is the version of the Hash as of before the "negative values fix" */ case class NegativeHash(numHashes: Int, width: Int) { val size = numHashes diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala index 460af9c2b..112945300 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala @@ -19,10 +19,9 @@ import com.twitter.algebird._ import com.twitter.util.Promise /** - * This Monoid allows code to depend on the result of computation asynchronously. - * This is a slightly less general version of the TunnelMonoid. See the documentation - * for TunnelMonoid for general motivation. NOTE: the Promise will be fulfilled with - * the value just before the PromiseLink is calculated. + * This Monoid allows code to depend on the result of computation asynchronously. This is a slightly less + * general version of the TunnelMonoid. See the documentation for TunnelMonoid for general motivation. NOTE: + * the Promise will be fulfilled with the value just before the PromiseLink is calculated. */ class PromiseLinkMonoid[V](monoid: Monoid[V]) extends Monoid[PromiseLink[V]] { //TODo(jcoveney) rename PromiseLink def zero: PromiseLink[V] = PromiseLink(new Promise, monoid.zero) @@ -37,8 +36,8 @@ class PromiseLinkMonoid[V](monoid: Monoid[V]) extends Monoid[PromiseLink[V]] { / } /** - * This class allows code to depends on the data that a value will be combined with, - * fulfilling the Promise with the value just before the value is added in. + * This class allows code to depends on the data that a value will be combined with, fulfilling the Promise + * with the value just before the value is added in. */ case class PromiseLink[V](promise: Promise[V], value: V) { def completeWithStartingValue(startingV: V)(implicit monoid: Monoid[V]): V = { diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/TunnelMonoid.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/TunnelMonoid.scala index 025c67bb1..041aedf36 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/TunnelMonoid.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/TunnelMonoid.scala @@ -19,14 +19,11 @@ import com.twitter.algebird._ import com.twitter.util.{Future, Promise, Return} /** - * This Monoid allows code to depends on the results of asynchronous - * computation. It is relatively common to have code which takes a - * Monoid and elements, but applies the computation in an opaque way - * (a cache, for example). This allows the code handing over the - * elements (in this case, Tunnel objects) to depend on the result - * of the Monoid's computation. Note that this code does not depend - * on any particular Monoid -- that dependency is strictly when the Tunnel - * objects are created. This is the async analogue of Function1Monoid. + * This Monoid allows code to depends on the results of asynchronous computation. It is relatively common to + * have code which takes a Monoid and elements, but applies the computation in an opaque way (a cache, for + * example). This allows the code handing over the elements (in this case, Tunnel objects) to depend on the + * result of the Monoid's computation. 
Note that this code does not depend on any particular Monoid -- that + * dependency is strictly when the Tunnel objects are created. This is the async analogue of Function1Monoid. */ class TunnelMonoid[V] extends Monoid[Tunnel[V]] { def zero: Tunnel[V] = { @@ -44,11 +41,10 @@ class TunnelMonoid[V] extends Monoid[Tunnel[V]] { } /** - * The tunnel class represents a piece of computation that depends on the - * fulfilment of a promise. IMPORTANT: see apply, but Tunnels are mutable, - * and can only be fulfilled once. They are generally not reusable. Reusing - * a Tunnel in computation by a TunnelMonoid will cause the promise to be - * fulfilled more than once which will most likely lead to errors. + * The tunnel class represents a piece of computation that depends on the fulfilment of a promise. IMPORTANT: + * see apply, but Tunnels are mutable, and can only be fulfilled once. They are generally not reusable. + * Reusing a Tunnel in computation by a TunnelMonoid will cause the promise to be fulfilled more than once + * which will most likely lead to errors. */ case class Tunnel[V](future: Future[V], promise: Promise[V]) { def willEqual(other: Tunnel[V]): Future[Boolean] = @@ -58,9 +54,8 @@ case class Tunnel[V](future: Future[V], promise: Promise[V]) { } yield b1 == b2 /** - * This takes in a value and updates the promise, fulfilling the chain - * of futures which depends on this final promise. IMPORTANT: this can - * only be called once. In this way, it is dangerous to reuse Tunnel + * This takes in a value and updates the promise, fulfilling the chain of futures which depends on this + * final promise. IMPORTANT: this can only be called once. In this way, it is dangerous to reuse Tunnel * objects in Monoid code that might reuse objects. */ def apply(v: V): Future[V] = { @@ -73,8 +68,8 @@ object Tunnel { implicit def monoid[V]: TunnelMonoid[V] = new TunnelMonoid[V] /** - * This lifts a value into a Tunnel. This is where the Monoidic - * computation underlying a TunnelMonoid actually happens. + * This lifts a value into a Tunnel. This is where the Monoidic computation underlying a TunnelMonoid + * actually happens. */ def toIncrement[V](v: V)(implicit monoid: Monoid[V]): Tunnel[V] = { val promise = new Promise[V] @@ -82,9 +77,8 @@ object Tunnel { } /** - * This attempts to fulfil the promise. If it has already been fulfilled, - * this will throw an error if the value is different from the previous - * value that was used. + * This attempts to fulfil the promise. If it has already been fulfilled, this will throw an error if the + * value is different from the previous value that was used. */ def properPromiseUpdate[V](promise: Promise[V], newV: V): Unit = if (!promise.updateIfEmpty(Return(newV))) { diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala index d19bb5d73..c66e7a8be 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala @@ -20,10 +20,11 @@ import com.twitter.util.{Future, FuturePool} import scala.collection.mutable.{ListBuffer, Map => MMap} /** - * @author Ian O Connell + * @author + * Ian O Connell * - * This is a simple asyncronous summer, where a shared mutable map is used between all readers/writers. - * When flushing it acquires the lock, drains the mutable map but does the compaction without holding the lock. 
+ * This is a simple asyncronous summer, where a shared mutable map is used between all readers/writers. When
+ * flushing it acquires the lock, drains the mutable map but does the compaction without holding the lock.
  */
 class AsyncListMMapSum[Key, Value](
     bufferSize: BufferSize,
diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala
index aca2517d7..b1236bbbe 100644
--- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala
+++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala
@@ -26,7 +26,8 @@ import scala.collection.mutable.{Set => MSet}
 import com.twitter.algebird.util.UtilAlgebras._
 
 /**
- * @author Ian O Connell
+ * @author
+ *   Ian O Connell
  */
 class AsyncListSum[Key, Value](
     bufferSize: BufferSize,
diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncMapSum.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncMapSum.scala
index 2ea9c70de..857ebe310 100644
--- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncMapSum.scala
+++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncMapSum.scala
@@ -22,7 +22,8 @@ import scala.collection.mutable.ArrayBuffer
 import scala.collection.JavaConverters._
 
 /**
- * @author Ian O Connell
+ * @author
+ *   Ian O Connell
  */
 class AsyncMapSum[Key, Value](
     bufferSize: BufferSize,
diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala
index 5d60e9f7b..54a23b6e8 100644
--- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala
+++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala
@@ -18,7 +18,8 @@ package com.twitter.algebird.util.summer
 import com.twitter.util.Future
 
 /**
- * @author Ian O Connell
+ * @author
+ *   Ian O Connell
  */
 trait AsyncSummer[T, +M <: Iterable[T]] { self =>
   def flush: Future[M]
diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/BufferSize.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/BufferSize.scala
index c64d080e1..40071b655 100644
--- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/BufferSize.scala
+++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/BufferSize.scala
@@ -16,6 +16,7 @@ limitations under the License.
 package com.twitter.algebird.util.summer
 
 /**
- * @author Ian O Connell
+ * @author
+ *   Ian O Connell
  */
 case class BufferSize(v: Int)
diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/FlushFrequency.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/FlushFrequency.scala
index 459e0b87b..45b973b49 100644
--- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/FlushFrequency.scala
+++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/FlushFrequency.scala
@@ -18,6 +18,7 @@ package com.twitter.algebird.util.summer
 import com.twitter.util.Duration
 
 /**
- * @author Ian O Connell
+ * @author
+ *   Ian O Connell
  */
 case class FlushFrequency(v: Duration)
diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala
index 734cc8dfd..6b74fe282 100644
--- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala
+++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala
@@ -21,7 +21,8 @@ import com.twitter.util.Future
 import scala.collection.mutable.ListBuffer
 
 /**
- * @author Ian O Connell
+ * @author
+ *   Ian O Connell
  *
  * This class is designed to use a local mutable CMS to skip keeping low freqeuncy keys in a buffer.
  */
diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/Incrementor.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/Incrementor.scala
index 685a3c24e..8425d05c1 100644
--- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/Incrementor.scala
+++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/Incrementor.scala
@@ -1,7 +1,8 @@
 package com.twitter.algebird.util.summer
 
 /**
- * @author Mansur Ashraf.
+ * @author
+ *   Mansur Ashraf.
  */
 trait Incrementor {
   def incr(): Unit
diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/MemoryFlushPercent.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/MemoryFlushPercent.scala
index ff0b4b3f1..a7892edce 100644
--- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/MemoryFlushPercent.scala
+++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/MemoryFlushPercent.scala
@@ -16,6 +16,7 @@ limitations under the License.
 package com.twitter.algebird.util.summer
 
 /**
- * @author Ian O Connell
+ * @author
+ *   Ian O Connell
  */
 case class MemoryFlushPercent(v: Float)
diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/NullSummer.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/NullSummer.scala
index 294d9369e..133f880eb 100644
--- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/NullSummer.scala
+++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/NullSummer.scala
@@ -19,7 +19,8 @@ import com.twitter.algebird._
 import com.twitter.util.Future
 
 /**
- * @author Ian O Connell
+ * @author
+ *   Ian O Connell
  */
 class NullSummer[Key, Value](tuplesIn: Incrementor, tuplesOut: Incrementor)(implicit
     semigroup: Semigroup[Value]
diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/SyncSummingQueue.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/SyncSummingQueue.scala
index 77a6bbf29..b1e268342 100644
--- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/SyncSummingQueue.scala
+++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/SyncSummingQueue.scala
@@ -24,7 +24,8 @@ import scala.collection.JavaConverters._
 import scala.collection.mutable.ListBuffer
 
 /**
- * @author Ian O Connell
+ * @author
+ *   Ian O Connell
  */
 case class SyncSummingQueue[Key, Value](
     bufferSize: BufferSize,
@@ -74,9 +75,8 @@ class CustomSummingQueue[V](capacity: Int, sizeIncr: Incrementor, putCalls: Incr
     if (capacity > 0) Some(new ArrayBlockingQueue[V](capacity, true)) else None
 
   /**
-   * puts an item to the queue, optionally sums up the queue and returns value
-   * This never blocks interally. It uses offer. If the queue is full, we drain,
-   * sum the queue.
+   * puts an item to the queue, optionally sums up the queue and returns value This never blocks interally. It
+   * uses offer. If the queue is full, we drain, sum the queue.
    */
   final def put(item: V): Option[V] =
     if (queueOption.isDefined) {
diff --git a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/Counter.scala b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/Counter.scala
index 97c108535..4127fa8a8 100644
--- a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/Counter.scala
+++ b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/Counter.scala
@@ -3,7 +3,8 @@ package com.twitter.algebird.util.summer
 import java.util.concurrent.atomic.AtomicLong
 
 /**
- * @author Mansur Ashraf.
+ * @author
+ *   Mansur Ashraf.
  */
 case class Counter(name: String) extends Incrementor {
   private val counter = new AtomicLong()
diff --git a/build.sbt b/build.sbt
index cd4accffb..6c937ceed 100644
--- a/build.sbt
+++ b/build.sbt
@@ -176,8 +176,7 @@ lazy val mimaSettings = Def.settings(
 )
 
 /**
- * This returns the previous jar we released that is compatible with
- * the current.
+ * This returns the previous jar we released that is compatible with the current.
*/ val noBinaryCompatCheck = Set[String]("benchmark", "caliper", "spark") diff --git a/project/GenTupleAggregators.scala b/project/GenTupleAggregators.scala index bda5634e2..44915c890 100644 --- a/project/GenTupleAggregators.scala +++ b/project/GenTupleAggregators.scala @@ -105,10 +105,10 @@ object MultiAggregator { // there's no Semigroup[Tuple1[T]], so just use T as intermediary type instead of Tuple1[T] // TODO: keys for 1 item val aggregatorForOneItem = s""" - |def apply[K, A, B, C](agg: (K, ${inputAggregatorType}[A, B, C])): ${mapAggregatorType}[A, B, K, C] = { - | new ${mapAggregatorType}[A, B, K, C] { + |def apply[K, A, B, C](agg: (K, $inputAggregatorType[A, B, C])): $mapAggregatorType[A, B, K, C] = { + | new $mapAggregatorType[A, B, K, C] { | def prepare(a: A) = agg._2.prepare(a) - | val ${semigroupType} = agg._2.${semigroupType} + | val $semigroupType = agg._2.$semigroupType | def present(b: B) = Map(agg._1 -> agg._2.present(b)) | def keys = Set(agg._1) | } @@ -119,26 +119,26 @@ object MultiAggregator { .map { aggrCount => val aggrNums = 1 to aggrCount - val inputAggs = aggrNums.map(i => s"agg$i: (K, ${inputAggregatorType}[A, B$i, C])").mkString(", ") + val inputAggs = aggrNums.map(i => s"agg$i: (K, $inputAggregatorType[A, B$i, C])").mkString(", ") val bs = aggrNums.map("B" + _).mkString(", ") - val tupleBs = s"Tuple${aggrCount}[$bs]" + val tupleBs = s"Tuple$aggrCount[$bs]" s""" - |def apply[K, A, $bs, C]($inputAggs): ${mapAggregatorType}[A, $tupleBs, K, C] = { - | new ${mapAggregatorType}[A, $tupleBs, K, C] { + |def apply[K, A, $bs, C]($inputAggs): $mapAggregatorType[A, $tupleBs, K, C] = { + | new $mapAggregatorType[A, $tupleBs, K, C] { | def prepare(a: A) = ( - | ${aggrNums.map(i => s"agg${i}._2.prepare(a)").mkString(", ")} + | ${aggrNums.map(i => s"agg$i._2.prepare(a)").mkString(", ")} | ) | // a field for semigroup/monoid that combines all input aggregators - | val $semigroupType = new Tuple${aggrCount}${semigroupType.capitalize}()( - | ${aggrNums.map(i => s"agg${i}._2.$semigroupType").mkString(", ")} + | val $semigroupType = new Tuple$aggrCount${semigroupType.capitalize}()( + | ${aggrNums.map(i => s"agg$i._2.$semigroupType").mkString(", ")} | ) | def present(b: $tupleBs) = Map( - | ${aggrNums.map(i => s"agg${i}._1 -> agg${i}._2.present(b._${i})").mkString(", ")} + | ${aggrNums.map(i => s"agg$i._1 -> agg$i._2.present(b._$i)").mkString(", ")} | ) | def keys: Set[K] = Set( - | ${aggrNums.map(i => s"agg${i}._1").mkString(", ")} + | ${aggrNums.map(i => s"agg$i._1").mkString(", ")} | ) | } |}""".stripMargin From ce238700ef2e0a67f6c6de9b2a4be7d2842e3bdb Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 4 Sep 2021 05:13:31 +0200 Subject: [PATCH 212/306] Update kind-projector to 0.13.2 (#1000) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 6c937ceed..ce0335701 100644 --- a/build.sbt +++ b/build.sbt @@ -5,7 +5,7 @@ import pl.project13.scala.sbt.JmhPlugin val algebraVersion = "2.0.0" val bijectionVersion = "0.9.7" val javaEwahVersion = "1.1.12" -val kindProjectorVersion = "0.13.1" +val kindProjectorVersion = "0.13.2" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.9" From 25db6adf823682737a22452ae6992e1a0072ca2d Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 4 Sep 2021 05:14:08 +0200 Subject: [PATCH 213/306] Update scalafmt-core to 3.0.2 (#999) --- .scalafmt.conf | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index d30c20aaf..71c492f94 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.0.1 +version=3.0.2 maxColumn = 110 docstrings.style = Asterisk newlines.alwaysBeforeMultilineDef = false From 6211c4e83afd964a681e0d54d1a776b03e443cd7 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 11 Sep 2021 00:13:14 +0200 Subject: [PATCH 214/306] Update scalafmt-core to 3.0.3 (#1001) --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 71c492f94..50199eb8b 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.0.2 +version=3.0.3 maxColumn = 110 docstrings.style = Asterisk newlines.alwaysBeforeMultilineDef = false From a8f3af758d1173f15883e6be368c15101a19e003 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 16 Sep 2021 22:34:46 +0200 Subject: [PATCH 215/306] Update sbt-scoverage to 1.9.0 (#1003) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index b43cebff2..9148b1626 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.3") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.8.2") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.0") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.30") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") From 723886ba90fe592c64eac16a0df74597546698ae Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Thu, 16 Sep 2021 21:49:25 +0100 Subject: [PATCH 216/306] Update scala to 2.12.15 (#1004) * Update scala-library, scala-reflect to 2.12.15 * Update CI Co-authored-by: Scala Steward --- .github/workflows/ci.yml | 4 ++-- build.sbt | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5d0dda132..d3342c957 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,7 +29,7 @@ jobs: - 11 scala: - 2.11.12 - - 2.12.14 + - 2.12.15 - 2.13.6 test-coverage: runs-on: ubuntu-latest @@ -60,7 +60,7 @@ jobs: - 11 scala: - 2.11.12 - - 2.12.14 + - 2.12.15 - 2.13.6 microsite: runs-on: ubuntu-latest diff --git a/build.sbt b/build.sbt index ce0335701..27f6f5818 100644 --- a/build.sbt +++ b/build.sbt @@ -39,7 +39,7 @@ crossScalaVersions := Nil val sharedSettings = Seq( organization := "com.twitter", - scalaVersion := "2.12.14", + scalaVersion := "2.12.15", crossScalaVersions := Seq("2.11.12", scalaVersion.value), resolvers ++= Seq( Opts.resolver.sonatypeSnapshots, From 98970910e9b4d762b68f15940f158c06bfa6ac30 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 17 Sep 2021 03:41:55 +0200 Subject: [PATCH 217/306] Update sbt-scalafix to 0.9.31 (#1005) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 9148b1626..8434171de 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,5 +12,5 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.0") 
addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.0") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.30") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.31") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") From 34feaf02e7239f9f150bbbd37d886c5c690c395c Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 18 Sep 2021 08:49:19 +0200 Subject: [PATCH 218/306] Update scalatest to 3.2.10 (#1007) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 27f6f5818..2581a32cf 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val javaEwahVersion = "1.1.12" val kindProjectorVersion = "0.13.2" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.9" +val scalaTestVersion = "3.2.10" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.5.0" From 0c45f9395020ceecd28899fe156b7e3c92b29814 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 18 Sep 2021 08:49:29 +0200 Subject: [PATCH 219/306] Update scalafmt-core to 3.0.4 (#1006) --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 50199eb8b..720a1b73e 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.0.3 +version=3.0.4 maxColumn = 110 docstrings.style = Asterisk newlines.alwaysBeforeMultilineDef = false From 34f832fe71a15f836f945b14c1c91bca7fad0045 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 23 Sep 2021 14:11:20 +0200 Subject: [PATCH 220/306] Update sbt-ci-release to 1.5.9 (#1009) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 8434171de..777122f3c 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -13,4 +13,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.0") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.31") -addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") +addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.9") From a805061e7e8adf9f3f0f8b7533e4ad9b798cfde8 Mon Sep 17 00:00:00 2001 From: "P. 
Oscar Boykin" Date: Fri, 24 Sep 2021 06:04:07 -1000 Subject: [PATCH 221/306] expose maximum number of items and ordering in PQ (#1008) --- .../mutable/PriorityQueueMonoid.scala | 25 +++++++++++++------ 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueMonoid.scala index 3d881eb22..fcfed8584 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueMonoid.scala @@ -26,17 +26,23 @@ import java.util.PriorityQueue */ class PriorityQueueMonoid[K](max: Int)(implicit ord: Ordering[K]) extends Monoid[PriorityQueue[K]] { - require(max > 0, "PriorityQueueMonoid requires keeping at least 1 item") + def maximumItems: Int = max + def ordering: Ordering[K] = ord + private[this] val revOrd: Ordering[K] = ord.reverse + + require(max > 0, s"PriorityQueueMonoid requires keeping at least 1 item, invalid max=$max") // Java throws if you try to make a queue size 0 protected val MINQUEUESIZE = 1 def build(k: K): PriorityQueue[K] = { - val q = new PriorityQueue[K](1, ord.reverse); + val q = new PriorityQueue[K](1, revOrd); q.add(k) q } def build(items: Iterable[K]): PriorityQueue[K] = { - val q = new PriorityQueue(items.size.max(MINQUEUESIZE), ord.reverse); - items.foreach { item => + val q = new PriorityQueue(items.size.max(MINQUEUESIZE), revOrd) + val it = items.iterator + while (it.hasNext) { + val item = it.next() if (q.size < max || ord.lteq(item, q.peek)) { q.add(item) } @@ -44,10 +50,15 @@ class PriorityQueueMonoid[K](max: Int)(implicit ord: Ordering[K]) extends Monoid limit(q) q } - protected def limit(q: PriorityQueue[K]): Unit = - while (q.size > max) { q.poll() } + protected def limit(q: PriorityQueue[K]): Unit = { + var excess = q.size - max + while (excess > 0) { + q.poll() + excess -= 1 + } + } - override def zero: PriorityQueue[K] = new PriorityQueue[K](MINQUEUESIZE, ord.reverse) + override def zero: PriorityQueue[K] = new PriorityQueue[K](MINQUEUESIZE, revOrd) override def isNonZero(q: PriorityQueue[K]): Boolean = q.size > 0 override def plus(left: PriorityQueue[K], right: PriorityQueue[K]): PriorityQueue[K] = { From d45c14f7c3ea28c4b02c97814de19e90fbef54c8 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 27 Sep 2021 00:59:38 +0200 Subject: [PATCH 222/306] Update scalafmt-core to 3.0.5 (#1010) --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 720a1b73e..67fa3623c 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.0.4 +version=3.0.5 maxColumn = 110 docstrings.style = Asterisk newlines.alwaysBeforeMultilineDef = false From c8805726670aba4b819eff9a52fea3c94a9cb128 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 29 Sep 2021 21:04:26 +0200 Subject: [PATCH 223/306] Update JavaEWAH to 1.1.13 (#1011) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 2581a32cf..5b921e153 100644 --- a/build.sbt +++ b/build.sbt @@ -4,7 +4,7 @@ import pl.project13.scala.sbt.JmhPlugin val algebraVersion = "2.0.0" val bijectionVersion = "0.9.7" -val javaEwahVersion = "1.1.12" +val javaEwahVersion = "1.1.13" val kindProjectorVersion = "0.13.2" val paradiseVersion = "2.1.1" val 
quasiquotesVersion = "2.1.0" From 06b588a36c5123fbd57fab46d9924bb1fd6ca310 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 4 Oct 2021 18:10:06 +0200 Subject: [PATCH 224/306] Update scalafmt-core to 3.0.6 (#1012) --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 67fa3623c..ec5122b68 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.0.5 +version=3.0.6 maxColumn = 110 docstrings.style = Asterisk newlines.alwaysBeforeMultilineDef = false From 33873057f8b3b287952b5a7f840d1878229af3df Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 6 Oct 2021 22:50:27 +0200 Subject: [PATCH 225/306] Update sbt-mima-plugin to 1.0.1 (#1013) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 777122f3c..a01746968 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -8,7 +8,7 @@ resolvers ++= Seq( addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.3") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.0") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.0") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") From 984dd549312b4eccf83f3d5762ca29fca02533e9 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 11 Oct 2021 18:15:32 +0200 Subject: [PATCH 226/306] Update sbt-scoverage to 1.9.1 (#1014) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index a01746968..e4b915aef 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.3") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.0") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.1") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.31") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.9") From 707e0537102d7d4f128f1757735512a67114f38c Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 14 Oct 2021 20:45:03 +0200 Subject: [PATCH 227/306] Update sbt-ci-release to 1.5.10 (#1015) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index e4b915aef..f110f3c92 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -13,4 +13,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.1") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.31") -addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.9") +addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") From ef239cadf01430a3328fc9bd961f3089c2916ab7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Oct 
2021 15:03:58 +0100 Subject: [PATCH 228/306] Bump actions/checkout from 2.3.4 to 2.3.5 (#1016) Bumps [actions/checkout](https://github.com/actions/checkout) from 2.3.4 to 2.3.5. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v2.3.4...v2.3.5) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 10 +++++----- .github/workflows/release.yml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d3342c957..a2dc4b60e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,7 +5,7 @@ jobs: checks: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.3.4 + - uses: actions/checkout@v2.3.5 - name: cache SBT uses: coursier/cache-action@v6 - name: Java 11 setup @@ -14,7 +14,7 @@ jobs: test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.3.4 + - uses: actions/checkout@v2.3.5 - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup @@ -34,7 +34,7 @@ jobs: test-coverage: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.3.4 + - uses: actions/checkout@v2.3.5 - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup @@ -45,7 +45,7 @@ jobs: mimaReport: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.3.4 + - uses: actions/checkout@v2.3.5 - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup @@ -65,7 +65,7 @@ jobs: microsite: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.3.4 + - uses: actions/checkout@v2.3.5 - name: Ruby setup uses: actions/setup-ruby@v1.1.3 with: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 04617f445..e80fb88ef 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,7 +7,7 @@ jobs: publish: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2.3.5 - uses: olafurpg/setup-scala@v13 - name: Publish ${{ github.ref }} run: sbt ci-release From 3a1070b348daf9e66733af1845f74131765c7dc5 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 21 Oct 2021 08:56:49 +0200 Subject: [PATCH 229/306] Update scalafmt-core to 3.0.7 (#1017) * Update scalafmt-core to 3.0.7 * Reformat with scalafmt 3.0.7 --- .scalafmt.conf | 2 +- .../src/main/scala/com/twitter/algebird/MomentsGroup.scala | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index ec5122b68..9729d0ad0 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.0.6 +version=3.0.7 maxColumn = 110 docstrings.style = Asterisk newlines.alwaysBeforeMultilineDef = false diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala index 1aab47871..7f2e54734 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala @@ -31,9 +31,8 @@ import algebra.{CommutativeGroup, CommutativeMonoid} sealed class Moments(val m0D: Double, val m1: Double, val m2: Double, 
val m3: Double, val m4: Double) extends Product with Serializable { - def this(m0: Long, m1: Double, m2: Double, m3: Double, m4: Double) = { + def this(m0: Long, m1: Double, m2: Double, m3: Double, m4: Double) = this(m0.toDouble, m1, m2, m3, m4) - } def m0: Long = m0D.toLong From e58422835e8dc588c1937dea364b12b11a0ee1fb Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 24 Oct 2021 10:19:04 +0200 Subject: [PATCH 230/306] Update sbt-unidoc to 0.5.0 (#1018) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index f110f3c92..cdc158731 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -6,7 +6,7 @@ resolvers ++= Seq( ) addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4") -addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") +addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.3") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") From 68f80682441fd8f25704bf27071cd7c6d072bd5f Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 26 Oct 2021 10:02:39 +0200 Subject: [PATCH 231/306] Update junit-interface to 0.13.2 (#1019) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 5b921e153..30ecdf893 100644 --- a/build.sbt +++ b/build.sbt @@ -72,7 +72,7 @@ val sharedSettings = Seq( javacOptions ++= Seq("-target", "1.6", "-source", "1.6"), libraryDependencies ++= Seq( "junit" % "junit" % "4.13.2" % Test, - "com.novocode" % "junit-interface" % "0.11" % Test + "com.github.sbt" % "junit-interface" % "0.13.2" % Test ), // Publishing options: pomIncludeRepository := { x => false }, From 8ee9e2c4ca434c50c218daf96999b5d0a508dd6d Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 1 Nov 2021 17:35:27 +0100 Subject: [PATCH 232/306] Update scalafmt-core to 3.0.8 (#1021) --- .scalafmt.conf | 2 +- .../twitter/algebird/benchmark/HLLPresentBenchmark.scala | 2 +- .../src/main/scala/com/twitter/algebird/Eventually.scala | 2 +- .../src/main/scala/com/twitter/algebird/First.scala | 2 +- .../src/main/scala/com/twitter/algebird/Interval.scala | 2 +- .../src/main/scala/com/twitter/algebird/Monoid.scala | 2 +- .../src/main/scala/com/twitter/algebird/QTree.scala | 8 ++++---- .../src/main/scala/com/twitter/algebird/Semigroup.scala | 8 ++++---- .../src/main/scala/com/twitter/algebird/SketchMap.scala | 2 +- .../src/main/scala/com/twitter/algebird/SpaceSaver.scala | 4 ++-- .../main/scala/com/twitter/algebird/scalacheck/Gen.scala | 2 +- .../test/scala/com/twitter/algebird/HyperLogLogTest.scala | 2 +- .../test/scala/com/twitter/algebird/SpaceSaverTest.scala | 2 +- .../com/twitter/algebird/util/summer/AsyncMapSum.scala | 2 +- .../algebird/util/summer/HeavyHittersCachingSummer.scala | 2 +- 15 files changed, 22 insertions(+), 22 deletions(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 9729d0ad0..7c0a60f39 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.0.7 +version=3.0.8 maxColumn = 110 docstrings.style = Asterisk newlines.alwaysBeforeMultilineDef = false diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala index 
b0eee084f..e9f34ddcb 100644 --- a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/HLLPresentBenchmark.scala @@ -33,7 +33,7 @@ object HLLPresentBenchmark { class HLLPresentBenchmark { import HLLPresentBenchmark._ - //don't cache the lazy values + // don't cache the lazy values def clone(hll: HLL): HLL = hll match { case SparseHLL(bits, maxRhow) => SparseHLL(bits, maxRhow) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala b/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala index 78548d55f..ea4c8d8d6 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala @@ -198,7 +198,7 @@ trait AbstractEventuallyAggregator[A, E, O, C] extends Aggregator[A, Either[E, O trait EventuallyAggregator[A, E, O, C] extends AbstractEventuallyAggregator[A, E, O, C] { - //avoid init order issues and cyclical references + // avoid init order issues and cyclical references @transient override lazy val semigroup: EventuallySemigroup[E, O] = new EventuallySemigroup[E, O](convert)(mustConvert)(leftSemigroup, rightAggregator.semigroup) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/First.scala b/algebird-core/src/main/scala/com/twitter/algebird/First.scala index a1238b4f8..115c2e56b 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/First.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/First.scala @@ -33,7 +33,7 @@ case class First[@specialized(Int, Long, Float, Double) +T](get: T) { * ignored instance of `First[U]` */ def +[U >: T](r: First[U]): First[T] = { - val _ = r //suppressing unused `r` + val _ = r // suppressing unused `r` this } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala b/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala index ea17861c7..eb9edbc2f 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Interval.scala @@ -129,7 +129,7 @@ object Interval extends java.io.Serializable { case Intersection(ExclusiveLower(l), InclusiveUpper(u)) => !succ.next(l).exists(succ.ordering.lteq(_, u)) case InclusiveLower(_) => false // we at least have l - case InclusiveUpper(_) => false //false // we at least have u + case InclusiveUpper(_) => false // false // we at least have u case ExclusiveLower(l) => succ.next(l).isEmpty case ExclusiveUpper(u) => diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala index b004d7b07..9698dd999 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala @@ -141,7 +141,7 @@ class SeqMonoid[T] extends Monoid[Seq[T]] { */ class ArrayMonoid[T: ClassTag](implicit semi: Semigroup[T]) extends Monoid[Array[T]] { - //additive identity + // additive identity override def isNonZero(v: Array[T]): Boolean = v.nonEmpty override def zero: Array[T] = Array[T]() diff --git a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala index fd21ef7bf..774e1ac7c 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala @@ -64,7 +64,7 @@ object QTree { offset: Long, 
level: Int, count: Long, - sum: A, //the sum at just this node (*not* including its children) + sum: A, // the sum at just this node (*not* including its children) lowerChild: Option[QTree[A]], upperChild: Option[QTree[A]] ): QTree[A] = { @@ -164,10 +164,10 @@ class QTreeSemigroup[A](k: Int)(implicit val underlyingMonoid: Monoid[A]) extend } class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( - _sum: A, //the sum at just this node (*not* including its children) - _offset: Long, //the range this tree covers is offset*(2^level) ... (offset+1)*(2^level) + _sum: A, // the sum at just this node (*not* including its children) + _offset: Long, // the range this tree covers is offset*(2^level) ... (offset+1)*(2^level) _level: Int, - _count: Long, //the total count for this node and all of its children + _count: Long, // the total count for this node and all of its children _lowerChildNullable: QTree[A], _upperChildNullable: QTree[A] ) extends scala.Product6[Long, Int, Long, A, Option[QTree[A]], Option[QTree[A]]] diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala b/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala index c4a5550f2..3e3d0cfde 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Semigroup.scala @@ -91,17 +91,17 @@ class EitherSemigroup[L, R](implicit semigroupl: Semigroup[L], semigroupr: Semig if (l.isLeft) { // l is Left, r may or may not be: if (r.isRight) { - //Avoid the allocation: + // Avoid the allocation: l } else { - //combine the lefts: + // combine the lefts: Left(semigroupl.plus(l.left.get, r.left.get)) } } else if (r.isLeft) { - //l is not a Left value, so just return right: + // l is not a Left value, so just return right: r } else { - //both l and r are Right values: + // both l and r are Right values: Right(semigroupr.plus(l.right.get, r.right.get)) } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala b/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala index ae40402b4..f5973c338 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala @@ -82,7 +82,7 @@ class SketchMapMonoid[K, V](val params: SketchMapParams[K])(implicit if (buffer.size > maxBuffer) sumBuffer() buffer += sm } - if (buffer.size > 1) sumBuffer() //don't bother to sum if there is only one item. + if (buffer.size > 1) sumBuffer() // don't bother to sum if there is only one item. 
Some(buffer(0)) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala index 0eca7c557..43e53c250 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala @@ -39,7 +39,7 @@ object SpaceSaver { case SSOne(capacity, item) => val itemAsBytes = tSerializer(item) val itemLength = itemAsBytes.length - //1 for the type, 4 for capacity, 4 for itemAsBytes.length + // 1 for the type, 4 for capacity, 4 for itemAsBytes.length val buffer = new Array[Byte](1 + 4 + 4 + itemLength) ByteBuffer .wrap(buffer) @@ -53,7 +53,7 @@ object SpaceSaver { capacity, counters, _ - ) => //We do not care about the buckets are thery are created by SSMany.apply + ) => // We do not care about the buckets are thery are created by SSMany.apply val buffer = scala.collection.mutable.ArrayBuffer.newBuilder[Byte] buffer += (2: Byte) diff --git a/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Gen.scala b/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Gen.scala index 20e6f08f1..f9cbd0d11 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Gen.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/scalacheck/Gen.scala @@ -113,7 +113,7 @@ object gen extends ExpHistGen with IntervalGen { (5, genUncorr), (1, genRandom), (1, CorrelationMonoid.zero), - (1, genSum) //, + (1, genSum) // , ) } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala index 0e44cb8f1..ea3ed65f9 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala @@ -281,7 +281,7 @@ class SetSizeAggregatorProperties extends ApproximateProperties("SetSizeAggregat class HyperLogLogTest extends AnyWordSpec with Matchers { - import HyperLogLog._ //Get the implicit int2bytes, long2Bytes + import HyperLogLog._ // Get the implicit int2bytes, long2Bytes val r: ju.Random = new java.util.Random diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala index adce742b7..922ad8534 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SpaceSaverTest.scala @@ -53,7 +53,7 @@ class SpaceSaverLaws extends CheckProperties { try { val fromBytes = SpaceSaver.fromBytes(a, SpaceSaverTest.arrayByteToString) - //We check that `fromBytes` doesn't yield exceptions + // We check that `fromBytes` doesn't yield exceptions fromBytes.isFailure || fromBytes.isSuccess } catch { case _: OutOfMemoryError => diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncMapSum.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncMapSum.scala index 857ebe310..731f65de6 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncMapSum.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncMapSum.scala @@ -63,7 +63,7 @@ class AsyncMapSum[Key, Value]( val curData = Semigroup.sumOption(vals.map(Map(_))).getOrElse(Map.empty) if (!queue.offer(curData)) { flush.map { flushRes => - sizeIncr.incr //todo not sure if need to increase size + sizeIncr.incr // todo not sure if need to increase size 
Semigroup.plus(flushRes, curData) } } else { diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala index 6b74fe282..bd0e9e2a8 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala @@ -246,7 +246,7 @@ class HeavyHittersCachingSummer[K, V]( new ApproxHHTracker(hhPct, updateFreq, roFreq) def addAll(vals: TraversableOnce[T]): Future[Iterable[T]] = { - //todo not sure if need to increment as backing summer may already be doing it + // todo not sure if need to increment as backing summer may already be doing it insertOp.incr val (hh, nonHH) = approxHH.splitTraversableOnce(vals, { t: T => t._1.hashCode }) From 160c9cfe25e8bca7306133bb94974ff0a1a571b6 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 2 Nov 2021 13:46:21 +0100 Subject: [PATCH 233/306] Update scala-library, scala-reflect to 2.13.7 (#1022) --- build.sbt | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/build.sbt b/build.sbt index 30ecdf893..0bace9aff 100644 --- a/build.sbt +++ b/build.sbt @@ -209,7 +209,7 @@ def module(name: String) = { } lazy val algebirdCore = module("core").settings( - crossScalaVersions += "2.13.6", + crossScalaVersions += "2.13.7", initialCommands := """ import com.twitter.algebird._ """.stripMargin('|'), @@ -239,7 +239,7 @@ lazy val algebirdCore = module("core").settings( lazy val algebirdTest = module("test") .settings( Test / testOptions ++= Seq(Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "4")), - crossScalaVersions += "2.13.6", + crossScalaVersions += "2.13.7", libraryDependencies ++= Seq( "org.scalacheck" %% "scalacheck" % scalacheckVersion, @@ -270,14 +270,14 @@ lazy val algebirdBenchmark = module("benchmark") lazy val algebirdUtil = module("util") .settings( - crossScalaVersions += "2.13.6", + crossScalaVersions += "2.13.7", libraryDependencies ++= Seq("com.twitter" %% "util-core" % utilVersion) ) .dependsOn(algebirdCore, algebirdTest % "test->test") lazy val algebirdBijection = module("bijection") .settings( - crossScalaVersions += "2.13.6", + crossScalaVersions += "2.13.7", libraryDependencies += "com.twitter" %% "bijection-core" % bijectionVersion ) .dependsOn(algebirdCore, algebirdTest % "test->test") @@ -294,7 +294,7 @@ lazy val algebirdSpark = module("spark") lazy val algebirdGeneric = module("generic") .settings( - crossScalaVersions += "2.13.6", + crossScalaVersions += "2.13.7", libraryDependencies ++= Seq( "com.chuusai" %% "shapeless" % "2.3.7", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" From 6f69d83f594ab8fe79604e6a0f4e71055710432d Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Wed, 3 Nov 2021 16:18:14 +0000 Subject: [PATCH 234/306] Update github-action scala 2.13 version (#1023) --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a2dc4b60e..4416559da 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,7 +30,7 @@ jobs: scala: - 2.11.12 - 2.12.15 - - 2.13.6 + - 2.13.7 test-coverage: runs-on: ubuntu-latest steps: @@ -61,7 +61,7 @@ jobs: scala: - 2.11.12 - 2.12.15 - - 2.13.6 + - 2.13.7 microsite: runs-on: ubuntu-latest steps: From 
6b80ea25b035e090b2987ec5ef963a9d24dbbe03 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Nov 2021 16:18:51 +0000 Subject: [PATCH 235/306] Bump actions/checkout from 2.3.5 to 2.4.0 (#1025) Bumps [actions/checkout](https://github.com/actions/checkout) from 2.3.5 to 2.4.0. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v2.3.5...v2.4.0) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 10 +++++----- .github/workflows/release.yml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4416559da..9b5ef514c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,7 +5,7 @@ jobs: checks: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.3.5 + - uses: actions/checkout@v2.4.0 - name: cache SBT uses: coursier/cache-action@v6 - name: Java 11 setup @@ -14,7 +14,7 @@ jobs: test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.3.5 + - uses: actions/checkout@v2.4.0 - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup @@ -34,7 +34,7 @@ jobs: test-coverage: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.3.5 + - uses: actions/checkout@v2.4.0 - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup @@ -45,7 +45,7 @@ jobs: mimaReport: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.3.5 + - uses: actions/checkout@v2.4.0 - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup @@ -65,7 +65,7 @@ jobs: microsite: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.3.5 + - uses: actions/checkout@v2.4.0 - name: Ruby setup uses: actions/setup-ruby@v1.1.3 with: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e80fb88ef..de425193b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,7 +7,7 @@ jobs: publish: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.3.5 + - uses: actions/checkout@v2.4.0 - uses: olafurpg/setup-scala@v13 - name: Publish ${{ github.ref }} run: sbt ci-release From 1e8ba3e1e26e0b5f1f07896307ee16b3211bbb71 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 4 Nov 2021 16:19:18 +0100 Subject: [PATCH 236/306] Update sbt-scoverage to 1.9.2 (#1024) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index cdc158731..b65057255 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.3") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.1") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.2") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.31") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") From 
4723c5fc51db13eb9057a897517ba1f919d02555 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 9 Nov 2021 16:59:49 +0100 Subject: [PATCH 237/306] Update scala-collection-compat to 2.6.0 (#1026) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 0bace9aff..92efc8b9c 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.10" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" -val scalaCollectionCompat = "2.5.0" +val scalaCollectionCompat = "2.6.0" val utilVersion = "21.2.0" val sparkVersion = "2.4.8" From 0f9dfbd234315a64386f167cee77d56089d4093d Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 14 Nov 2021 08:38:21 +0100 Subject: [PATCH 238/306] Update sbt-scalafix to 0.9.32 (#1028) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index b65057255..77833bb87 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,5 +12,5 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.2") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.31") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.32") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") From ae256cfed53f029d16d90af4523cea815623df14 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 19 Nov 2021 12:04:01 +0100 Subject: [PATCH 239/306] Update sbt-scalafmt to 2.4.4 (#1030) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 77833bb87..d51f60dda 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -7,7 +7,7 @@ resolvers ++= Seq( addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.3") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.4") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.2") From 47d6ed0908848ddffd621c7269915bbb889ca7f8 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 22 Nov 2021 19:23:11 +0100 Subject: [PATCH 240/306] Update scalafmt-core to 3.1.2 (#1031) --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 7c0a60f39..f5adace81 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.0.8 +version=3.1.2 maxColumn = 110 docstrings.style = Asterisk newlines.alwaysBeforeMultilineDef = false From b8c70a4b422e11c360ecfc80f47b57a7cb5b458d Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Fri, 10 Dec 2021 18:20:13 +0000 Subject: [PATCH 241/306] Update scalafmt with runner.dialect (#1036) --- .scalafmt.conf | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.scalafmt.conf b/.scalafmt.conf index f5adace81..e369f7a63 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,10 @@ version=3.1.2 +runner.dialect = scala212 +fileOverride { + "glob:**/scala-2.13*/**" { + runner.dialect 
= scala213 + } +} maxColumn = 110 docstrings.style = Asterisk newlines.alwaysBeforeMultilineDef = false From 12aae42116d8bae5e361aa6db7b8963a03c74dda Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 10 Dec 2021 23:18:54 +0100 Subject: [PATCH 242/306] Update scalafmt-core to 3.2.1 (#1034) * Update scalafmt-core to 3.2.1 * Update scalafmt-core to 3.2.1 --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index e369f7a63..8ea9a874b 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.1.2 +version=3.2.1 runner.dialect = scala212 fileOverride { "glob:**/scala-2.13*/**" { From 2a020bdd50cb85a329dee5914841eda3f07b50aa Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 15 Dec 2021 17:33:02 +0100 Subject: [PATCH 243/306] Update sbt to 1.5.7 (#1038) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 10fd9eee0..baf5ff3ec 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.5 +sbt.version=1.5.7 From aadc719ee975b9c645ed39bf4d770236bf59e911 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 24 Dec 2021 03:38:02 +0100 Subject: [PATCH 244/306] Update scalafmt-core to 3.2.2 (#1040) --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 8ea9a874b..41ea139fd 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.2.1 +version=3.2.2 runner.dialect = scala212 fileOverride { "glob:**/scala-2.13*/**" { From 5a947cba8cfa39f68f5438678631f199beebb5b5 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 24 Dec 2021 09:39:26 +0100 Subject: [PATCH 245/306] Update sbt to 1.5.8 (#1039) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index baf5ff3ec..e64c208ff 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.7 +sbt.version=1.5.8 From e7dd8d5246f77d3a091842bb00d86c5df04a9acc Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 26 Dec 2021 09:58:17 +0100 Subject: [PATCH 246/306] Update sbt-scalafmt to 2.4.6 (#1041) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index d51f60dda..dab1cc117 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -7,7 +7,7 @@ resolvers ++= Seq( addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.4") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.2") From 9bec2284dfae033bc2d37d3014e3f8e7a7fce604 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 27 Dec 2021 06:56:01 +0100 Subject: [PATCH 247/306] Update sbt to 1.6.0 (#1043) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/project/build.properties b/project/build.properties index e64c208ff..1e70b0c1c 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.8 +sbt.version=1.6.0 From caea538651b66dacfe16faf29af6b3c2f22d4191 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 29 Dec 2021 17:23:41 +0100 Subject: [PATCH 248/306] Update sbt to 1.6.1 (#1044) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 1e70b0c1c..3161d2146 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.6.0 +sbt.version=1.6.1 From 295424866297a0933d577a4fe56c40cfae109949 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 30 Dec 2021 08:08:28 +0100 Subject: [PATCH 249/306] Update junit-interface to 0.13.3 (#1045) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 92efc8b9c..f00574264 100644 --- a/build.sbt +++ b/build.sbt @@ -72,7 +72,7 @@ val sharedSettings = Seq( javacOptions ++= Seq("-target", "1.6", "-source", "1.6"), libraryDependencies ++= Seq( "junit" % "junit" % "4.13.2" % Test, - "com.github.sbt" % "junit-interface" % "0.13.2" % Test + "com.github.sbt" % "junit-interface" % "0.13.3" % Test ), // Publishing options: pomIncludeRepository := { x => false }, From 3f29405a2b94db3f6b38d94599b2c07132bc6b4e Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 2 Jan 2022 14:14:34 +0100 Subject: [PATCH 250/306] Update scalafmt-core to 3.3.1 (#1046) * Update scalafmt-core to 3.3.1 * Reformat with scalafmt 3.3.1 --- .scalafmt.conf | 2 +- .../scala/com/twitter/algebird/DecayingCMS.scala | 16 ++++++++-------- .../com/twitter/algebird/InvariantAlgebras.scala | 6 +++--- .../com/twitter/algebird/MomentsGroup.scala | 8 ++++---- .../com/twitter/algebird/generic/Instances.scala | 4 ++-- .../scala/com/twitter/algebird/SetDiffTest.scala | 2 +- .../algebird/util/PromiseLinkMonoid.scala | 2 +- 7 files changed, 20 insertions(+), 20 deletions(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 41ea139fd..b7f5911ec 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.2.2 +version=3.3.1 runner.dialect = scala212 fileOverride { "glob:**/scala-2.13*/**" { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala b/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala index 62ac2b5cb..2b6a5f157 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala @@ -290,15 +290,15 @@ final class DecayingCMS[K]( any match { case that: DecayingCMS[_]#CMS => this.logScale == that.logScale && - this.timeInHL == that.timeInHL && - this.cells.length == that.cells.length && { - var i = 0 - while (i < depth) { - if (this.cells(i) != that.cells(i)) return false - i += 1 - } - true + this.timeInHL == that.timeInHL && + this.cells.length == that.cells.length && { + var i = 0 + while (i < depth) { + if (this.cells(i) != that.cells(i)) return false + i += 1 } + true + } case _ => false } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/InvariantAlgebras.scala b/algebird-core/src/main/scala/com/twitter/algebird/InvariantAlgebras.scala index 9f8a3df09..5f517e82e 100644 --- 
a/algebird-core/src/main/scala/com/twitter/algebird/InvariantAlgebras.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/InvariantAlgebras.scala @@ -16,9 +16,9 @@ class InvariantSemigroup[T, U](val forward: T => U, val reverse: U => T)(implici that match { case r: InvariantSemigroup[_, _] => (hashCode == r.hashCode) && - (forward == r.forward) && - (reverse == r.reverse) && - (semigroup == r.semigroup) + (forward == r.forward) && + (reverse == r.reverse) && + (semigroup == r.semigroup) case _ => false } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala index 7f2e54734..a5f8a57b2 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala @@ -82,10 +82,10 @@ sealed class Moments(val m0D: Double, val m1: Double, val m2: Double, val m3: Do that match { case thatM: Moments => (m0D == thatM.m0D) && - (m1 == thatM.m1) && - (m2 == thatM.m2) && - (m3 == thatM.m3) && - (m4 == thatM.m4) + (m1 == thatM.m1) && + (m2 == thatM.m2) && + (m3 == thatM.m3) && + (m4 == thatM.m4) case _ => false } diff --git a/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala b/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala index 34a67ceaa..990e07ee3 100644 --- a/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala +++ b/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala @@ -123,8 +123,8 @@ class HConsSemigroup[A, B <: HList](protected val a: Semigroup[A], protected val that match { case hcs: HConsSemigroup[_, _] => (hashCode == hcs.hashCode) && - (a == hcs.a) && - (b == hcs.b) + (a == hcs.a) && + (b == hcs.b) case _ => false } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala index 7d95ba3ce..183b5f344 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SetDiffTest.scala @@ -91,7 +91,7 @@ class SetDiffTest extends AnyWordSpec with Matchers with Checkers { a.strictApply(set) match { case None => (a.remove.diff(set).nonEmpty || (a.add & set).nonEmpty) && - (a.invert(a(set)) != set) // invert only succeeds when strictApply does + (a.invert(a(set)) != set) // invert only succeeds when strictApply does /* * And if it DOES succeed it inverts! * Note that this law is not true: diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala index 112945300..7be81988f 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/PromiseLinkMonoid.scala @@ -23,7 +23,7 @@ import com.twitter.util.Promise * general version of the TunnelMonoid. See the documentation for TunnelMonoid for general motivation. NOTE: * the Promise will be fulfilled with the value just before the PromiseLink is calculated. 
*/ -class PromiseLinkMonoid[V](monoid: Monoid[V]) extends Monoid[PromiseLink[V]] { //TODo(jcoveney) rename PromiseLink +class PromiseLinkMonoid[V](monoid: Monoid[V]) extends Monoid[PromiseLink[V]] { // TODo(jcoveney) rename PromiseLink def zero: PromiseLink[V] = PromiseLink(new Promise, monoid.zero) def plus(older: PromiseLink[V], newer: PromiseLink[V]): PromiseLink[V] = { From 7101c12349468b2f5bdbe7b6bbee91cbfd9614df Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 11 Jan 2022 18:32:16 +0100 Subject: [PATCH 251/306] Update sbt-scoverage to 1.9.3 (#1047) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index dab1cc117..e27ed36e7 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.2") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.32") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") From 0861d71dd58550fb5665850ac3b535a9d552676c Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 12 Jan 2022 14:19:41 +0100 Subject: [PATCH 252/306] Update sbt-scalafix to 0.9.34 (#1048) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index e27ed36e7..69feeebd1 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,5 +12,5 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.32") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.34") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") From d96093b8f376cec534927c8bc708f68cf12aa650 Mon Sep 17 00:00:00 2001 From: Sam Ritchie Date: Wed, 12 Jan 2022 12:30:46 -0700 Subject: [PATCH 253/306] more efficient moments (#1049) --- .../com/twitter/algebird/MomentsGroup.scala | 40 ++++++++++--------- 1 file changed, 22 insertions(+), 18 deletions(-) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala index a5f8a57b2..b0a31e523 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala @@ -208,21 +208,23 @@ class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { if (countCombined == 0.0) zero else { val delta = b.mean - a.mean + val delta_n = delta / countCombined + val delta_n2 = delta_n * delta_n + val delta_n3 = delta_n2 * delta_n + val meanCombined = Moments.getCombinedMeanDouble(a.m0D, a.mean, b.m0D, b.mean) - val m2 = a.m2 + b.m2 + - math.pow(delta, 2) * a.m0D * b.m0D / countCombined + val m2 = a.m2 + b.m2 + delta * delta_n * a.m0D * b.m0D val m3 = a.m3 + b.m3 + - math.pow(delta, 3) * a.m0D * b.m0D * (a.m0D - b.m0D) / math.pow(countCombined, 2) + - 3 * delta * (a.m0D * b.m2 - b.m0D * 
a.m2) / countCombined + delta * delta_n2 * a.m0D * b.m0D * (a.m0D - b.m0D) + + 3 * delta_n * (a.m0D * b.m2 - b.m0D * a.m2) val m4 = a.m4 + b.m4 + - math.pow(delta, 4) * a.m0D * b.m0D * (math.pow(a.m0D, 2) - - a.m0D * b.m0D + math.pow(b.m0D, 2)) / math.pow(countCombined, 3) + - 6 * math.pow(delta, 2) * (math.pow(a.m0D, 2) * b.m2 + - math.pow(b.m0D, 2) * a.m2) / math.pow(countCombined, 2) + - 4 * delta * (a.m0D * b.m3 - b.m0D * a.m3) / countCombined + delta * delta_n3 * a.m0D * b.m0D * (math.pow(a.m0D, 2) - + a.m0D * b.m0D + math.pow(b.m0D, 2)) + + 6 * delta_n2 * (math.pow(a.m0D, 2) * b.m2 + math.pow(b.m0D, 2) * a.m2) + + 4 * delta_n * (a.m0D * b.m3 - b.m0D * a.m3) new Moments(countCombined, meanCombined, m2, m3, m4) } @@ -260,21 +262,23 @@ class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { m4 = 0.0 } else { val delta = b.mean - mean + val delta_n = delta / countCombined + val delta_n2 = delta_n * delta_n + val delta_n3 = delta_n2 * delta_n + val meanCombined = Moments.getCombinedMeanDouble(count, mean, b.m0D, b.mean) - val m2Combined = m2 + b.m2 + - math.pow(delta, 2) * count * b.m0D / countCombined + val m2Combined = m2 + b.m2 + delta * delta_n * count * b.m0D val m3Combined = m3 + b.m3 + - math.pow(delta, 3) * count * b.m0D * (count - b.m0D) / math.pow(countCombined, 2) + - 3 * delta * (count * b.m2 - b.m0D * m2) / countCombined + delta * delta_n2 * count * b.m0D * (count - b.m0D) + + 3 * delta_n * (count * b.m2 - b.m0D * m2) val m4Combined = m4 + b.m4 + - math.pow(delta, 4) * count * b.m0D * (math.pow(count, 2) - - count * b.m0D + math.pow(b.m0D, 2)) / math.pow(countCombined, 3) + - 6 * math.pow(delta, 2) * (math.pow(count, 2) * b.m2 + - math.pow(b.m0D, 2) * m2) / math.pow(countCombined, 2) + - 4 * delta * (count * b.m3 - b.m0D * m3) / countCombined + delta * delta_n3 * count * b.m0D * (math.pow(count, 2) - + count * b.m0D + math.pow(b.m0D, 2)) + + 6 * delta_n2 * (math.pow(count, 2) * b.m2 + math.pow(b.m0D, 2) * m2) + + 4 * delta_n * (count * b.m3 - b.m0D * m3) mean = meanCombined m2 = m2Combined From c91b3a5f5f79a4ed830d59ffc677e4c9b9687e54 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 13 Jan 2022 23:11:49 +0100 Subject: [PATCH 254/306] Update scala-library, scala-reflect to 2.13.8 (#1051) --- build.sbt | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/build.sbt b/build.sbt index f00574264..25d4aa666 100644 --- a/build.sbt +++ b/build.sbt @@ -209,7 +209,7 @@ def module(name: String) = { } lazy val algebirdCore = module("core").settings( - crossScalaVersions += "2.13.7", + crossScalaVersions += "2.13.8", initialCommands := """ import com.twitter.algebird._ """.stripMargin('|'), @@ -239,7 +239,7 @@ lazy val algebirdCore = module("core").settings( lazy val algebirdTest = module("test") .settings( Test / testOptions ++= Seq(Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "4")), - crossScalaVersions += "2.13.7", + crossScalaVersions += "2.13.8", libraryDependencies ++= Seq( "org.scalacheck" %% "scalacheck" % scalacheckVersion, @@ -270,14 +270,14 @@ lazy val algebirdBenchmark = module("benchmark") lazy val algebirdUtil = module("util") .settings( - crossScalaVersions += "2.13.7", + crossScalaVersions += "2.13.8", libraryDependencies ++= Seq("com.twitter" %% "util-core" % utilVersion) ) .dependsOn(algebirdCore, algebirdTest % "test->test") lazy val algebirdBijection = module("bijection") .settings( - crossScalaVersions += "2.13.7", + crossScalaVersions += "2.13.8", 
libraryDependencies += "com.twitter" %% "bijection-core" % bijectionVersion ) .dependsOn(algebirdCore, algebirdTest % "test->test") @@ -294,7 +294,7 @@ lazy val algebirdSpark = module("spark") lazy val algebirdGeneric = module("generic") .settings( - crossScalaVersions += "2.13.7", + crossScalaVersions += "2.13.8", libraryDependencies ++= Seq( "com.chuusai" %% "shapeless" % "2.3.7", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" From a84caa0d489d323d05676c787ca7abdf037de2cc Mon Sep 17 00:00:00 2001 From: Sam Ritchie Date: Thu, 13 Jan 2022 16:55:13 -0700 Subject: [PATCH 255/306] Mutable MomentsState, more efficient + for double added to Moments (#1050) --- .../algebird/benchmark/MomentsBenchmark.scala | 52 +++++ .../com/twitter/algebird/MomentsGroup.scala | 208 +++++++++++++----- .../com/twitter/algebird/MomentsLaws.scala | 40 +++- build.sbt | 2 +- 4 files changed, 239 insertions(+), 63 deletions(-) create mode 100644 algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/MomentsBenchmark.scala diff --git a/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/MomentsBenchmark.scala b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/MomentsBenchmark.scala new file mode 100644 index 000000000..1238268ad --- /dev/null +++ b/algebird-benchmark/src/main/scala/com/twitter/algebird/benchmark/MomentsBenchmark.scala @@ -0,0 +1,52 @@ +package com.twitter.algebird +package benchmark + +import scala.util.Random + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +object MomentsBenchmark { + @State(Scope.Benchmark) + class MomentsState { + @Param(Array("10000")) + var numElements: Int = 0 + + var inputData: Seq[Double] = _ + var inputMoments: Seq[Moments] = _ + + @Setup(Level.Trial) + def setup(): Unit = { + inputData = Seq.fill(numElements)(Random.nextInt(1000).toLong) + inputMoments = inputData.map(Moments(_)) + } + } +} + +class MomentsBenchmark { + import MomentsBenchmark._ + + @Benchmark + def timePlusDoubles(state: MomentsState, bh: Blackhole): Unit = + bh.consume( + state.inputData.foldLeft(Moments.momentsMonoid.zero)(_ + _) + ) + + @Benchmark + def timePlusMoments(state: MomentsState, bh: Blackhole): Unit = + bh.consume( + state.inputMoments.foldLeft(Moments.momentsMonoid.zero)(_ + _) + ) + + @Benchmark + def timeSumOption(state: MomentsState, bh: Blackhole): Unit = + bh.consume(Moments.momentsMonoid.sumOption(state.inputMoments)) + + @Benchmark + def timeFold(state: MomentsState, bh: Blackhole): Unit = + bh.consume(Moments.fold.overTraversable(state.inputData)) + + @Benchmark + def timeAggregate(state: MomentsState, bh: Blackhole): Unit = + bh.consume(Moments.aggregator(state.inputData)) +} diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala index b0a31e523..733efb9ed 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala @@ -50,10 +50,54 @@ sealed class Moments(val m0D: Double, val m1: Double, val m2: Double, val m3: Do def stddev: Double = math.sqrt(variance) def skewness: Double = - math.sqrt(m0D) * m3 / math.pow(m2, 1.5) + m3 / (m2 * stddev) def kurtosis: Double = - m0D * m4 / math.pow(m2, 2) - 3 + m0D * m4 / (m2 * m2) - 3 + + /** + * Combines this instance with another [[Moments]] instance. 
+ * @param b + * the other instance + * @return + * a [[Moments]] instances representing the combined moments of this instance and `b` + */ + def +(b: Moments): Moments = Moments.momentsMonoid.plus(this, b) + + /** + * Returns a new [[Moments]] instance generated by merging in the new observation `b`. + * @param b + * a new observation + * @return + * a [[Moments]] instance representing the combined moments of this instance and `b`. + */ + def +(b: Double): Moments = { + val n = m0D + 1 + val delta = b - mean + val delta_n = delta / n + val delta_n2 = delta_n * delta_n + val term1 = delta * delta_n * m0D + + val meanCombined = Moments.getCombinedMeanDouble(m0D, mean, 1.0, b) + val m2combined = m2 + term1 + val m3combined = m3 + term1 * delta_n * (n - 2) - 3 * delta_n * m2 + val m4combined = m4 + term1 * delta_n2 * (n * n - 3 * n + 3) + + 6 * delta_n2 * m2 - 4 * delta_n * m3 + + new Moments(n, meanCombined, m2combined, m3combined, m4combined) + } + + /** + * Returns a [[Fold]] instance that uses `+` to accumulate deltas into this [[Moments]] instance. + */ + def fold: Fold[Double, Moments] = + Fold.foldMutable[Moments.MomentsState, Double, Moments]( + { case (state, x) => + state += x + }, + _ => Moments.MomentsState.fromMoments(this), + (state: Moments.MomentsState) => state.toMoments + ) override def productArity: Int = 5 override def productElement(idx: Int): Any = @@ -102,6 +146,96 @@ sealed class Moments(val m0D: Double, val m1: Double, val m2: Double, val m3: Do } object Moments { + final class MomentsState( + var count: Double, + var mean: Double, + var m2: Double, + var m3: Double, + var m4: Double + ) { + + def +=(b: Moments): this.type = { + /* + * Unfortunately we copy the code from the monoid's plus implementation, + * but we do it to avoid allocating a new Moments on every item in the + * loop. 
the Monoid laws test that sum matches looping on plus + */ + val countCombined = count + b.m0D + + if (countCombined == 0.0) { + mean = 0.0 + m2 = 0.0 + m3 = 0.0 + m4 = 0.0 + } else { + val delta = b.mean - mean + val delta_n = delta / countCombined + val delta_n2 = delta_n * delta_n + val delta_n3 = delta_n2 * delta_n + val count_sq = count * count + val rn_sq = b.m0D * b.m0D + + val meanCombined = Moments.getCombinedMeanDouble(count, mean, b.m0D, b.mean) + + val m2Combined = m2 + b.m2 + delta * delta_n * count * b.m0D + + val m3Combined = m3 + b.m3 + + delta * delta_n2 * count * b.m0D * (count - b.m0D) + + 3 * delta_n * (count * b.m2 - b.m0D * m2) + + val m4Combined = m4 + b.m4 + + delta * delta_n3 * count * b.m0D * + (count_sq - count * b.m0D + rn_sq) + + 6 * delta_n2 * (count_sq * b.m2 + rn_sq * m2) + + 4 * delta_n * (count * b.m3 - b.m0D * m3) + + mean = meanCombined + m2 = m2Combined + m3 = m3Combined + m4 = m4Combined + } + + count = countCombined + this + } + + def +=(b: Double): this.type = { + val prevCount = count + count += 1 + + val delta = b - mean + val delta_n = delta / count + val delta_n2 = delta_n * delta_n + val term1 = delta * delta_n * prevCount + + mean = Moments.getCombinedMeanDouble(prevCount, mean, 1.0, b) + m4 += term1 * delta_n2 * (count * count - 3 * count + 3) + + 6 * delta_n2 * m2 - 4 * delta_n * m3 + m3 += term1 * delta_n * (count - 2) - 3 * delta_n * m2 + m2 += term1 + this + } + + def toMoments: Moments = new Moments(count, mean, m2, m3, m4) + + def resetFromMoments(m: Moments): this.type = { + count = m.m0D + mean = m.m1 + m2 = m.m2 + m3 = m.m3 + m4 = m.m4 + this + } + } + + object MomentsState { + def fromMoments(m: Moments): MomentsState = + new MomentsState(m.m0D, m.m1, m.m2, m.m3, m.m4) + + def newEmpty(): MomentsState = + new MomentsState(0.0, 0.0, 0.0, 0.0, 0.0) + } + @deprecated("use monoid[Moments], this isn't lawful for negate", "0.13.8") def group: Group[Moments] with CommutativeGroup[Moments] = MomentsGroup @@ -111,6 +245,8 @@ object Moments { val aggregator: MomentsAggregator.type = MomentsAggregator + val fold: Fold[Double, Moments] = momentsMonoid.zero.fold + def numericAggregator[N](implicit num: Numeric[N]): MonoidAggregator[N, Moments, Moments] = Aggregator.prepareMonoid { n: N => Moments(num.toDouble(n)) } @@ -211,6 +347,8 @@ class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { val delta_n = delta / countCombined val delta_n2 = delta_n * delta_n val delta_n3 = delta_n2 * delta_n + val ln_sq = a.m0D * a.m0D + val rn_sq = b.m0D * b.m0D val meanCombined = Moments.getCombinedMeanDouble(a.m0D, a.mean, b.m0D, b.mean) @@ -221,9 +359,8 @@ class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { 3 * delta_n * (a.m0D * b.m2 - b.m0D * a.m2) val m4 = a.m4 + b.m4 + - delta * delta_n3 * a.m0D * b.m0D * (math.pow(a.m0D, 2) - - a.m0D * b.m0D + math.pow(b.m0D, 2)) + - 6 * delta_n2 * (math.pow(a.m0D, 2) * b.m2 + math.pow(b.m0D, 2) * a.m2) + + delta * delta_n3 * a.m0D * b.m0D * (ln_sq - a.m0D * b.m0D + rn_sq) + + 6 * delta_n2 * (ln_sq * b.m2 + rn_sq * a.m2) + 4 * delta_n * (a.m0D * b.m3 - b.m0D * a.m3) new Moments(countCombined, meanCombined, m2, m3, m4) @@ -234,62 +371,19 @@ class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { if (items.isEmpty) None else { val iter = items.toIterator - val init = iter.next() - var count: Double = init.m0D - var mean: Double = init.mean - var m2: Double = init.m2 - var m3: Double = init.m3 - var m4: Double = init.m4 - - while (iter.hasNext) { - - /* - 
* Unfortunately we copy the code in plus, but we do - * it to avoid allocating a new Moments on every item - * in the loop. the Monoid laws test that sum - * matches looping on plus - */ - val b = iter.next() - - val countCombined = count + b.m0D - - if (countCombined == 0.0) { - mean = 0.0 - m2 = 0.0 - m3 = 0.0 - m4 = 0.0 - } else { - val delta = b.mean - mean - val delta_n = delta / countCombined - val delta_n2 = delta_n * delta_n - val delta_n3 = delta_n2 * delta_n - - val meanCombined = Moments.getCombinedMeanDouble(count, mean, b.m0D, b.mean) - - val m2Combined = m2 + b.m2 + delta * delta_n * count * b.m0D - - val m3Combined = m3 + b.m3 + - delta * delta_n2 * count * b.m0D * (count - b.m0D) + - 3 * delta_n * (count * b.m2 - b.m0D * m2) - - val m4Combined = m4 + b.m4 + - delta * delta_n3 * count * b.m0D * (math.pow(count, 2) - - count * b.m0D + math.pow(b.m0D, 2)) + - 6 * delta_n2 * (math.pow(count, 2) * b.m2 + math.pow(b.m0D, 2) * m2) + - 4 * delta_n * (count * b.m3 - b.m0D * m3) - - mean = meanCombined - m2 = m2Combined - m3 = m3Combined - m4 = m4Combined + // If there is only a single item, skip the MomentsState instantiation and + // return it. + if (!iter.hasNext) { + Some(init) + } else { + val state = Moments.MomentsState.fromMoments(init) + while (iter.hasNext) { + state += iter.next() } - - count = countCombined + Some(state.toMoments) } - - Some(new Moments(count, mean, m2, m3, m4)) } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala index aff446ebd..06a7caddd 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala @@ -25,8 +25,6 @@ class MomentsLaws extends CheckProperties { val recur = Gen.lzy(opBasedGen[A](genA)) val pair = Gen.zip(recur, recur) - import Operators.Ops - Gen.frequency( (10, init), (1, pair.map { case (a, b) => a + b }) @@ -56,8 +54,9 @@ class MomentsLaws extends CheckProperties { } property("scaling by a and b is the same as scaling by a*b; similarly for addition") { - // use Int here instead of doubles so that we don't have to worry about overlfowing to Infinity and having to - // fine-tune numerical precision thresholds. + // use Int here instead of doubles so that we don't have to worry about + // overflowing to Infinity and having to fine-tune numerical precision + // thresholds. 
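For reference only — a minimal, self-contained Scala sketch (not taken from any commit above; SimpleMoments and merge are hypothetical names) of the Welford/Chan-style streaming updates that [PATCH 253/306] and [PATCH 255/306] apply to Algebird's Moments. Only the mean and second central moment are shown; the m3/m4 terms in the actual commits follow the same delta/delta_n pattern, and the real code routes the mean through Moments.getCombinedMeanDouble for extra stability.

    final case class SimpleMoments(count: Double, mean: Double, m2: Double) {
      // Single-observation update, cf. the `+(b: Double)` added in PATCH 255.
      def +(x: Double): SimpleMoments = {
        val n = count + 1.0
        val delta = x - mean
        val deltaN = delta / n
        // mean moves by delta / n; m2 accumulates delta * deltaN * (n - 1)
        SimpleMoments(n, mean + deltaN, m2 + delta * deltaN * count)
      }
      // Pairwise combine, cf. the division-light `plus` rewrite in PATCH 253.
      def merge(that: SimpleMoments): SimpleMoments = {
        val n = count + that.count
        if (n == 0.0) this
        else {
          val delta = that.mean - mean
          val deltaN = delta / n
          SimpleMoments(
            n,
            (count * mean + that.count * that.mean) / n,
            // matches the patch's `delta * delta_n * a.m0D * b.m0D` term
            m2 + that.m2 + delta * deltaN * count * that.count
          )
        }
      }
      def variance: Double = m2 / count
    }

    // Example: both paths agree on mean 3.0 and population variance 2.0.
    // val xs = List(1.0, 2.0, 3.0, 4.0, 5.0)
    // val viaFold  = xs.foldLeft(SimpleMoments(0.0, 0.0, 0.0))(_ + _)
    // val viaMerge = xs.take(2).foldLeft(SimpleMoments(0.0, 0.0, 0.0))(_ + _)
    //   .merge(xs.drop(2).foldLeft(SimpleMoments(0.0, 0.0, 0.0))(_ + _))

The rewrites in the patches above make the same move: reuse delta / delta_n terms instead of repeated math.pow calls, and (in PATCH 255) accumulate into a mutable MomentsState so that sumOption and fold avoid allocating a Moments per element.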
forAll(opGen, Gen.choose(0, Int.MaxValue), Gen.choose(0, Int.MaxValue)) { (mom, a0, b0) => val a = a0 & Int.MaxValue val b = b0 & Int.MaxValue @@ -75,6 +74,37 @@ class MomentsLaws extends CheckProperties { } } + property("adding double matches adding singleton Moments instance") { + forAll(opGen, Gen.choose(0, Int.MaxValue)) { (mom, x) => + val plusMoments = mom + Moments(x) + val plusDouble = mom + x + equiv.equiv(plusMoments, plusDouble) + } + } + + property("adding doubles via +, fold, aggregator should match") { + forAll(opGen, Gen.containerOf[Seq, Double](Gen.choose(0, 1000))) { (mom, xs) => + val fullViaAdd = xs.foldLeft(mom)(_ + _) + val fullViaFold = mom.fold.overTraversable(xs) + val fullViaAgg = mom + MomentsAggregator(xs) + + equiv.equiv(fullViaAdd, fullViaFold) + equiv.equiv(fullViaAdd, fullViaAgg) + } + } + + property("adding Moment instances via +, sumOption should match") { + forAll(opGen, Gen.containerOf[Seq, Double](Gen.choose(0, 1000))) { (mom, ints) => + val xs = ints.map(Moments(_)).toTraversable + val monoid = Moments.momentsMonoid + + val fullViaAdd = xs.foldLeft(mom)(_ + _) + val fullViaMonoid = mom + monoid.sum(xs) + + equiv.equiv(fullViaAdd, fullViaMonoid) + } + } + property("scaling does affect total weight, doesn't affect mean, variance, or moments") { // def sign(x: Int): Int = if (x < 0) -1 else 1 forAll(opGen, Gen.choose(0, Int.MaxValue)) { (mom, a0) => @@ -100,7 +130,7 @@ class MomentsTest extends AnyWordSpec with Matchers { * Given a list of doubles, create a Moments object to hold the list's central moments. */ def getMoments(xs: List[Double]): Moments = - MomentsAggregator(xs) + Moments.aggregator(xs) "Moments should count" in { val m1 = getMoments(List(1, 2, 3, 4, 5)) diff --git a/build.sbt b/build.sbt index 25d4aa666..87700eda9 100644 --- a/build.sbt +++ b/build.sbt @@ -69,7 +69,7 @@ val sharedSettings = Seq( Nil } }, - javacOptions ++= Seq("-target", "1.6", "-source", "1.6"), + javacOptions ++= Seq("-target", "1.8", "-source", "1.8"), libraryDependencies ++= Seq( "junit" % "junit" % "4.13.2" % Test, "com.github.sbt" % "junit-interface" % "0.13.3" % Test From e7657fdb6da75bffb2739e791534569731815317 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 24 Jan 2022 20:37:18 +0100 Subject: [PATCH 256/306] Update scalatest to 3.2.11 (#1054) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 87700eda9..c719924a7 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val javaEwahVersion = "1.1.13" val kindProjectorVersion = "0.13.2" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.10" +val scalaTestVersion = "3.2.11" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.6.0" From 2a2d73bc2716d5fb13fc8a30442312b99804a8d2 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 1 Feb 2022 21:53:29 +0100 Subject: [PATCH 257/306] Update sbt to 1.6.2 (#1055) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 3161d2146..c8fcab543 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.6.1 +sbt.version=1.6.2 From 8142c3c7a1e1beb7227b192917f4402799cb5bdb Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 13 Feb 2022 18:58:47 
+0100 Subject: [PATCH 258/306] Update shapeless to 2.3.8 (#1056) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index c719924a7..9228691a8 100644 --- a/build.sbt +++ b/build.sbt @@ -296,7 +296,7 @@ lazy val algebirdGeneric = module("generic") .settings( crossScalaVersions += "2.13.8", libraryDependencies ++= Seq( - "com.chuusai" %% "shapeless" % "2.3.7", + "com.chuusai" %% "shapeless" % "2.3.8", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" ) ++ { if (isScala213x(scalaVersion.value)) { From 94eaa07f7722313c709d778706fa8623248ffd01 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 11 Mar 2022 16:36:48 +0000 Subject: [PATCH 259/306] Bump actions/checkout from 2.4.0 to 3 (#1057) Bumps [actions/checkout](https://github.com/actions/checkout) from 2.4.0 to 3. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v2.4.0...v3) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 10 +++++----- .github/workflows/release.yml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9b5ef514c..36f6a477a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,7 +5,7 @@ jobs: checks: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.4.0 + - uses: actions/checkout@v3 - name: cache SBT uses: coursier/cache-action@v6 - name: Java 11 setup @@ -14,7 +14,7 @@ jobs: test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.4.0 + - uses: actions/checkout@v3 - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup @@ -34,7 +34,7 @@ jobs: test-coverage: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.4.0 + - uses: actions/checkout@v3 - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup @@ -45,7 +45,7 @@ jobs: mimaReport: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.4.0 + - uses: actions/checkout@v3 - name: cache SBT uses: coursier/cache-action@v6 - name: java ${{matrix.java}} setup @@ -65,7 +65,7 @@ jobs: microsite: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.4.0 + - uses: actions/checkout@v3 - name: Ruby setup uses: actions/setup-ruby@v1.1.3 with: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index de425193b..f58dab8f4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,7 +7,7 @@ jobs: publish: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2.4.0 + - uses: actions/checkout@v3 - uses: olafurpg/setup-scala@v13 - name: Publish ${{ github.ref }} run: sbt ci-release From e941663f86d3e6ebfebf609c6a2669f8cb578d5a Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 23 Mar 2022 17:20:38 +0100 Subject: [PATCH 260/306] Update scala-collection-compat to 2.7.0 (#1058) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 9228691a8..d9ad7ee45 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val 
scalaTestVersion = "3.2.11" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" -val scalaCollectionCompat = "2.6.0" +val scalaCollectionCompat = "2.7.0" val utilVersion = "21.2.0" val sparkVersion = "2.4.8" From 3cceaae19b29cc91f1f60b82d40ef20df34fdff9 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 28 Mar 2022 02:21:34 +0200 Subject: [PATCH 261/306] Update shapeless to 2.3.9 (#1059) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index d9ad7ee45..d48d50539 100644 --- a/build.sbt +++ b/build.sbt @@ -296,7 +296,7 @@ lazy val algebirdGeneric = module("generic") .settings( crossScalaVersions += "2.13.8", libraryDependencies ++= Seq( - "com.chuusai" %% "shapeless" % "2.3.8", + "com.chuusai" %% "shapeless" % "2.3.9", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" ) ++ { if (isScala213x(scalaVersion.value)) { From 723ca3cfec2c4e76ac4f3a9c4e30e67a5b604aab Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 8 Apr 2022 19:50:14 +0200 Subject: [PATCH 262/306] Update sbt-scalafix to 0.10.0 (#1061) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 69feeebd1..10abbe949 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,5 +12,5 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.34") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.0") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") From 4bab457e7a31744c8df6a6c99bd3bda65e9d601b Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 9 Apr 2022 06:52:06 +0200 Subject: [PATCH 263/306] Update sbt-mima-plugin to 1.1.0 (#1060) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 10abbe949..18d8df595 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -8,7 +8,7 @@ resolvers ++= Seq( addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") From 7e00741bbea0eff492ff1487e4ece53574739d6f Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Tue, 26 Apr 2022 16:38:11 +0200 Subject: [PATCH 264/306] Update scalatest to 3.2.12 (#1062) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index d48d50539..bae816701 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val javaEwahVersion = "1.1.13" val kindProjectorVersion = "0.13.2" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.11" +val scalaTestVersion = "3.2.12" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.7.0" From 
c18a6b7c86c34e8cc7b3747fc1ea83d42c8b5410 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Thu, 28 Apr 2022 07:52:50 +0100 Subject: [PATCH 265/306] Update scalafmt to v3.5.2 (#1063) --- .scalafmt.conf | 2 +- .../com/twitter/algebird/CountMinSketch.scala | 4 ++-- .../scala/com/twitter/algebird/Eventually.scala | 17 ++++++++++++----- .../main/scala/com/twitter/algebird/Fold.scala | 2 +- .../com/twitter/algebird/ResetAlgebra.scala | 10 +++++++--- .../scala/com/twitter/algebird/Window.scala | 2 +- 6 files changed, 24 insertions(+), 13 deletions(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index b7f5911ec..7d23dff4c 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.3.1 +version=3.5.2 runner.dialect = scala212 fileOverride { "glob:**/scala-2.13*/**" { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala b/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala index 3e8d47c39..f000c7fe3 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala @@ -552,7 +552,7 @@ object CMS { * and thus the returned frequency estimates are always instances of `Approximate[Long]`. * * @example - * {{{ + * {{{ * * // Creates a monoid for a CMS that can count `Long` elements. val cmsMonoid: CMSMonoid[Long] = { val eps = * 0.001 val delta = 1E-10 val seed = 1 CMS.monoid[Long](eps, delta, seed) } @@ -560,7 +560,7 @@ object CMS { * // Creates a CMS instance that has counted the element `1L`. val cms: CMS[Long] = cmsMonoid.create(1L) * * // Estimates the frequency of `1L` val estimate: Approximate[Long] = cms.frequency(1L) - * }}} + * }}} * * @tparam K * The type used to identify the elements to be counted. diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala b/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala index ea4c8d8d6..19522f589 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala @@ -23,11 +23,18 @@ import scala.collection.compat._ * type O and the eventual type E. In the case of Semigroup, we specify * - Two Semigroups eventualSemigroup and originalSemigroup * - A Semigroup homomorphism convert: O => E - * - A conditional mustConvert: O => Boolean Then we get a Semigroup[Either[E,O]], where: Left(x) + Left(y) - * = Left(x+y) Left(x) + Right(y) = Left(x+convert(y)) Right(x) + Left(y) = Left(convert(x)+y) Right(x) + - * Right(y) = Left(convert(x+y)) if mustConvert(x+y) Right(x+y) otherwise. EventuallyMonoid, EventuallyGroup, - * and EventuallyRing are defined analogously, with the contract that convert respect the appropriate - * structure. + * - A conditional mustConvert: O => Boolean + * + * Then we get a Semigroup[Either[E,O]], where: + * {{{ + * Left(x) + Left(y) = Left(x+y) + * Left(x) + Right(y) = Left(x+convert(y)) + * Right(x) + Left(y) = Left(convert(x)+y) + * Right(x) + Right(y) = Left(convert(x+y)) if mustConvert(x+y) Right(x+y) otherwise. + * }}} + * + * EventuallyMonoid, EventuallyGroup, and EventuallyRing are defined analogously, with the contract that + * convert respect the appropriate structure. 
* * @param E * eventual type diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala b/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala index ed95d61f5..c2f21d145 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala @@ -238,7 +238,7 @@ object Fold extends CompatFold { /** * A Fold that does no work and returns a constant. Analogous to Function1 const: def const[A, B](b: B): (A - * => B) = { _ => b } + * \=> B) = { _ => b } */ def const[O](value: O): Fold[Any, O] = Fold.foldLeft(value) { case (u, _) => u } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/ResetAlgebra.scala b/algebird-core/src/main/scala/com/twitter/algebird/ResetAlgebra.scala index 63273e575..72011760c 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/ResetAlgebra.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/ResetAlgebra.scala @@ -16,9 +16,13 @@ limitations under the License. package com.twitter.algebird /** - * Used to represent cases where we need to periodically reset a + b = a + b - * |a + b = |(a + b) a + |b = |b - * |a + |b = |b + * Used to represent cases where we need to periodically reset + * {{{ + * a + b = a + b + * |a + b = |(a + b) + * a + |b = |b + * |a + |b = |b + * }}} */ sealed trait ResetState[+A] { def get: A } case class SetValue[+A](override val get: A) extends ResetState[A] diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Window.scala b/algebird-core/src/main/scala/com/twitter/algebird/Window.scala index 59db253ff..8df431d7e 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Window.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Window.scala @@ -31,7 +31,7 @@ import Operators._ * Example usage: * * case class W28[T](window: Window[T]) { def total = this.window.total def items = this.window.items def size - * = this.window.size } + * \= this.window.size } * * object W28 { val windowSize = 28 def apply[T](v: T): W28[T] = W28[T](Window(v)) * From e3b4565386e3a85f7c971902c3706d84b6fcb5e3 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 11 Aug 2022 17:21:25 +0200 Subject: [PATCH 266/306] Update scalatest to 3.2.13 (#1072) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index bae816701..d9a2a0f66 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val javaEwahVersion = "1.1.13" val kindProjectorVersion = "0.13.2" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.12" +val scalaTestVersion = "3.2.13" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.7.0" From 8894addd6b68604f184f4c4b075e87d171b24ac3 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 11 Aug 2022 17:21:45 +0200 Subject: [PATCH 267/306] Update scala-collection-compat to 2.8.1 (#1071) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index d9a2a0f66..6b6045077 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.13" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" -val scalaCollectionCompat = "2.7.0" +val scalaCollectionCompat = "2.8.1" val utilVersion = "21.2.0" val sparkVersion = "2.4.8" From 974523eae0ddaa5958c7d35135557b21b83dad4c Mon Sep 17 
00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 11 Aug 2022 17:23:57 +0200 Subject: [PATCH 268/306] Update sbt-scalafix to 0.10.1 (#1065) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 18d8df595..99deee604 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,5 +12,5 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.0") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.1") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") From 1a2a82cf52a916f248f57be9d86fb088d97458c0 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 19 Aug 2022 16:02:34 +0200 Subject: [PATCH 269/306] Update sbt to 1.7.1 (#1070) Co-authored-by: Filipe Regadas --- .github/workflows/ci.yml | 6 +++--- project/build.properties | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 36f6a477a..be0399ab8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,7 +30,7 @@ jobs: scala: - 2.11.12 - 2.12.15 - - 2.13.7 + - 2.13.8 test-coverage: runs-on: ubuntu-latest steps: @@ -40,7 +40,7 @@ jobs: - name: java ${{matrix.java}} setup uses: olafurpg/setup-scala@v13 - run: | - sbt ++2.12.12 coverage test coverageReport + sbt ++2.12.15 coverage test coverageReport bash <(curl -s https://codecov.io/bash) mimaReport: runs-on: ubuntu-latest @@ -61,7 +61,7 @@ jobs: scala: - 2.11.12 - 2.12.15 - - 2.13.7 + - 2.13.8 microsite: runs-on: ubuntu-latest steps: diff --git a/project/build.properties b/project/build.properties index c8fcab543..22af2628c 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.6.2 +sbt.version=1.7.1 From 7e03456a1e0115ac53fad876633a47c5eb01b7ae Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 24 Aug 2022 18:37:53 +0200 Subject: [PATCH 270/306] Update scala-library, scala-reflect to 2.12.16 (#1066) --- .github/workflows/ci.yml | 6 +++--- build.sbt | 2 +- project/plugins.sbt | 4 +++- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index be0399ab8..df3cdaf28 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,7 +29,7 @@ jobs: - 11 scala: - 2.11.12 - - 2.12.15 + - 2.12.16 - 2.13.8 test-coverage: runs-on: ubuntu-latest @@ -40,7 +40,7 @@ jobs: - name: java ${{matrix.java}} setup uses: olafurpg/setup-scala@v13 - run: | - sbt ++2.12.15 coverage test coverageReport + sbt ++2.12.16 coverage test coverageReport bash <(curl -s https://codecov.io/bash) mimaReport: runs-on: ubuntu-latest @@ -60,7 +60,7 @@ jobs: - 11 scala: - 2.11.12 - - 2.12.15 + - 2.12.16 - 2.13.8 microsite: runs-on: ubuntu-latest diff --git a/build.sbt b/build.sbt index 6b6045077..d4b474a1a 100644 --- a/build.sbt +++ b/build.sbt @@ -39,7 +39,7 @@ crossScalaVersions := Nil val sharedSettings = Seq( organization := "com.twitter", - scalaVersion := "2.12.15", + scalaVersion := "2.12.16", crossScalaVersions := Seq("2.11.12", scalaVersion.value), resolvers ++= Seq( Opts.resolver.sonatypeSnapshots, diff --git a/project/plugins.sbt 
b/project/plugins.sbt index 99deee604..491ce007e 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,9 @@ addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.2") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.1") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") + +dependencyOverrides += "org.scala-lang.modules" %% "scala-xml" % "2.1.0" From 5dcafc0011d3853b3cca367fb4261da51a1fac3b Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Wed, 24 Aug 2022 18:02:29 +0100 Subject: [PATCH 271/306] Update scalafmt to v3.5.9 (#1076) --- .scalafmt.conf | 2 +- .../com/twitter/algebird/AdaptiveVector.scala | 2 +- .../scala/com/twitter/algebird/ExpHist.scala | 2 +- .../algebird/GeneratedProductAlgebra.scala | 55 +++++++++++++++---- .../com/twitter/algebird/HyperLogLog.scala | 6 +- .../com/twitter/algebird/JavaMonoids.scala | 2 +- .../com/twitter/algebird/MapAlgebra.scala | 6 +- .../scala/com/twitter/algebird/Metric.scala | 4 +- .../com/twitter/algebird/MomentsGroup.scala | 4 +- .../scala/com/twitter/algebird/QTree.scala | 2 +- .../com/twitter/algebird/SGDMonoid.scala | 2 +- .../com/twitter/algebird/SpaceSaver.scala | 2 +- .../com/twitter/algebird/TopKMonoid.scala | 2 +- .../com/twitter/algebird/BaseProperties.scala | 2 +- .../com/twitter/algebird/MonadLaws.scala | 4 +- .../twitter/algebird/ApproximateTest.scala | 4 +- .../algebird/CollectionSpecification.scala | 34 ++++++------ .../twitter/algebird/CorrelationLaws.scala | 4 +- .../twitter/algebird/DecayingCMSTest.scala | 6 +- .../com/twitter/algebird/EventuallyTest.scala | 4 +- .../twitter/algebird/HyperLogLogTest.scala | 2 +- .../com/twitter/algebird/IntervalLaws.scala | 22 ++++---- .../com/twitter/algebird/MinHasherTest.scala | 2 +- .../com/twitter/algebird/MomentsLaws.scala | 4 +- .../com/twitter/algebird/QTreeTest.scala | 4 +- .../scala/com/twitter/algebird/SGDTest.scala | 8 +-- .../com/twitter/algebird/SketchMapTest.scala | 2 +- .../algebird/immutable/BitSetTest.scala | 4 +- .../GaussianDistributionMonoidTests.scala | 2 +- .../util/summer/AsyncListSumProperties.scala | 2 +- .../util/summer/AsyncMapSumProperties.scala | 2 +- .../HeavyHittersCachingSummerProperties.scala | 2 +- 32 files changed, 119 insertions(+), 86 deletions(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index 7d23dff4c..fe6f27286 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.5.2 +version=3.5.9 runner.dialect = scala212 fileOverride { "glob:**/scala-2.13*/**" { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala index b04eaa448..e47fb8792 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala @@ -120,7 +120,7 @@ object AdaptiveVector { def isZero(v: AdaptiveVector[V]): Boolean = (v.size == 0) || { val sparseAreZero = - if (Monoid.isNonZero(v.sparseValue)) (v.denseCount == v.size) else true + if (Monoid.isNonZero(v.sparseValue)) v.denseCount == v.size else true sparseAreZero && v.denseIterator.forall(idxv => !Monoid.isNonZero(idxv._2)) } diff --git 
a/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala b/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala index 567580fd0..2f6d6e988 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala @@ -136,7 +136,7 @@ case class ExpHist( */ def guess: Double = if (total == 0) 0.0 - else (total - (oldestBucketSize - 1) / 2.0) + else total - (oldestBucketSize - 1) / 2.0 /** * Returns an Approximate instance encoding the bounds and the closest long to the estimated sum tracked by diff --git a/algebird-core/src/main/scala/com/twitter/algebird/GeneratedProductAlgebra.scala b/algebird-core/src/main/scala/com/twitter/algebird/GeneratedProductAlgebra.scala index f43b4eb5b..d1de72a2a 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/GeneratedProductAlgebra.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/GeneratedProductAlgebra.scala @@ -5041,7 +5041,10 @@ trait ProductSemigroups { msemigroup: Semigroup[M], nsemigroup: Semigroup[N] ): Semigroup[X] = - new Product14Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N](applyX, unapplyX)( + new Product14Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N]( + applyX, + unapplyX + )( asemigroup, bsemigroup, csemigroup, @@ -5078,7 +5081,10 @@ trait ProductSemigroups { nsemigroup: Semigroup[N], osemigroup: Semigroup[O] ): Semigroup[X] = - new Product15Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](applyX, unapplyX)( + new Product15Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( + applyX, + unapplyX + )( asemigroup, bsemigroup, csemigroup, @@ -5117,7 +5123,10 @@ trait ProductSemigroups { osemigroup: Semigroup[O], psemigroup: Semigroup[P] ): Semigroup[X] = - new Product16Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](applyX, unapplyX)( + new Product16Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( + applyX, + unapplyX + )( asemigroup, bsemigroup, csemigroup, @@ -5158,7 +5167,10 @@ trait ProductSemigroups { psemigroup: Semigroup[P], qsemigroup: Semigroup[Q] ): Semigroup[X] = - new Product17Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](applyX, unapplyX)( + new Product17Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( + applyX, + unapplyX + )( asemigroup, bsemigroup, csemigroup, @@ -5201,7 +5213,10 @@ trait ProductSemigroups { qsemigroup: Semigroup[Q], rsemigroup: Semigroup[R] ): Semigroup[X] = - new Product18Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](applyX, unapplyX)( + new Product18Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( + applyX, + unapplyX + )( asemigroup, bsemigroup, csemigroup, @@ -5246,7 +5261,10 @@ trait ProductSemigroups { rsemigroup: Semigroup[R], ssemigroup: Semigroup[S] ): Semigroup[X] = - new Product19Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](applyX, unapplyX)( + new Product19Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( + applyX, + unapplyX + )( asemigroup, bsemigroup, csemigroup, @@ -5293,7 +5311,10 @@ trait ProductSemigroups { ssemigroup: Semigroup[S], tsemigroup: Semigroup[T] ): Semigroup[X] = - new Product20Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](applyX, unapplyX)( + new Product20Semigroup[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( + applyX, + unapplyX + )( asemigroup, bsemigroup, csemigroup, @@ -5892,7 +5913,10 @@ trait ProductMonoids { rmonoid: Monoid[R], smonoid: 
Monoid[S] ): Monoid[X] = - new Product19Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](applyX, unapplyX)( + new Product19Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( + applyX, + unapplyX + )( amonoid, bmonoid, cmonoid, @@ -5939,7 +5963,10 @@ trait ProductMonoids { smonoid: Monoid[S], tmonoid: Monoid[T] ): Monoid[X] = - new Product20Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](applyX, unapplyX)( + new Product20Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( + applyX, + unapplyX + )( amonoid, bmonoid, cmonoid, @@ -5988,7 +6015,10 @@ trait ProductMonoids { tmonoid: Monoid[T], umonoid: Monoid[U] ): Monoid[X] = - new Product21Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](applyX, unapplyX)( + new Product21Monoid[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( + applyX, + unapplyX + )( amonoid, bmonoid, cmonoid, @@ -6675,7 +6705,10 @@ trait ProductGroups { ugroup: Group[U], vgroup: Group[V] ): Group[X] = - new Product22Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](applyX, unapplyX)( + new Product22Group[X, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( + applyX, + unapplyX + )( agroup, bgroup, cgroup, diff --git a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala index 9bebfd22a..adac1141d 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala @@ -204,7 +204,7 @@ object HyperLogLog { assert(bb.remaining % (jLen + 1) == 0, "Invalid byte array") val maxRhow = (1 to bb.remaining / (jLen + 1)).map { _ => val j = jLen match { - case 1 => (bb.get.toInt & 0xff) + case 1 => bb.get.toInt & 0xff case 2 => (bb.get.toInt & 0xff) + ((bb.get.toInt & 0xff) << 8) case 3 => (bb.get.toInt & 0xff) + ((bb.get.toInt & 0xff) << 8) + ((bb.get.toInt & 0xff) << 16) @@ -494,7 +494,7 @@ case class DenseHLL(override val bits: Int, v: Bytes) extends HLL { override def +(other: HLL): HLL = other match { - case SparseHLL(_, _) => (other + this) + case SparseHLL(_, _) => other + this case DenseHLL(_, ov) => assert(ov.size == v.size, "Incompatible HLL size: " + ov.size + " != " + v.size) @@ -526,7 +526,7 @@ case class DenseHLL(override val bits: Int, v: Bytes) extends HLL { while (idx < arrSize) { val maxb = arr(idx) - buffer.update(idx, (buffer(idx)).max(maxb)) + buffer.update(idx, buffer(idx).max(maxb)) idx += 1 } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala b/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala index b9b829e75..d800ec791 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala @@ -107,7 +107,7 @@ class JMapMonoid[K, V: Semigroup] extends Monoid[JMap[K, V]] { val nonZero: (V => Boolean) = implicitly[Semigroup[V]] match { case mon: Monoid[_] => mon.isNonZero(_) - case _ => (_ => true) + case _ => _ => true } override def isNonZero(x: JMap[K, V]): Boolean = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala b/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala index 17cba9f59..8ee81c42d 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala @@ -33,7 +33,7 @@ 
abstract class GenericMapMonoid[K, V, M <: ScMap[K, V]](implicit val semigroup: val nonZero: (V => Boolean) = semigroup match { case mon: Monoid[_] => mon.isNonZero(_) - case _ => (_ => true) + case _ => _ => true } override def isNonZero(x: M): Boolean = @@ -73,7 +73,7 @@ abstract class GenericMapMonoid[K, V, M <: ScMap[K, V]](implicit val semigroup: } .getOrElse(kv._2) if (nonZero(newV)) - add(oldMap, (kv._1 -> newV)) + add(oldMap, kv._1 -> newV) else remove(oldMap, kv._1) } @@ -151,7 +151,7 @@ trait GenericMapRing[K, V, M <: ScMap[K, V]] extends Rng[M] with MapOperations[K val newV = if (bigOnLeft) ring.times(bigV, kv._2) else ring.times(kv._2, bigV) if (ring.isNonZero(newV)) { - add(oldMap, (kv._1 -> newV)) + add(oldMap, kv._1 -> newV) } else { remove(oldMap, kv._1) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala b/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala index 4fb32e05d..e5c6df39b 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Metric.scala @@ -92,7 +92,7 @@ object Metric { implicit val longMetric: Metric[Long] = Metric.from((a: Long, b: Long) => math.abs((a - b).toDouble)) implicit val floatMetric: Metric[Float] = - Metric.from((a: Float, b: Float) => math.abs((a.toDouble - b.toDouble))) + Metric.from((a: Float, b: Float) => math.abs(a.toDouble - b.toDouble)) implicit val shortMetric: Metric[Short] = Metric.from((a: Short, b: Short) => math.abs((a - b).toDouble)) implicit val boolMetric: Metric[Boolean] = @@ -104,7 +104,7 @@ object Metric { implicit val jLongMetric: Metric[JLong] = Metric.from((a: JLong, b: JLong) => math.abs((a - b).toDouble)) implicit val jFloatMetric: Metric[JFloat] = - Metric.from((a: JFloat, b: JFloat) => math.abs((a.toDouble - b.toDouble))) + Metric.from((a: JFloat, b: JFloat) => math.abs(a.toDouble - b.toDouble)) implicit val jShortMetric: Metric[JShort] = Metric.from((a: JShort, b: JShort) => math.abs((a - b).toDouble)) implicit val jBoolMetric: Metric[JBool] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala index 733efb9ed..74eb5a428 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MomentsGroup.scala @@ -297,7 +297,7 @@ object Moments { case newCount => val scaling = weightK / newCount // a_n + (a_k - a_n)*(k/(n+k)) is only stable if n is not approximately k - if (scaling < STABILITY_CONSTANT) (an + (ak - an) * scaling) + if (scaling < STABILITY_CONSTANT) an + (ak - an) * scaling else (weightN * an + weightK * ak) / newCount } @@ -330,7 +330,7 @@ class MomentsMonoid extends Monoid[Moments] with CommutativeMonoid[Moments] { case newCount => val scaling = k.toDouble / newCount // a_n + (a_k - a_n)*(k/(n+k)) is only stable if n is not approximately k - if (scaling < STABILITY_CONSTANT) (an + (ak - an) * scaling) + if (scaling < STABILITY_CONSTANT) an + (ak - an) * scaling else (n * an + k * ak) / newCount } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala index 774e1ac7c..2376cfbf8 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala @@ -421,7 +421,7 @@ class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( * A debug method that prints the 
QTree to standard out using print/println */ def dump(): Unit = { - for (_ <- (20 to _level by -1)) + for (_ <- 20 to _level by -1) print(" ") print(lowerBound + " - " + upperBound + ": " + _count) if (lowerChild.isDefined || upperChild.isDefined) { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala index 5ec0b4a0d..5f54e984a 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SGDMonoid.scala @@ -37,7 +37,7 @@ object SGD { def constantStep(s: Double): (Long, IndexedSeq[Double]) => Double = { (_, _) => s } // A standard: a/(steps + b)^c def countAdaptiveStep(a: Double, b: Double, c: Double = 1.0): (Long, IndexedSeq[Double]) => Double = { - (cnt, _) => a / scala.math.pow((cnt + b), c) + (cnt, _) => a / scala.math.pow(cnt + b, c) } def weightsOf[T](s: SGD[T]): Option[IndexedSeq[Double]] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala index 43e53c250..68830547e 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala @@ -278,7 +278,7 @@ case class SSMany[T] private ( .map { key => val (count1, err1) = counters.getOrElse(key, (min, min)) val (count2, err2) = x.counters.getOrElse(key, (x.min, x.min)) - (key -> ((count1 + count2, err1 + err2))) + key -> ((count1 + count2, err1 + err2)) } .sorted(SpaceSaver.ordering) .take(capacity) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala index 4eca0b803..7ad831bbb 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/TopKMonoid.scala @@ -66,7 +66,7 @@ class TopKMonoid[T](k: Int)(implicit ord: Ordering[T]) extends Monoid[TopK[T]] { for { biggest <- bigger.max smallest <- smaller.items.headOption - } yield (ord.lteq(biggest, smallest)) + } yield ord.lteq(biggest, smallest) if (biggerWins.getOrElse(true)) { // smaller is small, or empty bigger } else { diff --git a/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala b/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala index e96cb2c5f..4b81738d5 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/BaseProperties.scala @@ -35,7 +35,7 @@ object BaseProperties extends MetricProperties { scale <- Gen.choose(-7, +7) base <- implicitly[Arbitrary[Int]].arbitrary } yield { - (BigDecimal(base) * BigDecimal(10).pow(scale)) + BigDecimal(base) * BigDecimal(10).pow(scale) }) // $COVERAGE-OFF$Turn off coverage for deprecated laws. diff --git a/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala b/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala index bc46aac3d..b38e9f4b4 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/MonadLaws.scala @@ -28,7 +28,7 @@ import org.scalacheck.Prop object MonadLaws { // $COVERAGE-OFF$Turn off coverage for deprecated laws. @deprecated("No longer used. 
Use Equiv[T] instance", since = "0.13.0") - def defaultEq[T]: (T, T) => Boolean = { (t0: T, t1: T) => (t0 == t1) } + def defaultEq[T]: (T, T) => Boolean = { (t0: T, t1: T) => t0 == t1 } @deprecated("use leftIdentity[T]", since = "0.13.0") def leftIdentityEquiv[M[_], T, U](implicit @@ -62,7 +62,7 @@ object MonadLaws { for { m <- implicitly[Arbitrary[Map[T, M[U]]]].arbitrary defu <- implicitly[Arbitrary[M[U]]].arbitrary - } yield ({ (t: T) => m.getOrElse(t, defu) }) + } yield { (t: T) => m.getOrElse(t, defu) } } @deprecated("use monadLaws[T]", since = "0.13.0") diff --git a/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala index 50fbd57c6..7ad92bca5 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/ApproximateTest.scala @@ -18,7 +18,7 @@ class ApproximateLaws extends CheckProperties { for { v0 <- choose(0L, (1L << 15) - 2) v1 <- choose(v0, (1L << 15) - 1) - v2 <- choose(v1, (1L << 15)) + v2 <- choose(v1, 1L << 15) } yield Approximate(v0, v1, v2, 0.9) } @@ -88,7 +88,7 @@ class ApproximateLaws extends CheckProperties { (a || b).withProb >= (a.withProb * b.withProb) && (a && b).isTrue == (a.isTrue && b.isTrue) && (a && b).withProb >= (a.withProb * b.withProb) && - (a.not).isTrue == (!(a.isTrue)) && + a.not.isTrue == (!a.isTrue) && (a.not.withProb) == a.withProb } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala b/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala index feb6e8f70..2c641dbe6 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CollectionSpecification.scala @@ -47,7 +47,7 @@ class CollectionSpecification extends CheckProperties { property("List plus") { forAll { (a: List[Int], b: List[Int]) => val mon = implicitly[Monoid[List[Int]]] - ((a ++ b == mon.plus(a, b)) && (mon.zero == List[Int]())) + (a ++ b == mon.plus(a, b)) && (mon.zero == List[Int]()) } } @@ -61,7 +61,7 @@ class CollectionSpecification extends CheckProperties { property("Seq plus") { forAll { (a: Seq[Int], b: Seq[Int]) => val mon = implicitly[Monoid[Seq[Int]]] - ((a ++ b == mon.plus(a, b)) && (mon.zero == Seq[Int]())) + (a ++ b == mon.plus(a, b)) && (mon.zero == Seq[Int]()) } } @@ -77,7 +77,7 @@ class CollectionSpecification extends CheckProperties { property("Set plus") { forAll { (a: Set[Int], b: Set[Int]) => val mon = implicitly[Monoid[Set[Int]]] - ((a ++ b == mon.plus(a, b)) && (mon.zero == Set[Int]())) + (a ++ b == mon.plus(a, b)) && (mon.zero == Set[Int]()) } } @@ -109,9 +109,9 @@ class CollectionSpecification extends CheckProperties { ): Prop = forAll { (a: M, b: M) => // Subsets because zeros are removed from the times/plus values - ((rng.times(a, b)).keys.toSet.subsetOf((a.keys.toSet & b.keys.toSet)) && - (rng.plus(a, b)).keys.toSet.subsetOf((a.keys.toSet | b.keys.toSet)) && - (rng.plus(a, a).keys == a.filter(kv => (kv._2 + kv._2) != 0).keys)) + rng.times(a, b).keys.toSet.subsetOf(a.keys.toSet & b.keys.toSet) && + rng.plus(a, b).keys.toSet.subsetOf(a.keys.toSet | b.keys.toSet) && + (rng.plus(a, a).keys == a.filter(kv => (kv._2 + kv._2) != 0).keys) } property("Map plus/times keys") { @@ -207,11 +207,11 @@ class CollectionSpecification extends CheckProperties { } property("MapAlgebra.removeZeros works") { - forAll((m: Map[Int, Int]) => 
(MapAlgebra.removeZeros(m).values.toSet.contains(0) == false)) + forAll((m: Map[Int, Int]) => MapAlgebra.removeZeros(m).values.toSet.contains(0) == false) } property("Monoid.sum performs w/ or w/o MapAlgebra.removeZeros") { - forAll((m: Map[Int, Int]) => (Monoid.sum(m) == Monoid.sum(MapAlgebra.removeZeros(m)))) + forAll((m: Map[Int, Int]) => Monoid.sum(m) == Monoid.sum(MapAlgebra.removeZeros(m))) } property("MapAlgebra.sumByKey works") { @@ -239,20 +239,20 @@ class CollectionSpecification extends CheckProperties { property("MapAlgebra.dot works") { forAll { (m1: Map[Int, Int], m2: Map[Int, Int]) => // .toList below is to make sure we don't remove duplicate values - (MapAlgebra.dot(m1, m2) == - (m1.keySet ++ m2.keySet).toList.map(k => m1.getOrElse(k, 0) * m2.getOrElse(k, 0)).sum) + MapAlgebra.dot(m1, m2) == + (m1.keySet ++ m2.keySet).toList.map(k => m1.getOrElse(k, 0) * m2.getOrElse(k, 0)).sum } } property("MapAlgebra.toGraph is correct") { forAll { (l: Set[(Int, Int)]) => - (MapAlgebra + MapAlgebra .toGraph(l) .toIterator .flatMap { case (k, sv) => sv.map(v => (k, v)) } - .toSet == l) + .toSet == l } } @@ -273,13 +273,13 @@ class CollectionSpecification extends CheckProperties { (v, ks) <- m2.toIterable k <- ks.toIterable } yield Map(k -> v)) - (m3 == m) + m3 == m } } property("MapAlgebra.invertExact works") { forAll { (m: Map[Option[Int], Set[Int]]) => - (MapAlgebra.invertExact(MapAlgebra.invertExact(m)) == m.filterKeys(_.isDefined).toMap) + MapAlgebra.invertExact(MapAlgebra.invertExact(m)) == m.filterKeys(_.isDefined).toMap } } @@ -297,7 +297,7 @@ class CollectionSpecification extends CheckProperties { .mapValues(_.get) .toMap val m1Orm2 = m1.keySet | m2.keySet - ((m1after == m1) && (m2after == m2) && (m3.keySet == m1Orm2)) + (m1after == m1) && (m2after == m2) && (m3.keySet == m1Orm2) } } @@ -306,13 +306,13 @@ class CollectionSpecification extends CheckProperties { property("MapAlgebra.mergeLookup works") { forAll { (items: Set[Int]) => - (mapEq.equiv( + mapEq.equiv( MapAlgebra.mergeLookup[Int, Option[Int], Int](items)(square)(_ => None), Map((None: Option[Int]) -> Monoid.sum(items.map(x => square(x).getOrElse(0)))) ) && mapEq.equiv( MapAlgebra.mergeLookup[Int, Int, Int](items)(square)(identity), MapAlgebra.sumByKey(items.map(x => x -> square(x).getOrElse(0))) - )) + ) } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala index 488d86235..ea55c60b9 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CorrelationLaws.scala @@ -95,8 +95,8 @@ class CorrelationLaws extends CheckProperties { forAll { (corr: Correlation, a0: Int, b0: Int) => val a = a0 & Int.MaxValue val b = b0 & Int.MaxValue - (corrApproxEq(corr.scale(a).scale(b), corr.scale(a.toDouble * b)) && - corrApproxEq(corr.scale(a.toDouble + b), CorrelationMonoid.plus(corr.scale(a), corr.scale(b)))) + corrApproxEq(corr.scale(a).scale(b), corr.scale(a.toDouble * b)) && + corrApproxEq(corr.scale(a.toDouble + b), CorrelationMonoid.plus(corr.scale(a), corr.scale(b))) } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala index f33aba363..e550c6c81 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/DecayingCMSTest.scala @@ -329,7 +329,7 @@ class 
DecayingCMSProperties extends CheckProperties { val g = genCms(module, stdKey, genTimestamp(module), stdVal) forAll(g, g) { (x, y) => // abs(x + y) <= abs(x) + abs(y) - val lhs = ((x + y).l2Norm).timeToUnit + val lhs = (x + y).l2Norm.timeToUnit val rhs = (x.l2Norm + y.l2Norm).timeToUnit Prop(lhs <= rhs || close(lhs, rhs)) } @@ -391,9 +391,9 @@ class DecayingCMSProperties extends CheckProperties { val dvm = new DecayedValueMonoid(0.0) val dv = dvm.sum(inputs.map { case (t, n) => - DecayedValue.build(n, (t.toDouble / 1000.0), halfLifeSecs) + DecayedValue.build(n, t.toDouble / 1000.0, halfLifeSecs) }) - val expected = dvm.valueAsOf(dv, halfLifeSecs, (tlast.toDouble / 1000.0)) + val expected = dvm.valueAsOf(dv, halfLifeSecs, tlast.toDouble / 1000.0) val cms0 = module.monoid.zero val cmss = inputs.map { case (t, n) => cms0.add(t, key, n) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala index fc5fa7015..e91482291 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/EventuallyTest.scala @@ -28,8 +28,8 @@ class EventuallyRingLaws extends CheckProperties { Equiv.fromFunction[Either[Long, Int]] { case (Right(a), Right(b)) => a == b case (Left(a), Left(b)) => a == b - case (Right(a), Left(b)) => (a.toLong == b) - case (Left(a), Right(b)) => (a == (b.toLong)) + case (Right(a), Left(b)) => a.toLong == b + case (Left(a), Right(b)) => a == (b.toLong) } Prop.forAll { (pred: Int => Boolean) => implicit val evRing = new EventuallyRing[Long, Int](_.toLong)(pred) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala index ea3ed65f9..8d3904751 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/HyperLogLogTest.scala @@ -309,7 +309,7 @@ class HyperLogLogTest extends AnyWordSpec with Matchers { val larger = bigMon.create(1) // uses implicit long2Bytes to make 8 byte array val smaller = smallMon.create(1) // uses implicit int2Bytes to make 4 byte array intercept[AssertionError] { - (larger + smaller) + larger + smaller } } "Correctly serialize" in { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala index 22981c5fd..9c8928bc3 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/IntervalLaws.scala @@ -131,19 +131,19 @@ class IntervalLaws extends CheckProperties { } property("(x, inf) and (-inf, y) intersects if and only if y > x") { - forAll((x: Long, y: Long) => ((y > x) == ExclusiveLower(x).intersects(ExclusiveUpper(y)))) + forAll((x: Long, y: Long) => (y > x) == ExclusiveLower(x).intersects(ExclusiveUpper(y))) } property("(x, inf) and (-inf, y] intersect if and only if y > x") { - forAll((x: Long, y: Long) => ((y > x) == ExclusiveLower(x).intersects(InclusiveUpper(y)))) + forAll((x: Long, y: Long) => (y > x) == ExclusiveLower(x).intersects(InclusiveUpper(y))) } property("[x, inf) and (-inf, y) intersect if and only if y > x") { - forAll((x: Long, y: Long) => ((y > x) == InclusiveLower(x).intersects(ExclusiveUpper(y)))) + forAll((x: Long, y: Long) => (y > x) == InclusiveLower(x).intersects(ExclusiveUpper(y))) } property("[x, inf) and (-inf, y] 
intersect if and only if y >= x") { - forAll((x: Long, y: Long) => ((y >= x) == InclusiveLower(x).intersects(InclusiveUpper(y)))) + forAll((x: Long, y: Long) => (y >= x) == InclusiveLower(x).intersects(InclusiveUpper(y))) } def lowerUpperIntersection(low: Lower[Long], upper: Upper[Long], items: List[Long]): Boolean = @@ -172,26 +172,26 @@ class IntervalLaws extends CheckProperties { } property("If an a Lower intersects an Upper, the intersection is non Empty") { forAll { (low: Lower[Long], upper: Upper[Long], items: List[Long]) => - (lowerUpperIntersection(low, upper, items)) + lowerUpperIntersection(low, upper, items) } } // This specific case broke the tests before property("(n, n+1) follows the intersect law") { forAll { (n: Long) => - ((n == Long.MaxValue) || - lowerUpperIntersection(ExclusiveLower(n), ExclusiveUpper(n + 1L), Nil)) + (n == Long.MaxValue) || + lowerUpperIntersection(ExclusiveLower(n), ExclusiveUpper(n + 1L), Nil) } } property("toLeftClosedRightOpen is an Injection") { forAll { (intr: GenIntersection[Long], tests: List[Long]) => - (intr.toLeftClosedRightOpen + intr.toLeftClosedRightOpen .map { case Intersection(InclusiveLower(low), ExclusiveUpper(high)) => val intr2 = Interval.leftClosedRightOpen(low, high) tests.forall(t => intr(t) == intr2(t)) } - .getOrElse(true)) // none means this can't be expressed as this kind of interval + .getOrElse(true) // none means this can't be expressed as this kind of interval } } @@ -228,7 +228,7 @@ class IntervalLaws extends CheckProperties { property("leastToGreatest and greatestToLeast are ordered and adjacent") { forAll { (intr: GenIntersection[Long]) => val items1 = intr.leastToGreatest.take(100) - ((items1.size < 2) || items1.sliding(2).forall { it => + (items1.size < 2) || items1.sliding(2).forall { it => it.toList match { case low :: high :: Nil if low + 1L == high => true case _ => false @@ -241,7 +241,7 @@ class IntervalLaws extends CheckProperties { case _ => false } } - }) + } } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala index 1e4453710..1ec842405 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MinHasherTest.scala @@ -11,7 +11,7 @@ import java.{util => ju} class MinHasherTest extends CheckProperties { implicit val mhMonoid: MinHasher32 = new MinHasher32(0.5, 512) implicit val mhGen: Arbitrary[MinHashSignature] = Arbitrary { - for (v <- Gen.choose(0, 10000)) yield (mhMonoid.init(v)) + for (v <- Gen.choose(0, 10000)) yield mhMonoid.init(v) } property("MinHasher is a commutative monoid") { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala index 06a7caddd..45bb88095 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/MomentsLaws.scala @@ -60,8 +60,8 @@ class MomentsLaws extends CheckProperties { forAll(opGen, Gen.choose(0, Int.MaxValue), Gen.choose(0, Int.MaxValue)) { (mom, a0, b0) => val a = a0 & Int.MaxValue val b = b0 & Int.MaxValue - (equiv.equiv(mom.scale(a).scale(b), mom.scale(a.toDouble * b)) && - equiv.equiv(mom.scale(a.toDouble + b), Monoid.plus(mom.scale(a), mom.scale(b)))) + equiv.equiv(mom.scale(a).scale(b), mom.scale(a.toDouble * b)) && + equiv.equiv(mom.scale(a.toDouble + b), Monoid.plus(mom.scale(a), mom.scale(b))) } } diff --git 
a/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala index 2e7351954..90ce4deda 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/QTreeTest.scala @@ -26,7 +26,7 @@ class QTreeLaws extends CheckProperties { implicit val qtSemigroup: QTreeSemigroup[Long] = new QTreeSemigroup[Long](4) implicit val qtGen: Arbitrary[QTree[Long]] = Arbitrary { - for (v <- choose(0L, 10000L)) yield (QTree(v)) + for (v <- choose(0L, 10000L)) yield QTree(v) } property("QTree is associative") { @@ -67,7 +67,7 @@ class QTreeTest extends AnyWordSpec with Matchers { } } - for (k <- (1 to 6)) + for (k <- 1 to 6) ("QTree with sizeHint 2^" + k) should { "always contain the true quantile within its bounds" in { val list = randomList(10000) diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SGDTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SGDTest.scala index 6cf7e8006..b675dcdd5 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SGDTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SGDTest.scala @@ -44,22 +44,22 @@ class SGDLaws extends CheckProperties { val b = w.weights(1) val y = m * x + b - (y.isInfinity || { + y.isInfinity || { val pos = (y, IndexedSeq(x)) val grad = SGD.linearGradient(w.weights, pos) (scala.math.abs(grad(0)) < eps) && (scala.math.abs(grad(1)) < eps) - }) + } } } property("Gradient at x=0 has zero first component") { - forAll((w: SGDWeights, y: Double) => (SGD.linearGradient(w.weights, (y, IndexedSeq(0.0)))(0) == 0.0)) + forAll((w: SGDWeights, y: Double) => SGD.linearGradient(w.weights, (y, IndexedSeq(0.0)))(0) == 0.0) } property("Zero-step leaves Weights unchanged") { forAll { (w: SGDWeights, pos: SGDPos[(Double, Vector[Double])]) => val next = zeroStepMonoid.newWeights(w, pos.pos.head) - (next.weights == w.weights && next.count == (w.count + 1L)) + next.weights == w.weights && next.count == (w.count + 1L) } } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala index d5faea610..86a4452d6 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/SketchMapTest.scala @@ -20,7 +20,7 @@ class SketchMapLaws extends CheckProperties { val params: SketchMapParams[Int] = SketchMapParams[Int](SEED, EPS, 1e-3, HEAVY_HITTERS_COUNT) implicit val smMonoid: SketchMapMonoid[Int, Long] = SketchMap.monoid[Int, Long](params) implicit val smGen: Arbitrary[SketchMap[Int, Long]] = Arbitrary { - for (key: Int <- Gen.choose(0, 10000)) yield (smMonoid.create((key, 1L))) + for (key: Int <- Gen.choose(0, 10000)) yield smMonoid.create((key, 1L)) } // TODO: SketchMap's heavy hitters are not strictly associative diff --git a/algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala index aa0dd3e72..bfc13fb67 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala @@ -177,7 +177,7 @@ object BitSetTest extends Properties("BitSet") { property("(x | y)(z) == x(z) || y(z)") = forAll { (x: BitSet, y: BitSet, z: Int) => // do apply first in case we mutate erroneously def law(z: Int): Boolean = - (x(z) || y(z)) == (x | y)(z) + 
(x(z) || y(z)) == (x | y) (z) law(z) && x.iterator.forall(law) && y.iterator.forall(law) } @@ -202,7 +202,7 @@ object BitSetTest extends Properties("BitSet") { property("(x & y)(z) == x(z) && y(z)") = forAll { (x: BitSet, y: BitSet, z: Int) => // do apply first in case we mutate erroneously def law(z: Int): Boolean = - (x(z) && y(z)) == (x & y)(z) + (x(z) && y(z)) == (x & y) (z) law(z) && x.iterator.forall(law) && y.iterator.forall(law) } diff --git a/algebird-test/src/test/scala/com/twitter/algebird/statistics/GaussianDistributionMonoidTests.scala b/algebird-test/src/test/scala/com/twitter/algebird/statistics/GaussianDistributionMonoidTests.scala index 536460355..852400194 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/statistics/GaussianDistributionMonoidTests.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/statistics/GaussianDistributionMonoidTests.scala @@ -10,7 +10,7 @@ class GaussianDistributionMonoidTests extends CheckProperties { for { mean <- Gen.choose(0, 10000) sigma <- Gen.choose(0, 10000) - } yield (GaussianDistribution(mean, sigma)) + } yield GaussianDistribution(mean, sigma) } property("GaussianDistributionMonoid is a Monoid") { diff --git a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncListSumProperties.scala b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncListSumProperties.scala index 2ac2639fc..f7d9f0a0b 100644 --- a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncListSumProperties.scala +++ b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncListSumProperties.scala @@ -53,7 +53,7 @@ class AsyncListSumProperties extends CheckProperties { Compact(false), CompactionSize(0) ) - (summingWithAndWithoutSummerShouldMatch(summer, inputs)) + summingWithAndWithoutSummerShouldMatch(summer, inputs) } } diff --git a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncMapSumProperties.scala b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncMapSumProperties.scala index c1c2a19d8..522eeb7a4 100644 --- a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncMapSumProperties.scala +++ b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/AsyncMapSumProperties.scala @@ -47,7 +47,7 @@ class AsyncMapSumProperties extends CheckProperties { sizeCounter, workPool ) - (summingWithAndWithoutSummerShouldMatch(summer, inputs)) + summingWithAndWithoutSummerShouldMatch(summer, inputs) } } diff --git a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummerProperties.scala b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummerProperties.scala index ff005d81e..709e5812b 100644 --- a/algebird-util/src/test/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummerProperties.scala +++ b/algebird-util/src/test/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummerProperties.scala @@ -64,7 +64,7 @@ class HeavyHittersCachingSummerProperties extends CheckProperties { sizeCounter, summer ) - (summingWithAndWithoutSummerShouldMatch(heavyHittersCachingSummer, inputs)) + summingWithAndWithoutSummerShouldMatch(heavyHittersCachingSummer, inputs) } } From 7557b344f4628d8eb09ffdec81875d3b041d0773 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sun, 11 Sep 2022 08:59:54 +0100 Subject: [PATCH 272/306] Update GHA with Java 17 (#1077) --- .github/workflows/ci.yml | 44 ++++++++++++++++++----------------- .github/workflows/release.yml | 6 ++++- 2 files changed, 28 
insertions(+), 22 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index df3cdaf28..a1738db28 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,20 +6,20 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: cache SBT - uses: coursier/cache-action@v6 - - name: Java 11 setup - uses: olafurpg/setup-scala@v13 + - uses: actions/setup-java@v3 + with: + cache: "sbt" + distribution: "temurin" + java-version: 17 - run: sbt "; scalafmtCheckAll; scalafmtSbtCheck" "; scalafixEnable; scalafixAll --check" test: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: cache SBT - uses: coursier/cache-action@v6 - - name: java ${{matrix.java}} setup - uses: olafurpg/setup-scala@v13 + - uses: actions/setup-java@v3 with: + cache: "sbt" + distribution: "temurin" java-version: ${{matrix.java}} - run: sbt "++${{matrix.scala}} test" strategy: @@ -27,6 +27,7 @@ jobs: java: - 8 - 11 + - 17 scala: - 2.11.12 - 2.12.16 @@ -35,10 +36,11 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: cache SBT - uses: coursier/cache-action@v6 - - name: java ${{matrix.java}} setup - uses: olafurpg/setup-scala@v13 + - uses: actions/setup-java@v3 + with: + cache: "sbt" + distribution: "temurin" + java-version: 17 - run: | sbt ++2.12.16 coverage test coverageReport bash <(curl -s https://codecov.io/bash) @@ -46,11 +48,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: cache SBT - uses: coursier/cache-action@v6 - - name: java ${{matrix.java}} setup - uses: olafurpg/setup-scala@v13 + - uses: actions/setup-java@v3 with: + cache: "sbt" + distribution: "temurin" java-version: ${{matrix.java}} - run: sbt "++${{matrix.scala}} mimaReportBinaryIssues" strategy: @@ -58,6 +59,7 @@ jobs: java: - 8 - 11 + - 17 scala: - 2.11.12 - 2.12.16 @@ -73,8 +75,8 @@ jobs: - run: | gem install sass gem install jekyll -v 3.2.1 - - name: cache SBT - uses: coursier/cache-action@v6 - - name: java ${{matrix.java}} setup - uses: olafurpg/setup-scala@v13 - - run: sbt docs/makeMicrosite + - uses: actions/setup-java@v3 + with: + cache: "sbt" + distribution: "temurin" + java-version: 17 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f58dab8f4..83dd1aaad 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -8,7 +8,11 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - uses: olafurpg/setup-scala@v13 + - uses: actions/setup-java@v3 + with: + cache: "sbt" + distribution: "temurin" + java-version: 8 - name: Publish ${{ github.ref }} run: sbt ci-release env: From a29690e8252895df0ff466af480eb0f88f857572 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 1 Oct 2022 08:31:10 +0200 Subject: [PATCH 273/306] Update sbt-scoverage to 2.0.3 (#1080) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 491ce007e..11663c638 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.2") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.3") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") addSbtPlugin("ch.epfl.scala" % 
"sbt-scalafix" % "0.10.1") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") From 4d9a72d568cb0dc43e7a909d91766c375be23c3f Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 3 Oct 2022 13:14:08 +0200 Subject: [PATCH 274/306] Update shapeless to 2.3.10 (#1078) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index d4b474a1a..0137d8bfc 100644 --- a/build.sbt +++ b/build.sbt @@ -296,7 +296,7 @@ lazy val algebirdGeneric = module("generic") .settings( crossScalaVersions += "2.13.8", libraryDependencies ++= Seq( - "com.chuusai" %% "shapeless" % "2.3.9", + "com.chuusai" %% "shapeless" % "2.3.10", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" ) ++ { if (isScala213x(scalaVersion.value)) { From efd19a1c602cbdfea39a8bbad30f224fd614cf8f Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 15 Oct 2022 23:16:27 +0200 Subject: [PATCH 275/306] Update sbt-scoverage to 2.0.5 (#1087) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 11663c638..e2245046d 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.3") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.5") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.1") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") From 9031aba36314bbfdc83c85940f1f219d595779c0 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 15 Oct 2022 23:16:39 +0200 Subject: [PATCH 276/306] Update scalatest to 3.2.14 (#1086) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 0137d8bfc..f0bebc381 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val javaEwahVersion = "1.1.13" val kindProjectorVersion = "0.13.2" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.13" +val scalaTestVersion = "3.2.14" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.8.1" From 6b1e93e05a7eb1a41350ab7e2014fdd0995498b4 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 15 Oct 2022 23:16:53 +0200 Subject: [PATCH 277/306] Update sbt-scalafix to 0.10.4 (#1083) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index e2245046d..bb80ac73d 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,7 +12,7 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.5") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.1") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.4") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") dependencyOverrides += "org.scala-lang.modules" %% "scala-xml" % "2.1.0" From 
f8952a2f4367219578e7c653598278db0bb76796 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sun, 23 Oct 2022 01:59:57 +0200 Subject: [PATCH 278/306] Update scalafmt-core to 3.6.0 (#1088) --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index fe6f27286..c9f903c4f 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.5.9 +version=3.6.0 runner.dialect = scala212 fileOverride { "glob:**/scala-2.13*/**" { From 3cefba6a62d27924e2348261e21e95471cfabb47 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 28 Nov 2022 10:35:38 +0100 Subject: [PATCH 279/306] Update sbt-scalafmt to 2.5.0 (#1091) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index bb80ac73d..534e41ac1 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -7,7 +7,7 @@ resolvers ++= Seq( addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.5") From 7ebf35f7a7d94e39313f788c2b266725562042e9 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Fri, 20 Jan 2023 18:21:31 +0100 Subject: [PATCH 280/306] Update sbt-mima-plugin to 1.1.1 (#1081) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 534e41ac1..9b0eefa50 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -8,7 +8,7 @@ resolvers ++= Seq( addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.5") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") From 48ac165e191d5a48d5015d075fcf436f39daed8f Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 30 Jan 2023 19:11:27 +0100 Subject: [PATCH 281/306] Update scalatest to 3.2.15 (#1098) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index f0bebc381..c914738e9 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val javaEwahVersion = "1.1.13" val kindProjectorVersion = "0.13.2" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.14" +val scalaTestVersion = "3.2.15" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.8.1" From 0af53c22d87757a6b463226d78ea9b7785e5a3ad Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 30 Jan 2023 19:12:38 +0100 Subject: [PATCH 282/306] Update scala-collection-compat to 2.9.0 (#1095) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index c914738e9..afc7de9c7 100644 --- a/build.sbt +++ b/build.sbt @@ 
-11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.15" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" -val scalaCollectionCompat = "2.8.1" +val scalaCollectionCompat = "2.9.0" val utilVersion = "21.2.0" val sparkVersion = "2.4.8" From b2bcf8a623850a2831f1f48d71c51c87ebf6ce0e Mon Sep 17 00:00:00 2001 From: "P. Oscar Boykin" Date: Mon, 30 Jan 2023 08:35:57 -1000 Subject: [PATCH 283/306] Try to use ruby 2.7 for CI (#1101) * Try to use ruby 2.7 for CI * try to actually read the error message --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a1738db28..244a8451f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -69,7 +69,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Ruby setup - uses: actions/setup-ruby@v1.1.3 + uses: ruby/setup-ruby@v1 with: ruby-version: 2.6 - run: | From 4a0016b352c5c3947a5a58e5bc6f808b3c97488f Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 4 Mar 2023 15:09:49 +0100 Subject: [PATCH 284/306] Update sbt-jmh to 0.4.4 (#1102) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 9b0eefa50..c446a5397 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -11,7 +11,7 @@ addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.5") -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.4") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.4") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") From 53246eee6c68a76fb4a81f6a541895d0370522f5 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 4 Mar 2023 15:33:06 +0100 Subject: [PATCH 285/306] Update sbt-scoverage to 2.0.7 (#1103) Co-authored-by: Filipe Regadas --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index c446a5397..d16002044 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.1") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.5") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.7") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.4") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.4") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") From 53d7d1cb20b717ebe88cd22a77b5cd1c3f73a889 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sat, 4 Mar 2023 15:07:39 +0000 Subject: [PATCH 286/306] Update sbt-microsites to v1.4.2 (#1106) Co-authored-by: Scala Steward --- build.sbt | 4 ++-- project/plugins.sbt | 5 ++--- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/build.sbt b/build.sbt index afc7de9c7..16e262c7a 100644 --- a/build.sbt +++ b/build.sbt @@ -333,6 +333,7 @@ lazy val docSettings = Seq( "gray-lighter" -> "#F4F3F4", "white-color" -> "#FFFFFF" ), + micrositePushSiteWith := GHPagesPlugin, autoAPIMappings := true, docsMappingsAPIDir := 
"api", addMappingsToSiteDir(ScalaUnidoc / packageDoc / mappings, docsMappingsAPIDir), @@ -345,14 +346,13 @@ lazy val docSettings = Seq( (LocalRootProject / baseDirectory).value.getAbsolutePath, "-diagrams" ), - git.remoteRepo := "git@github.com:twitter/algebird.git", makeSite / includeFilter := "*.html" | "*.css" | "*.png" | "*.jpg" | "*.gif" | "*.js" | "*.swf" | "*.yml" | "*.md" ) // Documentation is generated for projects defined in // `docsSourcesAndProjects`. lazy val docs = project - .enablePlugins(MicrositesPlugin, MdocPlugin, ScalaUnidocPlugin, GhpagesPlugin) + .enablePlugins(MicrositesPlugin, MdocPlugin, ScalaUnidocPlugin) .settings(moduleName := "algebird-docs") .settings(sharedSettings) .settings(noPublishSettings) diff --git a/project/plugins.sbt b/project/plugins.sbt index d16002044..9162c53a6 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,14 +5,13 @@ resolvers ++= Seq( ) ) -addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4") +addSbtPlugin("com.47deg" % "sbt-microsites" % "1.4.2") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.1") -addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.7") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.4") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.4") -addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.10") +addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.11") dependencyOverrides += "org.scala-lang.modules" %% "scala-xml" % "2.1.0" From 65a95ce440b9c30101db5528cf1a90ec02361142 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sun, 5 Mar 2023 09:22:48 +0000 Subject: [PATCH 287/306] Remove build deprecation warnings (#1107) --- .../bijection/AlgebirdBijections.scala | 4 +- .../com/twitter/algebird/compat.scala | 2 +- .../com/twitter/algebird/AdaptiveCache.scala | 10 +- .../com/twitter/algebird/AdaptiveVector.scala | 2 +- .../com/twitter/algebird/Aggregator.scala | 6 +- .../com/twitter/algebird/Applicative.scala | 4 +- .../com/twitter/algebird/AveragedValue.scala | 9 +- .../scala/com/twitter/algebird/Batched.scala | 4 +- .../com/twitter/algebird/BloomFilter.scala | 10 +- .../twitter/algebird/BufferedOperation.scala | 2 +- .../twitter/algebird/CorrelationMonoid.scala | 6 +- .../com/twitter/algebird/CountMinSketch.scala | 20 +- .../com/twitter/algebird/DecayingCMS.scala | 21 +- .../com/twitter/algebird/Eventually.scala | 2 +- .../scala/com/twitter/algebird/ExpHist.scala | 2 +- .../scala/com/twitter/algebird/First.scala | 2 +- .../scala/com/twitter/algebird/Fold.scala | 4 +- .../algebird/GeneratedAbstractAlgebra.scala | 3479 ++++++++--------- .../com/twitter/algebird/HyperLogLog.scala | 6 +- .../twitter/algebird/HyperLogLogSeries.scala | 4 +- .../com/twitter/algebird/JavaMonoids.scala | 2 +- .../com/twitter/algebird/MapAlgebra.scala | 2 +- .../main/scala/com/twitter/algebird/Max.scala | 4 +- .../com/twitter/algebird/MinHasher.scala | 2 +- .../scala/com/twitter/algebird/QTree.scala | 8 +- .../scala/com/twitter/algebird/Scan.scala | 4 +- .../com/twitter/algebird/SketchMap.scala | 2 +- .../com/twitter/algebird/SpaceSaver.scala | 2 +- .../com/twitter/algebird/SummingCache.scala | 4 +- .../twitter/algebird/SummingIterator.scala | 6 +- .../com/twitter/algebird/SummingQueue.scala | 2 +- .../scala/com/twitter/algebird/Window.scala | 4 +- .../algebird/matrix/AdaptiveMatrix.scala | 4 +- .../algebird/matrix/SparseColumnMatrix.scala 
| 2 +- .../mutable/PriorityQueueAggregator.scala | 2 +- .../GaussianDistributionMonoid.scala | 2 +- .../statistics/IterCallStatistics.scala | 6 +- .../algebird/statistics/Statistics.scala | 12 +- .../twitter/algebird/generic/Instances.scala | 5 +- .../macros/ArbitraryCaseClassMacro.scala | 1 - .../com/twitter/algebird/JavaBoxedTests.scala | 2 +- .../com/twitter/algebird/TopKTests.scala | 2 +- .../util/summer/AsyncListMMapSum.scala | 9 +- .../algebird/util/summer/AsyncListSum.scala | 17 +- .../algebird/util/summer/AsyncMapSum.scala | 9 +- .../algebird/util/summer/AsyncSummer.scala | 4 +- .../summer/HeavyHittersCachingSummer.scala | 12 +- .../algebird/util/summer/NullSummer.scala | 6 +- .../util/summer/SyncSummingQueue.scala | 11 +- build.sbt | 4 - scripts/ntuple_generators.rb | 6 +- 51 files changed, 1818 insertions(+), 1939 deletions(-) diff --git a/algebird-bijection/src/main/scala/com/twitter/algebird/bijection/AlgebirdBijections.scala b/algebird-bijection/src/main/scala/com/twitter/algebird/bijection/AlgebirdBijections.scala index 67f7b0bf0..c72a3ae81 100644 --- a/algebird-bijection/src/main/scala/com/twitter/algebird/bijection/AlgebirdBijections.scala +++ b/algebird-bijection/src/main/scala/com/twitter/algebird/bijection/AlgebirdBijections.scala @@ -19,6 +19,8 @@ package com.twitter.algebird.bijection import com.twitter.algebird.{Group, InvariantSemigroup, Monoid, Ring, Semigroup} import com.twitter.bijection.{AbstractBijection, Bijection, ImplicitBijection, Reverse} +import scala.collection.compat._ + /** * Bijections on Algebird's abstract algebra datatypes. * @@ -53,7 +55,7 @@ class BijectedRing[T, U](implicit val ring: Ring[T], bij: ImplicitBijection[T, U override def times(l: U, r: U): U = bijection.invert(ring.times(bijection(l), bijection(r))) override def product(iter: TraversableOnce[U]): U = - bijection.invert(ring.product(iter.map(bijection.toFunction))) + bijection.invert(ring.product(iter.iterator.map(bijection.toFunction))) } trait AlgebirdBijections { diff --git a/algebird-core/src/main/scala-2.13+/com/twitter/algebird/compat.scala b/algebird-core/src/main/scala-2.13+/com/twitter/algebird/compat.scala index e142c39fe..6c2f08640 100644 --- a/algebird-core/src/main/scala-2.13+/com/twitter/algebird/compat.scala +++ b/algebird-core/src/main/scala-2.13+/com/twitter/algebird/compat.scala @@ -24,7 +24,7 @@ private[algebird] trait CompatFold { Fold.foldMutable[Builder[I, C[I]], I, C[I]]( { case (b, i) => b += i }, _ => cbf.newBuilder, - _.result + _.result() ) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala index 29329b788..0ff02886a 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveCache.scala @@ -33,9 +33,9 @@ class SentinelCache[K, V](implicit sgv: Semigroup[V]) { def size: Int = map.get.map(_.size).getOrElse(0) - def clear(): Unit = map.get.foreach(_.clear) + def clear(): Unit = map.get.foreach(_.clear()) - def stopGrowing(): Unit = map.clear + def stopGrowing(): Unit = map.clear() def put(in: Map[K, V]): Unit = if (map.get.isDefined) { @@ -87,9 +87,9 @@ class AdaptiveCache[K, V: Semigroup](maxCapacity: Int, growthMargin: Double = 3. 
summingCache = new SummingWithHitsCache(currentCapacity) if (currentCapacity == maxCapacity) - sentinelCache.stopGrowing + sentinelCache.stopGrowing() else - sentinelCache.clear + sentinelCache.clear() } ret } @@ -101,7 +101,7 @@ class AdaptiveCache[K, V: Semigroup](maxCapacity: Int, growthMargin: Double = 3. override def flush: Option[Map[K, V]] = { val ret = summingCache.flush - sentinelCache.clear + sentinelCache.clear() ret } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala index e47fb8792..31f5117bc 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AdaptiveVector.scala @@ -145,7 +145,7 @@ object AdaptiveVector { def iteq: Boolean = (lit.hasNext, rit.hasNext) match { case (true, true) => - val (lnext, rnext) = (lit.next, rit.next) + val (lnext, rnext) = (lit.next(), rit.next()) if (lnext._1 == rnext._1 && Equiv[V].equiv(lnext._2, rnext._2)) iteq else diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala b/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala index 4e78d234b..a64ce4033 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Aggregator.scala @@ -114,7 +114,7 @@ object Aggregator extends java.io.Serializable { if (inputs.iterator.isEmpty) None else { val itr = inputs.iterator - val t = prepare(itr.next) + val t = prepare(itr.next()) Some(itr.foldLeft(t)(appnd)) } } @@ -155,7 +155,7 @@ object Aggregator extends java.io.Serializable { override def apply(inputs: TraversableOnce[F]): P = present(agg(inputs)) override def applyOption(inputs: TraversableOnce[F]): Option[P] = - if (inputs.isEmpty) None else Some(apply(inputs)) + if (inputs.iterator.isEmpty) None else Some(apply(inputs)) override def append(l: T, r: F): T = appnd(l, r) @@ -165,7 +165,7 @@ object Aggregator extends java.io.Serializable { override def appendAll(items: TraversableOnce[F]): T = agg(items) private def agg(inputs: TraversableOnce[F]): T = - inputs.foldLeft(m.zero)(append) + inputs.iterator.foldLeft(m.zero)(append) } /** diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala b/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala index 32a66339a..211cac612 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Applicative.scala @@ -42,7 +42,7 @@ trait Applicative[M[_]] extends Functor[M] { case _ => val mb = ms.foldLeft(apply(Seq.newBuilder[T]))((mb, mt) => joinWith(mb, mt)((b, t) => b += t)) - map(mb)(_.result) + map(mb)(_.result()) } def joinWith[T, U, V](mt: M[T], mu: M[U])(fn: (T, U) => V): M[V] = map(join(mt, mu)) { case (t, u) => fn(t, u) } @@ -102,7 +102,7 @@ object Applicative { )(implicit app: Applicative[M], cbf: Factory[T, R[T]]): M[R[T]] = { val bldr = cbf.newBuilder val mbldr = ms.iterator.foldLeft(app.apply(bldr))((mb, mt) => app.joinWith(mb, mt)(_ += _)) - app.map(mbldr)(_.result) + app.map(mbldr)(_.result()) } def joinWith[M[_], T, U, V](mt: M[T], mu: M[U])(fn: (T, U) => V)(implicit app: Applicative[M]): M[V] = diff --git a/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala b/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala index 9d684db79..f9f0bb0d8 100644 --- 
a/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/AveragedValue.scala @@ -31,7 +31,7 @@ import scala.collection.compat._ * As long as your count doesn't overflow a Long, the mean calculation won't overflow. * * @see - * [[MomentsGroup.getCombinedMean]] for implementation of [[+]] + * [[Moments.getCombinedMeanDouble]] for implementation of [[+]] * @param count * the number of aggregated items * @param value @@ -77,7 +77,7 @@ case class AveragedValue(count: Long, value: Double) { * an instance representing the mean of this instance and `that`. */ def +(that: Double): AveragedValue = - AveragedValue(count + 1L, MomentsGroup.getCombinedMean(count, value, 1L, that)) + AveragedValue(count + 1L, Moments.getCombinedMeanDouble(count.toDouble, value, 1L, that)) /** * Returns a new instance that averages `that` into this instance. @@ -140,7 +140,6 @@ object AveragedValue { * `AveragedValue` */ object AveragedGroup extends Group[AveragedValue] with CommutativeGroup[AveragedValue] { - import MomentsGroup.getCombinedMean override val zero: AveragedValue = AveragedValue(0L, 0.0) @@ -160,7 +159,7 @@ object AveragedGroup extends Group[AveragedValue] with CommutativeGroup[Averaged val it = iter.toIterator while (it.hasNext) { val av = it.next() - average = getCombinedMean(count, average, av.count, av.value) + average = Moments.getCombinedMeanDouble(count.toDouble, average, av.count.toDouble, av.value) count += av.count } Some(AveragedValue(count, average)) @@ -172,7 +171,7 @@ object AveragedGroup extends Group[AveragedValue] with CommutativeGroup[Averaged override def plus(l: AveragedValue, r: AveragedValue): AveragedValue = { val n = l.count val k = r.count - val newAve = getCombinedMean(n, l.value, k, r.value) + val newAve = Moments.getCombinedMeanDouble(n.toDouble, l.value, k.toDouble, r.value) AveragedValue(n + k, newAve) } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Batched.scala b/algebird-core/src/main/scala/com/twitter/algebird/Batched.scala index d209a98dc..91fee5556 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Batched.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Batched.scala @@ -45,7 +45,7 @@ sealed abstract class Batched[T] extends Serializable { * This method will grow the tree to the left. */ def append(that: TraversableOnce[T]): Batched[T] = - that.foldLeft(this)((b, t) => b.combine(Batched(t))) + that.iterator.foldLeft(this)((b, t) => b.combine(Batched(t))) /** * Provide an iterator over the underlying tree structure. 
@@ -104,7 +104,7 @@ object Batched { if (ts.iterator.isEmpty) None else { val it = ts.iterator - val t0 = it.next + val t0 = it.next() Some(Item(t0).append(it)) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala b/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala index bda97981d..292e5683b 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/BloomFilter.scala @@ -33,7 +33,7 @@ object RichCBitSet { def fromBitSet(bs: BitSet): CBitSet = { val nbs = new CBitSet val it = bs.iterator - while (it.hasNext) { nbs.set(it.next) } + while (it.hasNext) { nbs.set(it.next()) } nbs } implicit def cb2rcb(cb: CBitSet): RichCBitSet = new RichCBitSet(cb) @@ -60,7 +60,7 @@ class RichCBitSet(val cb: CBitSet) extends AnyVal { val a = LongBitSet.empty(width) val iter = cb.intIterator while (iter.hasNext) { - val i = iter.next + val i = iter.next() a.set(i) } a.toBitSetNoCopy @@ -235,7 +235,7 @@ case class BloomFilterMonoid[A](numHashes: Int, width: Int)(implicit hash: Hash1 case BFInstance(_, bitset, _) => // these Ints are boxed so, that's a minor bummer val iter = bitset.iterator - while (iter.hasNext) { set(iter.next) } + while (iter.hasNext) { set(iter.next()) } } if (sets == 0) Some(zero) else if (sets == numHashes && (oneItem != null)) Some(oneItem) @@ -307,7 +307,7 @@ object BF { new IntIterator { val boxedIter: Iterator[Int] = bitset.iterator override def hasNext: Boolean = boxedIter.hasNext - override def next: Int = boxedIter.next + override def next: Int = boxedIter.next() } case BFZero(_, _) => new IntIterator { @@ -484,7 +484,7 @@ case class BFSparse[A](hashes: BFHash[A], bits: CBitSet, override val width: Int var count = 0 while (it.hasNext) { count += 1 - it.next + it.next() } count } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/BufferedOperation.scala b/algebird-core/src/main/scala/com/twitter/algebird/BufferedOperation.scala index e8c45b668..102f2e3c7 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/BufferedOperation.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/BufferedOperation.scala @@ -45,7 +45,7 @@ abstract class ArrayBufferedOperation[I, O](size: Int) extends Buffered[I, O] { if (buffer.isEmpty) None else { val res = operate(buffer.toSeq) - buffer.clear + buffer.clear() Some(res) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala index a4274ebc8..6fce87943 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/CorrelationMonoid.scala @@ -1,5 +1,7 @@ package com.twitter.algebird +import scala.collection.compat._ + object Correlation { def apply(x: (Double, Double), weight: Double): Correlation = Correlation(c2 = 0, m2x = 0, m2y = 0, m1x = x._1, m1y = x._2, weight) @@ -113,9 +115,9 @@ object CorrelationMonoid extends Monoid[Correlation] { override val zero: Correlation = Correlation(0, 0, 0, 0, 0, 0) override def sumOption(cs: TraversableOnce[Correlation]): Option[Correlation] = - if (cs.isEmpty) None + if (cs.iterator.isEmpty) None else { - val iter = cs.toIterator + val iter = cs.iterator val item = iter.next() var m0 = item.m0 diff --git a/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala b/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala index f000c7fe3..809d8785f 
100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/CountMinSketch.scala @@ -185,9 +185,9 @@ class CMSSummation[K](params: CMSParams[K]) { val rit = matrix.iterator while (rit.hasNext) { var col = 0 - val cit = rit.next.iterator + val cit = rit.next().iterator while (cit.hasNext) { - cells(offset + col) += cit.next + cells(offset + col) += cit.next() col += 1 } offset += width @@ -206,7 +206,7 @@ class CMSSummation[K](params: CMSParams[K]) { b += cells(offset + col) col += 1 } - b.result + b.result() } val b = Vector.newBuilder[Vector[Long]] @@ -215,7 +215,7 @@ class CMSSummation[K](params: CMSParams[K]) { b += vectorize(row) row += 1 } - CMSInstance(CMSInstance.CountsTable(b.result), totalCount, params) + CMSInstance(CMSInstance.CountsTable(b.result()), totalCount, params) } } @@ -724,7 +724,7 @@ case class CMSInstance[K]( val it = countsTable.counts.iterator var i = 0 while (it.hasNext) { - val row = it.next + val row = it.next() val count = row(hs(i)(item)) if (count < freq) freq = count i += 1 @@ -817,13 +817,13 @@ object CMSInstance { val yss = other.counts.iterator val rows = Vector.newBuilder[Vector[Long]] while (xss.hasNext) { - val xs = xss.next.iterator - val ys = yss.next.iterator + val xs = xss.next().iterator + val ys = yss.next().iterator val row = Vector.newBuilder[Long] - while (xs.hasNext) row += (xs.next + ys.next) - rows += row.result + while (xs.hasNext) row += (xs.next() + ys.next()) + rows += row.result() } - CountsTable[K](rows.result) + CountsTable[K](rows.result()) } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala b/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala index 2b6a5f157..7e2a27d35 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/DecayingCMS.scala @@ -5,6 +5,7 @@ import java.lang.Math import java.util.Arrays.deepHashCode import scala.concurrent.duration.Duration import scala.util.Random +import scala.collection.compat._ /** * DecayingCMS is a module to build count-min sketch instances whose counts decay exponentially. 
@@ -210,7 +211,7 @@ final class DecayingCMS[K]( val hashFns: Array[K => Int] = { val rng = new Random(seed) def genPos(): Int = - rng.nextInt match { + rng.nextInt() match { case 0 => genPos() case n => n & 0x7fffffff } @@ -323,10 +324,10 @@ final class DecayingCMS[K]( var i = 0 while (i < cells.length) { val it = cells(i).iterator - var localMax = it.next // we know it doesn't start empty + var localMax = it.next() // we know it doesn't start empty if (localMax < minMinimum) minMinimum = localMax while (it.hasNext) { - val n = it.next + val n = it.next() if (n > localMax) localMax = n else if (n < minMinimum) minMinimum = n } @@ -362,7 +363,7 @@ final class DecayingCMS[K]( val it0 = this.cells(i).iterator val it1 = that.cells(i).iterator while (it0.hasNext) { - val x = it0.next * it1.next + val x = it0.next() * it1.next() if (x != 0.0) sum += x } if (sum < res) res = sum @@ -426,7 +427,7 @@ final class DecayingCMS[K]( val x = this val y = other val timeInHL = Math.max(x.timeInHL, y.timeInHL) - val cms = new CMS(allocCells, 0.0, timeInHL) + val cms = new CMS(allocCells(), 0.0, timeInHL) val xscale = x.getScale(timeInHL) val yscale = y.getScale(timeInHL) @@ -445,7 +446,7 @@ final class DecayingCMS[K]( bldr += prod(left(j), xscale) + prod(right(j), yscale) j += 1 } - cms.cells(i) = bldr.result + cms.cells(i) = bldr.result() i += 1 } cms @@ -505,7 +506,7 @@ final class DecayingCMS[K]( if (expL == 0.0) { new CMS(monoid.zero.cells, 0.0, ts) } else { - val cms = new CMS(allocCells, 0.0, ts) + val cms = new CMS(allocCells(), 0.0, ts) var i = 0 while (i < depth) { val ci = cells(i) @@ -547,7 +548,7 @@ final class DecayingCMS[K]( bldr += scratch(j) j += 1 } - cells(i) = bldr.result + cells(i) = bldr.result() i += 1 } cells @@ -595,7 +596,7 @@ final class DecayingCMS[K]( override def sumOption(xs: TraversableOnce[CMS]): Option[CMS] = { - val it: Iterator[CMS] = xs.toIterator + val it: Iterator[CMS] = xs.iterator val ChunkSize = 1000 // the idea here is that we read up to 1000 CMS values into @@ -606,7 +607,7 @@ final class DecayingCMS[K]( val arr = new Array[CMS](ChunkSize) while (it.hasNext) { while (it.hasNext && i < ChunkSize) { - arr(i) = it.next + arr(i) = it.next() i += 1 } if (i > 1) { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala b/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala index 19522f589..98492e196 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Eventually.scala @@ -70,7 +70,7 @@ class EventuallySemigroup[E, O](convert: O => E)(mustConvert: O => Boolean)(impl private[this] final def checkSize[T: Semigroup](buffer: Buffer[T]): Unit = if (buffer.size > maxBuffer) { val sum = Semigroup.sumOption(buffer) - buffer.clear + buffer.clear() sum.foreach(buffer += _) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala b/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala index 2f6d6e988..3a1ebfe2a 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/ExpHist.scala @@ -105,7 +105,7 @@ case class ExpHist( b += bucket }, _ => Vector.newBuilder[Bucket], - x => addAll(x.result) + x => addAll(x.result()) ) // This internal method assumes that the instance is stepped forward diff --git a/algebird-core/src/main/scala/com/twitter/algebird/First.scala b/algebird-core/src/main/scala/com/twitter/algebird/First.scala index 115c2e56b..b34c9528e 100644 --- 
a/algebird-core/src/main/scala/com/twitter/algebird/First.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/First.scala @@ -63,7 +63,7 @@ private[algebird] sealed abstract class FirstInstances { override def plus(l: T, r: T): T = l override def sumOption(iter: TraversableOnce[T]): Option[T] = - if (iter.iterator.isEmpty) None else Some(iter.iterator.next) + if (iter.iterator.isEmpty) None else Some(iter.iterator.next()) } /** diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala b/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala index c2f21d145..ded32e628 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Fold.scala @@ -66,8 +66,8 @@ sealed trait Fold[-I, +O] extends Serializable { val self = this new Fold[I, P] { type X = self.X - override def build: FoldState[X, I, P] = - self.build.map(f) + override def build(): FoldState[X, I, P] = + self.build().map(f) } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/GeneratedAbstractAlgebra.scala b/algebird-core/src/main/scala/com/twitter/algebird/GeneratedAbstractAlgebra.scala index 8da148093..4d2cb08ab 100755 --- a/algebird-core/src/main/scala/com/twitter/algebird/GeneratedAbstractAlgebra.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/GeneratedAbstractAlgebra.scala @@ -1,4 +1,4 @@ -// following were autogenerated by scripts/ntuple_generators.rb at 2016-12-02 19:43:04 -0700 do not edit +// following were autogenerated by scripts/ntuple_generators.rb at 2023-03-04 16:46:41 +0000 do not edit package com.twitter.algebird import ArrayBufferedOperation.fromSumOption @@ -10,14 +10,13 @@ import scala.collection.compat._ */ class Tuple2Semigroup[A, B](implicit asemigroup: Semigroup[A], bsemigroup: Semigroup[B]) extends Semigroup[(A, B)] { - override def plus(l: (A, B), r: (A, B)): (A, B) = - (asemigroup.plus(l._1, r._1), bsemigroup.plus(l._2, r._2)) + override def plus(l: (A, B), r: (A, B)): (A, B) = (asemigroup.plus(l._1, r._1), bsemigroup.plus(l._2, r._2)) override def sumOption(to: TraversableOnce[(A, B)]): Option[(A, B)] = if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) - to.foreach { tuple => bufA.put(tuple._1); bufB.put(tuple._2) } + to.iterator.foreach { tuple => bufA.put(tuple._1); bufB.put(tuple._2) } Some((bufA.flush.get, bufB.flush.get)) } } @@ -38,8 +37,7 @@ class Tuple2Group[A, B](implicit agroup: Group[A], bgroup: Group[B]) extends Tuple2Monoid[A, B] with Group[(A, B)] { override def negate(v: (A, B)): (A, B) = (agroup.negate(v._1), bgroup.negate(v._2)) - override def minus(l: (A, B), r: (A, B)): (A, B) = - (agroup.minus(l._1, r._1), bgroup.minus(l._2, r._2)) + override def minus(l: (A, B), r: (A, B)): (A, B) = (agroup.minus(l._1, r._1), bgroup.minus(l._2, r._2)) } /** @@ -47,8 +45,7 @@ class Tuple2Group[A, B](implicit agroup: Group[A], bgroup: Group[B]) */ class Tuple2Ring[A, B](implicit aring: Ring[A], bring: Ring[B]) extends Tuple2Group[A, B] with Ring[(A, B)] { override def one: (A, B) = (aring.one, bring.one) - override def times(l: (A, B), r: (A, B)): (A, B) = - (aring.times(l._1, r._1), bring.times(l._2, r._2)) + override def times(l: (A, B), r: (A, B)): (A, B) = (aring.times(l._1, r._1), bring.times(l._2, r._2)) } /** @@ -62,12 +59,12 @@ class Tuple3Semigroup[A, B, C](implicit override def plus(l: (A, B, C), r: (A, B, C)): (A, B, C) = (asemigroup.plus(l._1, r._1), bsemigroup.plus(l._2, r._2), csemigroup.plus(l._3, r._3)) 
override def sumOption(to: TraversableOnce[(A, B, C)]): Option[(A, B, C)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) val bufC = fromSumOption[C](1000) - to.foreach { tuple => bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3) } + to.iterator.foreach { tuple => bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3) } Some((bufA.flush.get, bufB.flush.get, bufC.flush.get)) } } @@ -113,23 +110,21 @@ class Tuple4Semigroup[A, B, C, D](implicit csemigroup: Semigroup[C], dsemigroup: Semigroup[D] ) extends Semigroup[(A, B, C, D)] { - override def plus(l: (A, B, C, D), r: (A, B, C, D)): (A, B, C, D) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4) - ) + override def plus(l: (A, B, C, D), r: (A, B, C, D)): (A, B, C, D) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4) + ) override def sumOption(to: TraversableOnce[(A, B, C, D)]): Option[(A, B, C, D)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) val bufC = fromSumOption[C](1000) val bufD = fromSumOption[D](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4) } Some((bufA.flush.get, bufB.flush.get, bufC.flush.get, bufD.flush.get)) } @@ -181,25 +176,23 @@ class Tuple5Semigroup[A, B, C, D, E](implicit dsemigroup: Semigroup[D], esemigroup: Semigroup[E] ) extends Semigroup[(A, B, C, D, E)] { - override def plus(l: (A, B, C, D, E), r: (A, B, C, D, E)): (A, B, C, D, E) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5) - ) + override def plus(l: (A, B, C, D, E), r: (A, B, C, D, E)): (A, B, C, D, E) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5) + ) override def sumOption(to: TraversableOnce[(A, B, C, D, E)]): Option[(A, B, C, D, E)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) val bufC = fromSumOption[C](1000) val bufD = fromSumOption[D](1000) val bufE = fromSumOption[E](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5) } Some((bufA.flush.get, bufB.flush.get, bufC.flush.get, bufD.flush.get, bufE.flush.get)) } @@ -216,8 +209,7 @@ class Tuple5Monoid[A, B, C, D, E](implicit emonoid: Monoid[E] ) extends Tuple5Semigroup[A, B, C, D, E] with Monoid[(A, B, C, D, E)] { - override def zero: (A, B, C, D, E) = - (amonoid.zero, bmonoid.zero, cmonoid.zero, dmonoid.zero, emonoid.zero) + override def zero: (A, B, C, D, E) = (amonoid.zero, bmonoid.zero, cmonoid.zero, dmonoid.zero, emonoid.zero) } /** @@ -233,14 +225,13 @@ class Tuple5Group[A, B, C, D, E](implicit with Group[(A, B, C, D, E)] { override def negate(v: (A, B, C, D, E)): (A, B, C, D, E) = (agroup.negate(v._1), bgroup.negate(v._2), cgroup.negate(v._3), dgroup.negate(v._4), 
egroup.negate(v._5)) - override def minus(l: (A, B, C, D, E), r: (A, B, C, D, E)): (A, B, C, D, E) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5) - ) + override def minus(l: (A, B, C, D, E), r: (A, B, C, D, E)): (A, B, C, D, E) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5) + ) } /** @@ -255,14 +246,13 @@ class Tuple5Ring[A, B, C, D, E](implicit ) extends Tuple5Group[A, B, C, D, E] with Ring[(A, B, C, D, E)] { override def one: (A, B, C, D, E) = (aring.one, bring.one, cring.one, dring.one, ering.one) - override def times(l: (A, B, C, D, E), r: (A, B, C, D, E)): (A, B, C, D, E) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5) - ) + override def times(l: (A, B, C, D, E), r: (A, B, C, D, E)): (A, B, C, D, E) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5) + ) } /** @@ -276,17 +266,16 @@ class Tuple6Semigroup[A, B, C, D, E, F](implicit esemigroup: Semigroup[E], fsemigroup: Semigroup[F] ) extends Semigroup[(A, B, C, D, E, F)] { - override def plus(l: (A, B, C, D, E, F), r: (A, B, C, D, E, F)): (A, B, C, D, E, F) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - fsemigroup.plus(l._6, r._6) - ) + override def plus(l: (A, B, C, D, E, F), r: (A, B, C, D, E, F)): (A, B, C, D, E, F) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6) + ) override def sumOption(to: TraversableOnce[(A, B, C, D, E, F)]): Option[(A, B, C, D, E, F)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -294,9 +283,9 @@ class Tuple6Semigroup[A, B, C, D, E, F](implicit val bufD = fromSumOption[D](1000) val bufE = fromSumOption[E](1000) val bufF = fromSumOption[F](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6) } Some((bufA.flush.get, bufB.flush.get, bufC.flush.get, bufD.flush.get, bufE.flush.get, bufF.flush.get)) } @@ -330,24 +319,22 @@ class Tuple6Group[A, B, C, D, E, F](implicit fgroup: Group[F] ) extends Tuple6Monoid[A, B, C, D, E, F] with Group[(A, B, C, D, E, F)] { - override def negate(v: (A, B, C, D, E, F)): (A, B, C, D, E, F) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6) - ) - override def minus(l: (A, B, C, D, E, F), r: (A, B, C, D, E, F)): (A, B, C, D, E, F) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6) - ) + override def negate(v: (A, B, C, D, E, F)): (A, B, C, D, E, F) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6) + ) + override def minus(l: (A, B, C, 
D, E, F), r: (A, B, C, D, E, F)): (A, B, C, D, E, F) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6) + ) } /** @@ -362,17 +349,15 @@ class Tuple6Ring[A, B, C, D, E, F](implicit fring: Ring[F] ) extends Tuple6Group[A, B, C, D, E, F] with Ring[(A, B, C, D, E, F)] { - override def one: (A, B, C, D, E, F) = - (aring.one, bring.one, cring.one, dring.one, ering.one, fring.one) - override def times(l: (A, B, C, D, E, F), r: (A, B, C, D, E, F)): (A, B, C, D, E, F) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6) - ) + override def one: (A, B, C, D, E, F) = (aring.one, bring.one, cring.one, dring.one, ering.one, fring.one) + override def times(l: (A, B, C, D, E, F), r: (A, B, C, D, E, F)): (A, B, C, D, E, F) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6) + ) } /** @@ -387,18 +372,17 @@ class Tuple7Semigroup[A, B, C, D, E, F, G](implicit fsemigroup: Semigroup[F], gsemigroup: Semigroup[G] ) extends Semigroup[(A, B, C, D, E, F, G)] { - override def plus(l: (A, B, C, D, E, F, G), r: (A, B, C, D, E, F, G)): (A, B, C, D, E, F, G) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - fsemigroup.plus(l._6, r._6), - gsemigroup.plus(l._7, r._7) - ) + override def plus(l: (A, B, C, D, E, F, G), r: (A, B, C, D, E, F, G)): (A, B, C, D, E, F, G) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6), + gsemigroup.plus(l._7, r._7) + ) override def sumOption(to: TraversableOnce[(A, B, C, D, E, F, G)]): Option[(A, B, C, D, E, F, G)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -407,10 +391,9 @@ class Tuple7Semigroup[A, B, C, D, E, F, G](implicit val bufE = fromSumOption[E](1000) val bufF = fromSumOption[F](1000) val bufG = fromSumOption[G](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7) } Some( ( @@ -456,26 +439,24 @@ class Tuple7Group[A, B, C, D, E, F, G](implicit ggroup: Group[G] ) extends Tuple7Monoid[A, B, C, D, E, F, G] with Group[(A, B, C, D, E, F, G)] { - override def negate(v: (A, B, C, D, E, F, G)): (A, B, C, D, E, F, G) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7) - ) - override def minus(l: (A, B, C, D, E, F, G), r: (A, B, C, D, E, F, G)): (A, B, C, D, E, F, G) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6), - ggroup.minus(l._7, r._7) - ) + override def negate(v: (A, B, C, D, E, F, G)): (A, B, C, D, E, F, G) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + 
dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6), + ggroup.negate(v._7) + ) + override def minus(l: (A, B, C, D, E, F, G), r: (A, B, C, D, E, F, G)): (A, B, C, D, E, F, G) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6), + ggroup.minus(l._7, r._7) + ) } /** @@ -493,16 +474,15 @@ class Tuple7Ring[A, B, C, D, E, F, G](implicit with Ring[(A, B, C, D, E, F, G)] { override def one: (A, B, C, D, E, F, G) = (aring.one, bring.one, cring.one, dring.one, ering.one, fring.one, gring.one) - override def times(l: (A, B, C, D, E, F, G), r: (A, B, C, D, E, F, G)): (A, B, C, D, E, F, G) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6), - gring.times(l._7, r._7) - ) + override def times(l: (A, B, C, D, E, F, G), r: (A, B, C, D, E, F, G)): (A, B, C, D, E, F, G) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6), + gring.times(l._7, r._7) + ) } /** @@ -518,19 +498,18 @@ class Tuple8Semigroup[A, B, C, D, E, F, G, H](implicit gsemigroup: Semigroup[G], hsemigroup: Semigroup[H] ) extends Semigroup[(A, B, C, D, E, F, G, H)] { - override def plus(l: (A, B, C, D, E, F, G, H), r: (A, B, C, D, E, F, G, H)): (A, B, C, D, E, F, G, H) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - fsemigroup.plus(l._6, r._6), - gsemigroup.plus(l._7, r._7), - hsemigroup.plus(l._8, r._8) - ) + override def plus(l: (A, B, C, D, E, F, G, H), r: (A, B, C, D, E, F, G, H)): (A, B, C, D, E, F, G, H) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6), + gsemigroup.plus(l._7, r._7), + hsemigroup.plus(l._8, r._8) + ) override def sumOption(to: TraversableOnce[(A, B, C, D, E, F, G, H)]): Option[(A, B, C, D, E, F, G, H)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -540,10 +519,9 @@ class Tuple8Semigroup[A, B, C, D, E, F, G, H](implicit val bufF = fromSumOption[F](1000) val bufG = fromSumOption[G](1000) val bufH = fromSumOption[H](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7); bufH.put(tuple._8) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7); bufH.put(tuple._8) } Some( ( @@ -574,17 +552,16 @@ class Tuple8Monoid[A, B, C, D, E, F, G, H](implicit hmonoid: Monoid[H] ) extends Tuple8Semigroup[A, B, C, D, E, F, G, H] with Monoid[(A, B, C, D, E, F, G, H)] { - override def zero: (A, B, C, D, E, F, G, H) = - ( - amonoid.zero, - bmonoid.zero, - cmonoid.zero, - dmonoid.zero, - emonoid.zero, - fmonoid.zero, - gmonoid.zero, - hmonoid.zero - ) + override def zero: (A, B, C, D, E, F, G, H) = ( + amonoid.zero, + bmonoid.zero, + cmonoid.zero, + dmonoid.zero, + emonoid.zero, + fmonoid.zero, + gmonoid.zero, + hmonoid.zero + ) } /** @@ -601,28 +578,26 @@ class Tuple8Group[A, B, C, D, E, F, G, H](implicit hgroup: 
Group[H] ) extends Tuple8Monoid[A, B, C, D, E, F, G, H] with Group[(A, B, C, D, E, F, G, H)] { - override def negate(v: (A, B, C, D, E, F, G, H)): (A, B, C, D, E, F, G, H) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7), - hgroup.negate(v._8) - ) - override def minus(l: (A, B, C, D, E, F, G, H), r: (A, B, C, D, E, F, G, H)): (A, B, C, D, E, F, G, H) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6), - ggroup.minus(l._7, r._7), - hgroup.minus(l._8, r._8) - ) + override def negate(v: (A, B, C, D, E, F, G, H)): (A, B, C, D, E, F, G, H) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6), + ggroup.negate(v._7), + hgroup.negate(v._8) + ) + override def minus(l: (A, B, C, D, E, F, G, H), r: (A, B, C, D, E, F, G, H)): (A, B, C, D, E, F, G, H) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6), + ggroup.minus(l._7, r._7), + hgroup.minus(l._8, r._8) + ) } /** @@ -641,17 +616,16 @@ class Tuple8Ring[A, B, C, D, E, F, G, H](implicit with Ring[(A, B, C, D, E, F, G, H)] { override def one: (A, B, C, D, E, F, G, H) = (aring.one, bring.one, cring.one, dring.one, ering.one, fring.one, gring.one, hring.one) - override def times(l: (A, B, C, D, E, F, G, H), r: (A, B, C, D, E, F, G, H)): (A, B, C, D, E, F, G, H) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6), - gring.times(l._7, r._7), - hring.times(l._8, r._8) - ) + override def times(l: (A, B, C, D, E, F, G, H), r: (A, B, C, D, E, F, G, H)): (A, B, C, D, E, F, G, H) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6), + gring.times(l._7, r._7), + hring.times(l._8, r._8) + ) } /** @@ -671,22 +645,21 @@ class Tuple9Semigroup[A, B, C, D, E, F, G, H, I](implicit override def plus( l: (A, B, C, D, E, F, G, H, I), r: (A, B, C, D, E, F, G, H, I) - ): (A, B, C, D, E, F, G, H, I) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - fsemigroup.plus(l._6, r._6), - gsemigroup.plus(l._7, r._7), - hsemigroup.plus(l._8, r._8), - isemigroup.plus(l._9, r._9) - ) + ): (A, B, C, D, E, F, G, H, I) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6), + gsemigroup.plus(l._7, r._7), + hsemigroup.plus(l._8, r._8), + isemigroup.plus(l._9, r._9) + ) override def sumOption( to: TraversableOnce[(A, B, C, D, E, F, G, H, I)] ): Option[(A, B, C, D, E, F, G, H, I)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -697,10 +670,9 @@ class Tuple9Semigroup[A, B, C, D, E, F, G, H, I](implicit val bufG = fromSumOption[G](1000) val bufH = fromSumOption[H](1000) val bufI = fromSumOption[I](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - 
bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9) } Some( ( @@ -733,18 +705,17 @@ class Tuple9Monoid[A, B, C, D, E, F, G, H, I](implicit imonoid: Monoid[I] ) extends Tuple9Semigroup[A, B, C, D, E, F, G, H, I] with Monoid[(A, B, C, D, E, F, G, H, I)] { - override def zero: (A, B, C, D, E, F, G, H, I) = - ( - amonoid.zero, - bmonoid.zero, - cmonoid.zero, - dmonoid.zero, - emonoid.zero, - fmonoid.zero, - gmonoid.zero, - hmonoid.zero, - imonoid.zero - ) + override def zero: (A, B, C, D, E, F, G, H, I) = ( + amonoid.zero, + bmonoid.zero, + cmonoid.zero, + dmonoid.zero, + emonoid.zero, + fmonoid.zero, + gmonoid.zero, + hmonoid.zero, + imonoid.zero + ) } /** @@ -762,33 +733,31 @@ class Tuple9Group[A, B, C, D, E, F, G, H, I](implicit igroup: Group[I] ) extends Tuple9Monoid[A, B, C, D, E, F, G, H, I] with Group[(A, B, C, D, E, F, G, H, I)] { - override def negate(v: (A, B, C, D, E, F, G, H, I)): (A, B, C, D, E, F, G, H, I) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7), - hgroup.negate(v._8), - igroup.negate(v._9) - ) + override def negate(v: (A, B, C, D, E, F, G, H, I)): (A, B, C, D, E, F, G, H, I) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6), + ggroup.negate(v._7), + hgroup.negate(v._8), + igroup.negate(v._9) + ) override def minus( l: (A, B, C, D, E, F, G, H, I), r: (A, B, C, D, E, F, G, H, I) - ): (A, B, C, D, E, F, G, H, I) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6), - ggroup.minus(l._7, r._7), - hgroup.minus(l._8, r._8), - igroup.minus(l._9, r._9) - ) + ): (A, B, C, D, E, F, G, H, I) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6), + ggroup.minus(l._7, r._7), + hgroup.minus(l._8, r._8), + igroup.minus(l._9, r._9) + ) } /** @@ -811,18 +780,17 @@ class Tuple9Ring[A, B, C, D, E, F, G, H, I](implicit override def times( l: (A, B, C, D, E, F, G, H, I), r: (A, B, C, D, E, F, G, H, I) - ): (A, B, C, D, E, F, G, H, I) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6), - gring.times(l._7, r._7), - hring.times(l._8, r._8), - iring.times(l._9, r._9) - ) + ): (A, B, C, D, E, F, G, H, I) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6), + gring.times(l._7, r._7), + hring.times(l._8, r._8), + iring.times(l._9, r._9) + ) } /** @@ -843,23 +811,22 @@ class Tuple10Semigroup[A, B, C, D, E, F, G, H, I, J](implicit override def plus( l: (A, B, C, D, E, F, G, H, I, J), r: (A, B, C, D, E, F, G, H, I, J) - ): (A, B, C, D, E, F, G, H, I, J) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - fsemigroup.plus(l._6, r._6), - 
gsemigroup.plus(l._7, r._7), - hsemigroup.plus(l._8, r._8), - isemigroup.plus(l._9, r._9), - jsemigroup.plus(l._10, r._10) - ) + ): (A, B, C, D, E, F, G, H, I, J) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6), + gsemigroup.plus(l._7, r._7), + hsemigroup.plus(l._8, r._8), + isemigroup.plus(l._9, r._9), + jsemigroup.plus(l._10, r._10) + ) override def sumOption( to: TraversableOnce[(A, B, C, D, E, F, G, H, I, J)] ): Option[(A, B, C, D, E, F, G, H, I, J)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -871,11 +838,9 @@ class Tuple10Semigroup[A, B, C, D, E, F, G, H, I, J](implicit val bufH = fromSumOption[H](1000) val bufI = fromSumOption[I](1000) val bufJ = fromSumOption[J](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); - bufJ.put(tuple._10) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); bufJ.put(tuple._10) } Some( ( @@ -910,19 +875,18 @@ class Tuple10Monoid[A, B, C, D, E, F, G, H, I, J](implicit jmonoid: Monoid[J] ) extends Tuple10Semigroup[A, B, C, D, E, F, G, H, I, J] with Monoid[(A, B, C, D, E, F, G, H, I, J)] { - override def zero: (A, B, C, D, E, F, G, H, I, J) = - ( - amonoid.zero, - bmonoid.zero, - cmonoid.zero, - dmonoid.zero, - emonoid.zero, - fmonoid.zero, - gmonoid.zero, - hmonoid.zero, - imonoid.zero, - jmonoid.zero - ) + override def zero: (A, B, C, D, E, F, G, H, I, J) = ( + amonoid.zero, + bmonoid.zero, + cmonoid.zero, + dmonoid.zero, + emonoid.zero, + fmonoid.zero, + gmonoid.zero, + hmonoid.zero, + imonoid.zero, + jmonoid.zero + ) } /** @@ -941,35 +905,33 @@ class Tuple10Group[A, B, C, D, E, F, G, H, I, J](implicit jgroup: Group[J] ) extends Tuple10Monoid[A, B, C, D, E, F, G, H, I, J] with Group[(A, B, C, D, E, F, G, H, I, J)] { - override def negate(v: (A, B, C, D, E, F, G, H, I, J)): (A, B, C, D, E, F, G, H, I, J) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7), - hgroup.negate(v._8), - igroup.negate(v._9), - jgroup.negate(v._10) - ) + override def negate(v: (A, B, C, D, E, F, G, H, I, J)): (A, B, C, D, E, F, G, H, I, J) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6), + ggroup.negate(v._7), + hgroup.negate(v._8), + igroup.negate(v._9), + jgroup.negate(v._10) + ) override def minus( l: (A, B, C, D, E, F, G, H, I, J), r: (A, B, C, D, E, F, G, H, I, J) - ): (A, B, C, D, E, F, G, H, I, J) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6), - ggroup.minus(l._7, r._7), - hgroup.minus(l._8, r._8), - igroup.minus(l._9, r._9), - jgroup.minus(l._10, r._10) - ) + ): (A, B, C, D, E, F, G, H, I, J) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6), + ggroup.minus(l._7, r._7), + 
hgroup.minus(l._8, r._8), + igroup.minus(l._9, r._9), + jgroup.minus(l._10, r._10) + ) } /** @@ -988,35 +950,33 @@ class Tuple10Ring[A, B, C, D, E, F, G, H, I, J](implicit jring: Ring[J] ) extends Tuple10Group[A, B, C, D, E, F, G, H, I, J] with Ring[(A, B, C, D, E, F, G, H, I, J)] { - override def one: (A, B, C, D, E, F, G, H, I, J) = - ( - aring.one, - bring.one, - cring.one, - dring.one, - ering.one, - fring.one, - gring.one, - hring.one, - iring.one, - jring.one - ) + override def one: (A, B, C, D, E, F, G, H, I, J) = ( + aring.one, + bring.one, + cring.one, + dring.one, + ering.one, + fring.one, + gring.one, + hring.one, + iring.one, + jring.one + ) override def times( l: (A, B, C, D, E, F, G, H, I, J), r: (A, B, C, D, E, F, G, H, I, J) - ): (A, B, C, D, E, F, G, H, I, J) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6), - gring.times(l._7, r._7), - hring.times(l._8, r._8), - iring.times(l._9, r._9), - jring.times(l._10, r._10) - ) + ): (A, B, C, D, E, F, G, H, I, J) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6), + gring.times(l._7, r._7), + hring.times(l._8, r._8), + iring.times(l._9, r._9), + jring.times(l._10, r._10) + ) } /** @@ -1038,24 +998,23 @@ class Tuple11Semigroup[A, B, C, D, E, F, G, H, I, J, K](implicit override def plus( l: (A, B, C, D, E, F, G, H, I, J, K), r: (A, B, C, D, E, F, G, H, I, J, K) - ): (A, B, C, D, E, F, G, H, I, J, K) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - fsemigroup.plus(l._6, r._6), - gsemigroup.plus(l._7, r._7), - hsemigroup.plus(l._8, r._8), - isemigroup.plus(l._9, r._9), - jsemigroup.plus(l._10, r._10), - ksemigroup.plus(l._11, r._11) - ) + ): (A, B, C, D, E, F, G, H, I, J, K) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6), + gsemigroup.plus(l._7, r._7), + hsemigroup.plus(l._8, r._8), + isemigroup.plus(l._9, r._9), + jsemigroup.plus(l._10, r._10), + ksemigroup.plus(l._11, r._11) + ) override def sumOption( to: TraversableOnce[(A, B, C, D, E, F, G, H, I, J, K)] ): Option[(A, B, C, D, E, F, G, H, I, J, K)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -1068,11 +1027,10 @@ class Tuple11Semigroup[A, B, C, D, E, F, G, H, I, J, K](implicit val bufI = fromSumOption[I](1000) val bufJ = fromSumOption[J](1000) val bufK = fromSumOption[K](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); - bufJ.put(tuple._10); bufK.put(tuple._11) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); bufJ.put(tuple._10); + bufK.put(tuple._11) } Some( ( @@ -1109,20 +1067,19 @@ class Tuple11Monoid[A, B, C, D, E, F, G, H, I, J, K](implicit kmonoid: Monoid[K] ) extends Tuple11Semigroup[A, B, C, D, E, F, G, H, I, J, K] with Monoid[(A, B, C, D, E, F, G, H, I, J, K)] { - override def 
zero: (A, B, C, D, E, F, G, H, I, J, K) = - ( - amonoid.zero, - bmonoid.zero, - cmonoid.zero, - dmonoid.zero, - emonoid.zero, - fmonoid.zero, - gmonoid.zero, - hmonoid.zero, - imonoid.zero, - jmonoid.zero, - kmonoid.zero - ) + override def zero: (A, B, C, D, E, F, G, H, I, J, K) = ( + amonoid.zero, + bmonoid.zero, + cmonoid.zero, + dmonoid.zero, + emonoid.zero, + fmonoid.zero, + gmonoid.zero, + hmonoid.zero, + imonoid.zero, + jmonoid.zero, + kmonoid.zero + ) } /** @@ -1142,37 +1099,35 @@ class Tuple11Group[A, B, C, D, E, F, G, H, I, J, K](implicit kgroup: Group[K] ) extends Tuple11Monoid[A, B, C, D, E, F, G, H, I, J, K] with Group[(A, B, C, D, E, F, G, H, I, J, K)] { - override def negate(v: (A, B, C, D, E, F, G, H, I, J, K)): (A, B, C, D, E, F, G, H, I, J, K) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7), - hgroup.negate(v._8), - igroup.negate(v._9), - jgroup.negate(v._10), - kgroup.negate(v._11) - ) + override def negate(v: (A, B, C, D, E, F, G, H, I, J, K)): (A, B, C, D, E, F, G, H, I, J, K) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6), + ggroup.negate(v._7), + hgroup.negate(v._8), + igroup.negate(v._9), + jgroup.negate(v._10), + kgroup.negate(v._11) + ) override def minus( l: (A, B, C, D, E, F, G, H, I, J, K), r: (A, B, C, D, E, F, G, H, I, J, K) - ): (A, B, C, D, E, F, G, H, I, J, K) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6), - ggroup.minus(l._7, r._7), - hgroup.minus(l._8, r._8), - igroup.minus(l._9, r._9), - jgroup.minus(l._10, r._10), - kgroup.minus(l._11, r._11) - ) + ): (A, B, C, D, E, F, G, H, I, J, K) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6), + ggroup.minus(l._7, r._7), + hgroup.minus(l._8, r._8), + igroup.minus(l._9, r._9), + jgroup.minus(l._10, r._10), + kgroup.minus(l._11, r._11) + ) } /** @@ -1192,37 +1147,35 @@ class Tuple11Ring[A, B, C, D, E, F, G, H, I, J, K](implicit kring: Ring[K] ) extends Tuple11Group[A, B, C, D, E, F, G, H, I, J, K] with Ring[(A, B, C, D, E, F, G, H, I, J, K)] { - override def one: (A, B, C, D, E, F, G, H, I, J, K) = - ( - aring.one, - bring.one, - cring.one, - dring.one, - ering.one, - fring.one, - gring.one, - hring.one, - iring.one, - jring.one, - kring.one - ) + override def one: (A, B, C, D, E, F, G, H, I, J, K) = ( + aring.one, + bring.one, + cring.one, + dring.one, + ering.one, + fring.one, + gring.one, + hring.one, + iring.one, + jring.one, + kring.one + ) override def times( l: (A, B, C, D, E, F, G, H, I, J, K), r: (A, B, C, D, E, F, G, H, I, J, K) - ): (A, B, C, D, E, F, G, H, I, J, K) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6), - gring.times(l._7, r._7), - hring.times(l._8, r._8), - iring.times(l._9, r._9), - jring.times(l._10, r._10), - kring.times(l._11, r._11) - ) + ): (A, B, C, D, E, F, G, H, I, J, K) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6), + gring.times(l._7, r._7), + hring.times(l._8, r._8), + iring.times(l._9, r._9), + 
jring.times(l._10, r._10), + kring.times(l._11, r._11) + ) } /** @@ -1245,25 +1198,24 @@ class Tuple12Semigroup[A, B, C, D, E, F, G, H, I, J, K, L](implicit override def plus( l: (A, B, C, D, E, F, G, H, I, J, K, L), r: (A, B, C, D, E, F, G, H, I, J, K, L) - ): (A, B, C, D, E, F, G, H, I, J, K, L) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - fsemigroup.plus(l._6, r._6), - gsemigroup.plus(l._7, r._7), - hsemigroup.plus(l._8, r._8), - isemigroup.plus(l._9, r._9), - jsemigroup.plus(l._10, r._10), - ksemigroup.plus(l._11, r._11), - lsemigroup.plus(l._12, r._12) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6), + gsemigroup.plus(l._7, r._7), + hsemigroup.plus(l._8, r._8), + isemigroup.plus(l._9, r._9), + jsemigroup.plus(l._10, r._10), + ksemigroup.plus(l._11, r._11), + lsemigroup.plus(l._12, r._12) + ) override def sumOption( to: TraversableOnce[(A, B, C, D, E, F, G, H, I, J, K, L)] ): Option[(A, B, C, D, E, F, G, H, I, J, K, L)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -1277,11 +1229,10 @@ class Tuple12Semigroup[A, B, C, D, E, F, G, H, I, J, K, L](implicit val bufJ = fromSumOption[J](1000) val bufK = fromSumOption[K](1000) val bufL = fromSumOption[L](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); - bufJ.put(tuple._10); bufK.put(tuple._11); bufL.put(tuple._12) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); bufJ.put(tuple._10); + bufK.put(tuple._11); bufL.put(tuple._12) } Some( ( @@ -1320,21 +1271,20 @@ class Tuple12Monoid[A, B, C, D, E, F, G, H, I, J, K, L](implicit lmonoid: Monoid[L] ) extends Tuple12Semigroup[A, B, C, D, E, F, G, H, I, J, K, L] with Monoid[(A, B, C, D, E, F, G, H, I, J, K, L)] { - override def zero: (A, B, C, D, E, F, G, H, I, J, K, L) = - ( - amonoid.zero, - bmonoid.zero, - cmonoid.zero, - dmonoid.zero, - emonoid.zero, - fmonoid.zero, - gmonoid.zero, - hmonoid.zero, - imonoid.zero, - jmonoid.zero, - kmonoid.zero, - lmonoid.zero - ) + override def zero: (A, B, C, D, E, F, G, H, I, J, K, L) = ( + amonoid.zero, + bmonoid.zero, + cmonoid.zero, + dmonoid.zero, + emonoid.zero, + fmonoid.zero, + gmonoid.zero, + hmonoid.zero, + imonoid.zero, + jmonoid.zero, + kmonoid.zero, + lmonoid.zero + ) } /** @@ -1355,39 +1305,37 @@ class Tuple12Group[A, B, C, D, E, F, G, H, I, J, K, L](implicit lgroup: Group[L] ) extends Tuple12Monoid[A, B, C, D, E, F, G, H, I, J, K, L] with Group[(A, B, C, D, E, F, G, H, I, J, K, L)] { - override def negate(v: (A, B, C, D, E, F, G, H, I, J, K, L)): (A, B, C, D, E, F, G, H, I, J, K, L) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7), - hgroup.negate(v._8), - igroup.negate(v._9), - jgroup.negate(v._10), - kgroup.negate(v._11), - lgroup.negate(v._12) - ) + override def negate(v: (A, B, C, D, E, F, G, H, I, J, K, L)): (A, B, 
C, D, E, F, G, H, I, J, K, L) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6), + ggroup.negate(v._7), + hgroup.negate(v._8), + igroup.negate(v._9), + jgroup.negate(v._10), + kgroup.negate(v._11), + lgroup.negate(v._12) + ) override def minus( l: (A, B, C, D, E, F, G, H, I, J, K, L), r: (A, B, C, D, E, F, G, H, I, J, K, L) - ): (A, B, C, D, E, F, G, H, I, J, K, L) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6), - ggroup.minus(l._7, r._7), - hgroup.minus(l._8, r._8), - igroup.minus(l._9, r._9), - jgroup.minus(l._10, r._10), - kgroup.minus(l._11, r._11), - lgroup.minus(l._12, r._12) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6), + ggroup.minus(l._7, r._7), + hgroup.minus(l._8, r._8), + igroup.minus(l._9, r._9), + jgroup.minus(l._10, r._10), + kgroup.minus(l._11, r._11), + lgroup.minus(l._12, r._12) + ) } /** @@ -1408,39 +1356,37 @@ class Tuple12Ring[A, B, C, D, E, F, G, H, I, J, K, L](implicit lring: Ring[L] ) extends Tuple12Group[A, B, C, D, E, F, G, H, I, J, K, L] with Ring[(A, B, C, D, E, F, G, H, I, J, K, L)] { - override def one: (A, B, C, D, E, F, G, H, I, J, K, L) = - ( - aring.one, - bring.one, - cring.one, - dring.one, - ering.one, - fring.one, - gring.one, - hring.one, - iring.one, - jring.one, - kring.one, - lring.one - ) + override def one: (A, B, C, D, E, F, G, H, I, J, K, L) = ( + aring.one, + bring.one, + cring.one, + dring.one, + ering.one, + fring.one, + gring.one, + hring.one, + iring.one, + jring.one, + kring.one, + lring.one + ) override def times( l: (A, B, C, D, E, F, G, H, I, J, K, L), r: (A, B, C, D, E, F, G, H, I, J, K, L) - ): (A, B, C, D, E, F, G, H, I, J, K, L) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6), - gring.times(l._7, r._7), - hring.times(l._8, r._8), - iring.times(l._9, r._9), - jring.times(l._10, r._10), - kring.times(l._11, r._11), - lring.times(l._12, r._12) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6), + gring.times(l._7, r._7), + hring.times(l._8, r._8), + iring.times(l._9, r._9), + jring.times(l._10, r._10), + kring.times(l._11, r._11), + lring.times(l._12, r._12) + ) } /** @@ -1483,7 +1429,7 @@ class Tuple13Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M](implicit override def sumOption( to: TraversableOnce[(A, B, C, D, E, F, G, H, I, J, K, L, M)] ): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -1498,12 +1444,10 @@ class Tuple13Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M](implicit val bufK = fromSumOption[K](1000) val bufL = fromSumOption[L](1000) val bufM = fromSumOption[M](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); - bufJ.put(tuple._10); bufK.put(tuple._11); 
bufL.put(tuple._12); - bufM.put(tuple._13) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); bufJ.put(tuple._10); + bufK.put(tuple._11); bufL.put(tuple._12); bufM.put(tuple._13) } Some( ( @@ -1544,22 +1488,21 @@ class Tuple13Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M](implicit mmonoid: Monoid[M] ) extends Tuple13Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M] with Monoid[(A, B, C, D, E, F, G, H, I, J, K, L, M)] { - override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M) = - ( - amonoid.zero, - bmonoid.zero, - cmonoid.zero, - dmonoid.zero, - emonoid.zero, - fmonoid.zero, - gmonoid.zero, - hmonoid.zero, - imonoid.zero, - jmonoid.zero, - kmonoid.zero, - lmonoid.zero, - mmonoid.zero - ) + override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M) = ( + amonoid.zero, + bmonoid.zero, + cmonoid.zero, + dmonoid.zero, + emonoid.zero, + fmonoid.zero, + gmonoid.zero, + hmonoid.zero, + imonoid.zero, + jmonoid.zero, + kmonoid.zero, + lmonoid.zero, + mmonoid.zero + ) } /** @@ -1581,22 +1524,21 @@ class Tuple13Group[A, B, C, D, E, F, G, H, I, J, K, L, M](implicit mgroup: Group[M] ) extends Tuple13Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M] with Group[(A, B, C, D, E, F, G, H, I, J, K, L, M)] { - override def negate(v: (A, B, C, D, E, F, G, H, I, J, K, L, M)): (A, B, C, D, E, F, G, H, I, J, K, L, M) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7), - hgroup.negate(v._8), - igroup.negate(v._9), - jgroup.negate(v._10), - kgroup.negate(v._11), - lgroup.negate(v._12), - mgroup.negate(v._13) - ) + override def negate(v: (A, B, C, D, E, F, G, H, I, J, K, L, M)): (A, B, C, D, E, F, G, H, I, J, K, L, M) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6), + ggroup.negate(v._7), + hgroup.negate(v._8), + igroup.negate(v._9), + jgroup.negate(v._10), + kgroup.negate(v._11), + lgroup.negate(v._12), + mgroup.negate(v._13) + ) override def minus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M), r: (A, B, C, D, E, F, G, H, I, J, K, L, M) @@ -1637,22 +1579,21 @@ class Tuple13Ring[A, B, C, D, E, F, G, H, I, J, K, L, M](implicit mring: Ring[M] ) extends Tuple13Group[A, B, C, D, E, F, G, H, I, J, K, L, M] with Ring[(A, B, C, D, E, F, G, H, I, J, K, L, M)] { - override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M) = - ( - aring.one, - bring.one, - cring.one, - dring.one, - ering.one, - fring.one, - gring.one, - hring.one, - iring.one, - jring.one, - kring.one, - lring.one, - mring.one - ) + override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M) = ( + aring.one, + bring.one, + cring.one, + dring.one, + ering.one, + fring.one, + gring.one, + hring.one, + iring.one, + jring.one, + kring.one, + lring.one, + mring.one + ) override def times( l: (A, B, C, D, E, F, G, H, I, J, K, L, M), r: (A, B, C, D, E, F, G, H, I, J, K, L, M) @@ -1696,27 +1637,26 @@ class Tuple14Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N](implicit override def plus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - 
fsemigroup.plus(l._6, r._6), - gsemigroup.plus(l._7, r._7), - hsemigroup.plus(l._8, r._8), - isemigroup.plus(l._9, r._9), - jsemigroup.plus(l._10, r._10), - ksemigroup.plus(l._11, r._11), - lsemigroup.plus(l._12, r._12), - msemigroup.plus(l._13, r._13), - nsemigroup.plus(l._14, r._14) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6), + gsemigroup.plus(l._7, r._7), + hsemigroup.plus(l._8, r._8), + isemigroup.plus(l._9, r._9), + jsemigroup.plus(l._10, r._10), + ksemigroup.plus(l._11, r._11), + lsemigroup.plus(l._12, r._12), + msemigroup.plus(l._13, r._13), + nsemigroup.plus(l._14, r._14) + ) override def sumOption( to: TraversableOnce[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] ): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -1732,12 +1672,10 @@ class Tuple14Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N](implicit val bufL = fromSumOption[L](1000) val bufM = fromSumOption[M](1000) val bufN = fromSumOption[N](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); - bufJ.put(tuple._10); bufK.put(tuple._11); bufL.put(tuple._12); - bufM.put(tuple._13); bufN.put(tuple._14) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); bufJ.put(tuple._10); + bufK.put(tuple._11); bufL.put(tuple._12); bufM.put(tuple._13); bufN.put(tuple._14) } Some( ( @@ -1780,23 +1718,22 @@ class Tuple14Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N](implicit nmonoid: Monoid[N] ) extends Tuple14Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N] with Monoid[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] { - override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) = - ( - amonoid.zero, - bmonoid.zero, - cmonoid.zero, - dmonoid.zero, - emonoid.zero, - fmonoid.zero, - gmonoid.zero, - hmonoid.zero, - imonoid.zero, - jmonoid.zero, - kmonoid.zero, - lmonoid.zero, - mmonoid.zero, - nmonoid.zero - ) + override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) = ( + amonoid.zero, + bmonoid.zero, + cmonoid.zero, + dmonoid.zero, + emonoid.zero, + fmonoid.zero, + gmonoid.zero, + hmonoid.zero, + imonoid.zero, + jmonoid.zero, + kmonoid.zero, + lmonoid.zero, + mmonoid.zero, + nmonoid.zero + ) } /** @@ -1821,43 +1758,41 @@ class Tuple14Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N](implicit with Group[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] { override def negate( v: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7), - hgroup.negate(v._8), - igroup.negate(v._9), - jgroup.negate(v._10), - kgroup.negate(v._11), - lgroup.negate(v._12), - mgroup.negate(v._13), - ngroup.negate(v._14) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6), + 
ggroup.negate(v._7), + hgroup.negate(v._8), + igroup.negate(v._9), + jgroup.negate(v._10), + kgroup.negate(v._11), + lgroup.negate(v._12), + mgroup.negate(v._13), + ngroup.negate(v._14) + ) override def minus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6), - ggroup.minus(l._7, r._7), - hgroup.minus(l._8, r._8), - igroup.minus(l._9, r._9), - jgroup.minus(l._10, r._10), - kgroup.minus(l._11, r._11), - lgroup.minus(l._12, r._12), - mgroup.minus(l._13, r._13), - ngroup.minus(l._14, r._14) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6), + ggroup.minus(l._7, r._7), + hgroup.minus(l._8, r._8), + igroup.minus(l._9, r._9), + jgroup.minus(l._10, r._10), + kgroup.minus(l._11, r._11), + lgroup.minus(l._12, r._12), + mgroup.minus(l._13, r._13), + ngroup.minus(l._14, r._14) + ) } /** @@ -1880,43 +1815,41 @@ class Tuple14Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N](implicit nring: Ring[N] ) extends Tuple14Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N] with Ring[(A, B, C, D, E, F, G, H, I, J, K, L, M, N)] { - override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) = - ( - aring.one, - bring.one, - cring.one, - dring.one, - ering.one, - fring.one, - gring.one, - hring.one, - iring.one, - jring.one, - kring.one, - lring.one, - mring.one, - nring.one - ) + override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) = ( + aring.one, + bring.one, + cring.one, + dring.one, + ering.one, + fring.one, + gring.one, + hring.one, + iring.one, + jring.one, + kring.one, + lring.one, + mring.one, + nring.one + ) override def times( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6), - gring.times(l._7, r._7), - hring.times(l._8, r._8), - iring.times(l._9, r._9), - jring.times(l._10, r._10), - kring.times(l._11, r._11), - lring.times(l._12, r._12), - mring.times(l._13, r._13), - nring.times(l._14, r._14) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6), + gring.times(l._7, r._7), + hring.times(l._8, r._8), + iring.times(l._9, r._9), + jring.times(l._10, r._10), + kring.times(l._11, r._11), + lring.times(l._12, r._12), + mring.times(l._13, r._13), + nring.times(l._14, r._14) + ) } /** @@ -1942,28 +1875,27 @@ class Tuple15Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](implicit override def plus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - fsemigroup.plus(l._6, r._6), - gsemigroup.plus(l._7, r._7), - hsemigroup.plus(l._8, r._8), - isemigroup.plus(l._9, r._9), - jsemigroup.plus(l._10, r._10), 
- ksemigroup.plus(l._11, r._11), - lsemigroup.plus(l._12, r._12), - msemigroup.plus(l._13, r._13), - nsemigroup.plus(l._14, r._14), - osemigroup.plus(l._15, r._15) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6), + gsemigroup.plus(l._7, r._7), + hsemigroup.plus(l._8, r._8), + isemigroup.plus(l._9, r._9), + jsemigroup.plus(l._10, r._10), + ksemigroup.plus(l._11, r._11), + lsemigroup.plus(l._12, r._12), + msemigroup.plus(l._13, r._13), + nsemigroup.plus(l._14, r._14), + osemigroup.plus(l._15, r._15) + ) override def sumOption( to: TraversableOnce[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] ): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -1980,12 +1912,11 @@ class Tuple15Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](implicit val bufM = fromSumOption[M](1000) val bufN = fromSumOption[N](1000) val bufO = fromSumOption[O](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); - bufJ.put(tuple._10); bufK.put(tuple._11); bufL.put(tuple._12); - bufM.put(tuple._13); bufN.put(tuple._14); bufO.put(tuple._15) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); bufJ.put(tuple._10); + bufK.put(tuple._11); bufL.put(tuple._12); bufM.put(tuple._13); bufN.put(tuple._14); + bufO.put(tuple._15) } Some( ( @@ -2030,24 +1961,23 @@ class Tuple15Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](implicit omonoid: Monoid[O] ) extends Tuple15Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] with Monoid[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] { - override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) = - ( - amonoid.zero, - bmonoid.zero, - cmonoid.zero, - dmonoid.zero, - emonoid.zero, - fmonoid.zero, - gmonoid.zero, - hmonoid.zero, - imonoid.zero, - jmonoid.zero, - kmonoid.zero, - lmonoid.zero, - mmonoid.zero, - nmonoid.zero, - omonoid.zero - ) + override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) = ( + amonoid.zero, + bmonoid.zero, + cmonoid.zero, + dmonoid.zero, + emonoid.zero, + fmonoid.zero, + gmonoid.zero, + hmonoid.zero, + imonoid.zero, + jmonoid.zero, + kmonoid.zero, + lmonoid.zero, + mmonoid.zero, + nmonoid.zero, + omonoid.zero + ) } /** @@ -2073,45 +2003,43 @@ class Tuple15Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](implicit with Group[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] { override def negate( v: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7), - hgroup.negate(v._8), - igroup.negate(v._9), - jgroup.negate(v._10), - kgroup.negate(v._11), - lgroup.negate(v._12), - mgroup.negate(v._13), - ngroup.negate(v._14), - ogroup.negate(v._15) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + 
egroup.negate(v._5), + fgroup.negate(v._6), + ggroup.negate(v._7), + hgroup.negate(v._8), + igroup.negate(v._9), + jgroup.negate(v._10), + kgroup.negate(v._11), + lgroup.negate(v._12), + mgroup.negate(v._13), + ngroup.negate(v._14), + ogroup.negate(v._15) + ) override def minus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6), - ggroup.minus(l._7, r._7), - hgroup.minus(l._8, r._8), - igroup.minus(l._9, r._9), - jgroup.minus(l._10, r._10), - kgroup.minus(l._11, r._11), - lgroup.minus(l._12, r._12), - mgroup.minus(l._13, r._13), - ngroup.minus(l._14, r._14), - ogroup.minus(l._15, r._15) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6), + ggroup.minus(l._7, r._7), + hgroup.minus(l._8, r._8), + igroup.minus(l._9, r._9), + jgroup.minus(l._10, r._10), + kgroup.minus(l._11, r._11), + lgroup.minus(l._12, r._12), + mgroup.minus(l._13, r._13), + ngroup.minus(l._14, r._14), + ogroup.minus(l._15, r._15) + ) } /** @@ -2135,45 +2063,43 @@ class Tuple15Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](implicit oring: Ring[O] ) extends Tuple15Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O] with Ring[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O)] { - override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) = - ( - aring.one, - bring.one, - cring.one, - dring.one, - ering.one, - fring.one, - gring.one, - hring.one, - iring.one, - jring.one, - kring.one, - lring.one, - mring.one, - nring.one, - oring.one - ) + override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) = ( + aring.one, + bring.one, + cring.one, + dring.one, + ering.one, + fring.one, + gring.one, + hring.one, + iring.one, + jring.one, + kring.one, + lring.one, + mring.one, + nring.one, + oring.one + ) override def times( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6), - gring.times(l._7, r._7), - hring.times(l._8, r._8), - iring.times(l._9, r._9), - jring.times(l._10, r._10), - kring.times(l._11, r._11), - lring.times(l._12, r._12), - mring.times(l._13, r._13), - nring.times(l._14, r._14), - oring.times(l._15, r._15) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6), + gring.times(l._7, r._7), + hring.times(l._8, r._8), + iring.times(l._9, r._9), + jring.times(l._10, r._10), + kring.times(l._11, r._11), + lring.times(l._12, r._12), + mring.times(l._13, r._13), + nring.times(l._14, r._14), + oring.times(l._15, r._15) + ) } /** @@ -2200,29 +2126,28 @@ class Tuple16Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](implicit override def plus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) = - ( - asemigroup.plus(l._1, r._1), - 
bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - fsemigroup.plus(l._6, r._6), - gsemigroup.plus(l._7, r._7), - hsemigroup.plus(l._8, r._8), - isemigroup.plus(l._9, r._9), - jsemigroup.plus(l._10, r._10), - ksemigroup.plus(l._11, r._11), - lsemigroup.plus(l._12, r._12), - msemigroup.plus(l._13, r._13), - nsemigroup.plus(l._14, r._14), - osemigroup.plus(l._15, r._15), - psemigroup.plus(l._16, r._16) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6), + gsemigroup.plus(l._7, r._7), + hsemigroup.plus(l._8, r._8), + isemigroup.plus(l._9, r._9), + jsemigroup.plus(l._10, r._10), + ksemigroup.plus(l._11, r._11), + lsemigroup.plus(l._12, r._12), + msemigroup.plus(l._13, r._13), + nsemigroup.plus(l._14, r._14), + osemigroup.plus(l._15, r._15), + psemigroup.plus(l._16, r._16) + ) override def sumOption( to: TraversableOnce[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] ): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -2240,13 +2165,11 @@ class Tuple16Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](implicit val bufN = fromSumOption[N](1000) val bufO = fromSumOption[O](1000) val bufP = fromSumOption[P](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); - bufJ.put(tuple._10); bufK.put(tuple._11); bufL.put(tuple._12); - bufM.put(tuple._13); bufN.put(tuple._14); bufO.put(tuple._15); - bufP.put(tuple._16) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); bufJ.put(tuple._10); + bufK.put(tuple._11); bufL.put(tuple._12); bufM.put(tuple._13); bufN.put(tuple._14); + bufO.put(tuple._15); bufP.put(tuple._16) } Some( ( @@ -2293,25 +2216,24 @@ class Tuple16Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](implicit pmonoid: Monoid[P] ) extends Tuple16Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] with Monoid[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] { - override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) = - ( - amonoid.zero, - bmonoid.zero, - cmonoid.zero, - dmonoid.zero, - emonoid.zero, - fmonoid.zero, - gmonoid.zero, - hmonoid.zero, - imonoid.zero, - jmonoid.zero, - kmonoid.zero, - lmonoid.zero, - mmonoid.zero, - nmonoid.zero, - omonoid.zero, - pmonoid.zero - ) + override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) = ( + amonoid.zero, + bmonoid.zero, + cmonoid.zero, + dmonoid.zero, + emonoid.zero, + fmonoid.zero, + gmonoid.zero, + hmonoid.zero, + imonoid.zero, + jmonoid.zero, + kmonoid.zero, + lmonoid.zero, + mmonoid.zero, + nmonoid.zero, + omonoid.zero, + pmonoid.zero + ) } /** @@ -2338,47 +2260,45 @@ class Tuple16Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](implicit with Group[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] { override def negate( v: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) = - ( - agroup.negate(v._1), - 
bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7), - hgroup.negate(v._8), - igroup.negate(v._9), - jgroup.negate(v._10), - kgroup.negate(v._11), - lgroup.negate(v._12), - mgroup.negate(v._13), - ngroup.negate(v._14), - ogroup.negate(v._15), - pgroup.negate(v._16) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6), + ggroup.negate(v._7), + hgroup.negate(v._8), + igroup.negate(v._9), + jgroup.negate(v._10), + kgroup.negate(v._11), + lgroup.negate(v._12), + mgroup.negate(v._13), + ngroup.negate(v._14), + ogroup.negate(v._15), + pgroup.negate(v._16) + ) override def minus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6), - ggroup.minus(l._7, r._7), - hgroup.minus(l._8, r._8), - igroup.minus(l._9, r._9), - jgroup.minus(l._10, r._10), - kgroup.minus(l._11, r._11), - lgroup.minus(l._12, r._12), - mgroup.minus(l._13, r._13), - ngroup.minus(l._14, r._14), - ogroup.minus(l._15, r._15), - pgroup.minus(l._16, r._16) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6), + ggroup.minus(l._7, r._7), + hgroup.minus(l._8, r._8), + igroup.minus(l._9, r._9), + jgroup.minus(l._10, r._10), + kgroup.minus(l._11, r._11), + lgroup.minus(l._12, r._12), + mgroup.minus(l._13, r._13), + ngroup.minus(l._14, r._14), + ogroup.minus(l._15, r._15), + pgroup.minus(l._16, r._16) + ) } /** @@ -2403,47 +2323,45 @@ class Tuple16Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](implicit pring: Ring[P] ) extends Tuple16Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P] with Ring[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P)] { - override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) = - ( - aring.one, - bring.one, - cring.one, - dring.one, - ering.one, - fring.one, - gring.one, - hring.one, - iring.one, - jring.one, - kring.one, - lring.one, - mring.one, - nring.one, - oring.one, - pring.one - ) + override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) = ( + aring.one, + bring.one, + cring.one, + dring.one, + ering.one, + fring.one, + gring.one, + hring.one, + iring.one, + jring.one, + kring.one, + lring.one, + mring.one, + nring.one, + oring.one, + pring.one + ) override def times( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6), - gring.times(l._7, r._7), - hring.times(l._8, r._8), - iring.times(l._9, r._9), - jring.times(l._10, r._10), - kring.times(l._11, r._11), - lring.times(l._12, r._12), - mring.times(l._13, r._13), - nring.times(l._14, r._14), - oring.times(l._15, r._15), - pring.times(l._16, r._16) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, 
r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6), + gring.times(l._7, r._7), + hring.times(l._8, r._8), + iring.times(l._9, r._9), + jring.times(l._10, r._10), + kring.times(l._11, r._11), + lring.times(l._12, r._12), + mring.times(l._13, r._13), + nring.times(l._14, r._14), + oring.times(l._15, r._15), + pring.times(l._16, r._16) + ) } /** @@ -2471,30 +2389,29 @@ class Tuple17Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](implic override def plus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - fsemigroup.plus(l._6, r._6), - gsemigroup.plus(l._7, r._7), - hsemigroup.plus(l._8, r._8), - isemigroup.plus(l._9, r._9), - jsemigroup.plus(l._10, r._10), - ksemigroup.plus(l._11, r._11), - lsemigroup.plus(l._12, r._12), - msemigroup.plus(l._13, r._13), - nsemigroup.plus(l._14, r._14), - osemigroup.plus(l._15, r._15), - psemigroup.plus(l._16, r._16), - qsemigroup.plus(l._17, r._17) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6), + gsemigroup.plus(l._7, r._7), + hsemigroup.plus(l._8, r._8), + isemigroup.plus(l._9, r._9), + jsemigroup.plus(l._10, r._10), + ksemigroup.plus(l._11, r._11), + lsemigroup.plus(l._12, r._12), + msemigroup.plus(l._13, r._13), + nsemigroup.plus(l._14, r._14), + osemigroup.plus(l._15, r._15), + psemigroup.plus(l._16, r._16), + qsemigroup.plus(l._17, r._17) + ) override def sumOption( to: TraversableOnce[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] ): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -2513,13 +2430,11 @@ class Tuple17Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](implic val bufO = fromSumOption[O](1000) val bufP = fromSumOption[P](1000) val bufQ = fromSumOption[Q](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); - bufJ.put(tuple._10); bufK.put(tuple._11); bufL.put(tuple._12); - bufM.put(tuple._13); bufN.put(tuple._14); bufO.put(tuple._15); - bufP.put(tuple._16); bufQ.put(tuple._17) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); bufJ.put(tuple._10); + bufK.put(tuple._11); bufL.put(tuple._12); bufM.put(tuple._13); bufN.put(tuple._14); + bufO.put(tuple._15); bufP.put(tuple._16); bufQ.put(tuple._17) } Some( ( @@ -2568,26 +2483,25 @@ class Tuple17Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](implicit qmonoid: Monoid[Q] ) extends Tuple17Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] with Monoid[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] { - override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) = - ( - amonoid.zero, - bmonoid.zero, - cmonoid.zero, - dmonoid.zero, - emonoid.zero, - fmonoid.zero, 
- gmonoid.zero, - hmonoid.zero, - imonoid.zero, - jmonoid.zero, - kmonoid.zero, - lmonoid.zero, - mmonoid.zero, - nmonoid.zero, - omonoid.zero, - pmonoid.zero, - qmonoid.zero - ) + override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) = ( + amonoid.zero, + bmonoid.zero, + cmonoid.zero, + dmonoid.zero, + emonoid.zero, + fmonoid.zero, + gmonoid.zero, + hmonoid.zero, + imonoid.zero, + jmonoid.zero, + kmonoid.zero, + lmonoid.zero, + mmonoid.zero, + nmonoid.zero, + omonoid.zero, + pmonoid.zero, + qmonoid.zero + ) } /** @@ -2615,49 +2529,47 @@ class Tuple17Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](implicit with Group[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] { override def negate( v: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7), - hgroup.negate(v._8), - igroup.negate(v._9), - jgroup.negate(v._10), - kgroup.negate(v._11), - lgroup.negate(v._12), - mgroup.negate(v._13), - ngroup.negate(v._14), - ogroup.negate(v._15), - pgroup.negate(v._16), - qgroup.negate(v._17) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6), + ggroup.negate(v._7), + hgroup.negate(v._8), + igroup.negate(v._9), + jgroup.negate(v._10), + kgroup.negate(v._11), + lgroup.negate(v._12), + mgroup.negate(v._13), + ngroup.negate(v._14), + ogroup.negate(v._15), + pgroup.negate(v._16), + qgroup.negate(v._17) + ) override def minus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6), - ggroup.minus(l._7, r._7), - hgroup.minus(l._8, r._8), - igroup.minus(l._9, r._9), - jgroup.minus(l._10, r._10), - kgroup.minus(l._11, r._11), - lgroup.minus(l._12, r._12), - mgroup.minus(l._13, r._13), - ngroup.minus(l._14, r._14), - ogroup.minus(l._15, r._15), - pgroup.minus(l._16, r._16), - qgroup.minus(l._17, r._17) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6), + ggroup.minus(l._7, r._7), + hgroup.minus(l._8, r._8), + igroup.minus(l._9, r._9), + jgroup.minus(l._10, r._10), + kgroup.minus(l._11, r._11), + lgroup.minus(l._12, r._12), + mgroup.minus(l._13, r._13), + ngroup.minus(l._14, r._14), + ogroup.minus(l._15, r._15), + pgroup.minus(l._16, r._16), + qgroup.minus(l._17, r._17) + ) } /** @@ -2675,57 +2587,55 @@ class Tuple17Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](implicit iring: Ring[I], jring: Ring[J], kring: Ring[K], - lring: Ring[L], - mring: Ring[M], - nring: Ring[N], - oring: Ring[O], - pring: Ring[P], - qring: Ring[Q] -) extends Tuple17Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] - with Ring[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] { - override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) = - ( - aring.one, - bring.one, - cring.one, - dring.one, - ering.one, - fring.one, - gring.one, - hring.one, - iring.one, 
- jring.one, - kring.one, - lring.one, - mring.one, - nring.one, - oring.one, - pring.one, - qring.one - ) - override def times( - l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q), - r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6), - gring.times(l._7, r._7), - hring.times(l._8, r._8), - iring.times(l._9, r._9), - jring.times(l._10, r._10), - kring.times(l._11, r._11), - lring.times(l._12, r._12), - mring.times(l._13, r._13), - nring.times(l._14, r._14), - oring.times(l._15, r._15), - pring.times(l._16, r._16), - qring.times(l._17, r._17) - ) + lring: Ring[L], + mring: Ring[M], + nring: Ring[N], + oring: Ring[O], + pring: Ring[P], + qring: Ring[Q] +) extends Tuple17Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q] + with Ring[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q)] { + override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) = ( + aring.one, + bring.one, + cring.one, + dring.one, + ering.one, + fring.one, + gring.one, + hring.one, + iring.one, + jring.one, + kring.one, + lring.one, + mring.one, + nring.one, + oring.one, + pring.one, + qring.one + ) + override def times( + l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q), + r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6), + gring.times(l._7, r._7), + hring.times(l._8, r._8), + iring.times(l._9, r._9), + jring.times(l._10, r._10), + kring.times(l._11, r._11), + lring.times(l._12, r._12), + mring.times(l._13, r._13), + nring.times(l._14, r._14), + oring.times(l._15, r._15), + pring.times(l._16, r._16), + qring.times(l._17, r._17) + ) } /** @@ -2754,31 +2664,30 @@ class Tuple18Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](imp override def plus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - fsemigroup.plus(l._6, r._6), - gsemigroup.plus(l._7, r._7), - hsemigroup.plus(l._8, r._8), - isemigroup.plus(l._9, r._9), - jsemigroup.plus(l._10, r._10), - ksemigroup.plus(l._11, r._11), - lsemigroup.plus(l._12, r._12), - msemigroup.plus(l._13, r._13), - nsemigroup.plus(l._14, r._14), - osemigroup.plus(l._15, r._15), - psemigroup.plus(l._16, r._16), - qsemigroup.plus(l._17, r._17), - rsemigroup.plus(l._18, r._18) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6), + gsemigroup.plus(l._7, r._7), + hsemigroup.plus(l._8, r._8), + isemigroup.plus(l._9, r._9), + jsemigroup.plus(l._10, r._10), + ksemigroup.plus(l._11, r._11), + lsemigroup.plus(l._12, r._12), + msemigroup.plus(l._13, r._13), + nsemigroup.plus(l._14, r._14), + osemigroup.plus(l._15, r._15), + psemigroup.plus(l._16, r._16), + qsemigroup.plus(l._17, r._17), + rsemigroup.plus(l._18, r._18) + ) override 
def sumOption( to: TraversableOnce[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] ): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -2798,13 +2707,11 @@ class Tuple18Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](imp val bufP = fromSumOption[P](1000) val bufQ = fromSumOption[Q](1000) val bufR = fromSumOption[R](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); - bufJ.put(tuple._10); bufK.put(tuple._11); bufL.put(tuple._12); - bufM.put(tuple._13); bufN.put(tuple._14); bufO.put(tuple._15); - bufP.put(tuple._16); bufQ.put(tuple._17); bufR.put(tuple._18) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); bufJ.put(tuple._10); + bufK.put(tuple._11); bufL.put(tuple._12); bufM.put(tuple._13); bufN.put(tuple._14); + bufO.put(tuple._15); bufP.put(tuple._16); bufQ.put(tuple._17); bufR.put(tuple._18) } Some( ( @@ -2855,27 +2762,26 @@ class Tuple18Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](implic rmonoid: Monoid[R] ) extends Tuple18Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] with Monoid[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] { - override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) = - ( - amonoid.zero, - bmonoid.zero, - cmonoid.zero, - dmonoid.zero, - emonoid.zero, - fmonoid.zero, - gmonoid.zero, - hmonoid.zero, - imonoid.zero, - jmonoid.zero, - kmonoid.zero, - lmonoid.zero, - mmonoid.zero, - nmonoid.zero, - omonoid.zero, - pmonoid.zero, - qmonoid.zero, - rmonoid.zero - ) + override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) = ( + amonoid.zero, + bmonoid.zero, + cmonoid.zero, + dmonoid.zero, + emonoid.zero, + fmonoid.zero, + gmonoid.zero, + hmonoid.zero, + imonoid.zero, + jmonoid.zero, + kmonoid.zero, + lmonoid.zero, + mmonoid.zero, + nmonoid.zero, + omonoid.zero, + pmonoid.zero, + qmonoid.zero, + rmonoid.zero + ) } /** @@ -2904,51 +2810,49 @@ class Tuple18Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](implici with Group[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] { override def negate( v: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7), - hgroup.negate(v._8), - igroup.negate(v._9), - jgroup.negate(v._10), - kgroup.negate(v._11), - lgroup.negate(v._12), - mgroup.negate(v._13), - ngroup.negate(v._14), - ogroup.negate(v._15), - pgroup.negate(v._16), - qgroup.negate(v._17), - rgroup.negate(v._18) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6), + ggroup.negate(v._7), + hgroup.negate(v._8), + igroup.negate(v._9), + jgroup.negate(v._10), + kgroup.negate(v._11), + lgroup.negate(v._12), + mgroup.negate(v._13), + ngroup.negate(v._14), + ogroup.negate(v._15), + pgroup.negate(v._16), + qgroup.negate(v._17), + 
rgroup.negate(v._18) + ) override def minus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6), - ggroup.minus(l._7, r._7), - hgroup.minus(l._8, r._8), - igroup.minus(l._9, r._9), - jgroup.minus(l._10, r._10), - kgroup.minus(l._11, r._11), - lgroup.minus(l._12, r._12), - mgroup.minus(l._13, r._13), - ngroup.minus(l._14, r._14), - ogroup.minus(l._15, r._15), - pgroup.minus(l._16, r._16), - qgroup.minus(l._17, r._17), - rgroup.minus(l._18, r._18) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6), + ggroup.minus(l._7, r._7), + hgroup.minus(l._8, r._8), + igroup.minus(l._9, r._9), + jgroup.minus(l._10, r._10), + kgroup.minus(l._11, r._11), + lgroup.minus(l._12, r._12), + mgroup.minus(l._13, r._13), + ngroup.minus(l._14, r._14), + ogroup.minus(l._15, r._15), + pgroup.minus(l._16, r._16), + qgroup.minus(l._17, r._17), + rgroup.minus(l._18, r._18) + ) } /** @@ -2975,51 +2879,49 @@ class Tuple18Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](implicit rring: Ring[R] ) extends Tuple18Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R] with Ring[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R)] { - override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) = - ( - aring.one, - bring.one, - cring.one, - dring.one, - ering.one, - fring.one, - gring.one, - hring.one, - iring.one, - jring.one, - kring.one, - lring.one, - mring.one, - nring.one, - oring.one, - pring.one, - qring.one, - rring.one - ) + override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) = ( + aring.one, + bring.one, + cring.one, + dring.one, + ering.one, + fring.one, + gring.one, + hring.one, + iring.one, + jring.one, + kring.one, + lring.one, + mring.one, + nring.one, + oring.one, + pring.one, + qring.one, + rring.one + ) override def times( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6), - gring.times(l._7, r._7), - hring.times(l._8, r._8), - iring.times(l._9, r._9), - jring.times(l._10, r._10), - kring.times(l._11, r._11), - lring.times(l._12, r._12), - mring.times(l._13, r._13), - nring.times(l._14, r._14), - oring.times(l._15, r._15), - pring.times(l._16, r._16), - qring.times(l._17, r._17), - rring.times(l._18, r._18) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6), + gring.times(l._7, r._7), + hring.times(l._8, r._8), + iring.times(l._9, r._9), + jring.times(l._10, r._10), + kring.times(l._11, r._11), + lring.times(l._12, r._12), + mring.times(l._13, r._13), + nring.times(l._14, r._14), + oring.times(l._15, r._15), + pring.times(l._16, r._16), + qring.times(l._17, r._17), + rring.times(l._18, r._18) + ) } /** @@ -3049,32 +2951,31 
@@ class Tuple19Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( override def plus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - fsemigroup.plus(l._6, r._6), - gsemigroup.plus(l._7, r._7), - hsemigroup.plus(l._8, r._8), - isemigroup.plus(l._9, r._9), - jsemigroup.plus(l._10, r._10), - ksemigroup.plus(l._11, r._11), - lsemigroup.plus(l._12, r._12), - msemigroup.plus(l._13, r._13), - nsemigroup.plus(l._14, r._14), - osemigroup.plus(l._15, r._15), - psemigroup.plus(l._16, r._16), - qsemigroup.plus(l._17, r._17), - rsemigroup.plus(l._18, r._18), - ssemigroup.plus(l._19, r._19) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6), + gsemigroup.plus(l._7, r._7), + hsemigroup.plus(l._8, r._8), + isemigroup.plus(l._9, r._9), + jsemigroup.plus(l._10, r._10), + ksemigroup.plus(l._11, r._11), + lsemigroup.plus(l._12, r._12), + msemigroup.plus(l._13, r._13), + nsemigroup.plus(l._14, r._14), + osemigroup.plus(l._15, r._15), + psemigroup.plus(l._16, r._16), + qsemigroup.plus(l._17, r._17), + rsemigroup.plus(l._18, r._18), + ssemigroup.plus(l._19, r._19) + ) override def sumOption( to: TraversableOnce[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] ): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -3095,13 +2996,11 @@ class Tuple19Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( val bufQ = fromSumOption[Q](1000) val bufR = fromSumOption[R](1000) val bufS = fromSumOption[S](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); - bufJ.put(tuple._10); bufK.put(tuple._11); bufL.put(tuple._12); - bufM.put(tuple._13); bufN.put(tuple._14); bufO.put(tuple._15); - bufP.put(tuple._16); bufQ.put(tuple._17); bufR.put(tuple._18); + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); bufJ.put(tuple._10); + bufK.put(tuple._11); bufL.put(tuple._12); bufM.put(tuple._13); bufN.put(tuple._14); + bufO.put(tuple._15); bufP.put(tuple._16); bufQ.put(tuple._17); bufR.put(tuple._18); bufS.put(tuple._19) } Some( @@ -3155,28 +3054,27 @@ class Tuple19Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](imp smonoid: Monoid[S] ) extends Tuple19Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] with Monoid[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] { - override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) = - ( - amonoid.zero, - bmonoid.zero, - cmonoid.zero, - dmonoid.zero, - emonoid.zero, - fmonoid.zero, - gmonoid.zero, - hmonoid.zero, - imonoid.zero, - jmonoid.zero, - kmonoid.zero, - lmonoid.zero, - mmonoid.zero, - nmonoid.zero, - omonoid.zero, - 
pmonoid.zero, - qmonoid.zero, - rmonoid.zero, - smonoid.zero - ) + override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) = ( + amonoid.zero, + bmonoid.zero, + cmonoid.zero, + dmonoid.zero, + emonoid.zero, + fmonoid.zero, + gmonoid.zero, + hmonoid.zero, + imonoid.zero, + jmonoid.zero, + kmonoid.zero, + lmonoid.zero, + mmonoid.zero, + nmonoid.zero, + omonoid.zero, + pmonoid.zero, + qmonoid.zero, + rmonoid.zero, + smonoid.zero + ) } /** @@ -3206,53 +3104,51 @@ class Tuple19Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](impl with Group[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] { override def negate( v: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7), - hgroup.negate(v._8), - igroup.negate(v._9), - jgroup.negate(v._10), - kgroup.negate(v._11), - lgroup.negate(v._12), - mgroup.negate(v._13), - ngroup.negate(v._14), - ogroup.negate(v._15), - pgroup.negate(v._16), - qgroup.negate(v._17), - rgroup.negate(v._18), - sgroup.negate(v._19) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6), + ggroup.negate(v._7), + hgroup.negate(v._8), + igroup.negate(v._9), + jgroup.negate(v._10), + kgroup.negate(v._11), + lgroup.negate(v._12), + mgroup.negate(v._13), + ngroup.negate(v._14), + ogroup.negate(v._15), + pgroup.negate(v._16), + qgroup.negate(v._17), + rgroup.negate(v._18), + sgroup.negate(v._19) + ) override def minus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6), - ggroup.minus(l._7, r._7), - hgroup.minus(l._8, r._8), - igroup.minus(l._9, r._9), - jgroup.minus(l._10, r._10), - kgroup.minus(l._11, r._11), - lgroup.minus(l._12, r._12), - mgroup.minus(l._13, r._13), - ngroup.minus(l._14, r._14), - ogroup.minus(l._15, r._15), - pgroup.minus(l._16, r._16), - qgroup.minus(l._17, r._17), - rgroup.minus(l._18, r._18), - sgroup.minus(l._19, r._19) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6), + ggroup.minus(l._7, r._7), + hgroup.minus(l._8, r._8), + igroup.minus(l._9, r._9), + jgroup.minus(l._10, r._10), + kgroup.minus(l._11, r._11), + lgroup.minus(l._12, r._12), + mgroup.minus(l._13, r._13), + ngroup.minus(l._14, r._14), + ogroup.minus(l._15, r._15), + pgroup.minus(l._16, r._16), + qgroup.minus(l._17, r._17), + rgroup.minus(l._18, r._18), + sgroup.minus(l._19, r._19) + ) } /** @@ -3280,53 +3176,51 @@ class Tuple19Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](impli sring: Ring[S] ) extends Tuple19Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S] with Ring[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S)] { - override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) = - ( - aring.one, - bring.one, - cring.one, - 
dring.one, - ering.one, - fring.one, - gring.one, - hring.one, - iring.one, - jring.one, - kring.one, - lring.one, - mring.one, - nring.one, - oring.one, - pring.one, - qring.one, - rring.one, - sring.one - ) + override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) = ( + aring.one, + bring.one, + cring.one, + dring.one, + ering.one, + fring.one, + gring.one, + hring.one, + iring.one, + jring.one, + kring.one, + lring.one, + mring.one, + nring.one, + oring.one, + pring.one, + qring.one, + rring.one, + sring.one + ) override def times( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6), - gring.times(l._7, r._7), - hring.times(l._8, r._8), - iring.times(l._9, r._9), - jring.times(l._10, r._10), - kring.times(l._11, r._11), - lring.times(l._12, r._12), - mring.times(l._13, r._13), - nring.times(l._14, r._14), - oring.times(l._15, r._15), - pring.times(l._16, r._16), - qring.times(l._17, r._17), - rring.times(l._18, r._18), - sring.times(l._19, r._19) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6), + gring.times(l._7, r._7), + hring.times(l._8, r._8), + iring.times(l._9, r._9), + jring.times(l._10, r._10), + kring.times(l._11, r._11), + lring.times(l._12, r._12), + mring.times(l._13, r._13), + nring.times(l._14, r._14), + oring.times(l._15, r._15), + pring.times(l._16, r._16), + qring.times(l._17, r._17), + rring.times(l._18, r._18), + sring.times(l._19, r._19) + ) } /** @@ -3357,33 +3251,32 @@ class Tuple20Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, override def plus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - fsemigroup.plus(l._6, r._6), - gsemigroup.plus(l._7, r._7), - hsemigroup.plus(l._8, r._8), - isemigroup.plus(l._9, r._9), - jsemigroup.plus(l._10, r._10), - ksemigroup.plus(l._11, r._11), - lsemigroup.plus(l._12, r._12), - msemigroup.plus(l._13, r._13), - nsemigroup.plus(l._14, r._14), - osemigroup.plus(l._15, r._15), - psemigroup.plus(l._16, r._16), - qsemigroup.plus(l._17, r._17), - rsemigroup.plus(l._18, r._18), - ssemigroup.plus(l._19, r._19), - tsemigroup.plus(l._20, r._20) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6), + gsemigroup.plus(l._7, r._7), + hsemigroup.plus(l._8, r._8), + isemigroup.plus(l._9, r._9), + jsemigroup.plus(l._10, r._10), + ksemigroup.plus(l._11, r._11), + lsemigroup.plus(l._12, r._12), + msemigroup.plus(l._13, r._13), + nsemigroup.plus(l._14, r._14), + osemigroup.plus(l._15, r._15), + psemigroup.plus(l._16, r._16), + qsemigroup.plus(l._17, r._17), + rsemigroup.plus(l._18, r._18), + ssemigroup.plus(l._19, r._19), + 
tsemigroup.plus(l._20, r._20) + ) override def sumOption( to: TraversableOnce[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] ): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -3405,13 +3298,11 @@ class Tuple20Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, val bufR = fromSumOption[R](1000) val bufS = fromSumOption[S](1000) val bufT = fromSumOption[T](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); - bufJ.put(tuple._10); bufK.put(tuple._11); bufL.put(tuple._12); - bufM.put(tuple._13); bufN.put(tuple._14); bufO.put(tuple._15); - bufP.put(tuple._16); bufQ.put(tuple._17); bufR.put(tuple._18); + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); bufJ.put(tuple._10); + bufK.put(tuple._11); bufL.put(tuple._12); bufM.put(tuple._13); bufN.put(tuple._14); + bufO.put(tuple._15); bufP.put(tuple._16); bufQ.put(tuple._17); bufR.put(tuple._18); bufS.put(tuple._19); bufT.put(tuple._20) } Some( @@ -3467,29 +3358,28 @@ class Tuple20Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( tmonoid: Monoid[T] ) extends Tuple20Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] with Monoid[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] { - override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) = - ( - amonoid.zero, - bmonoid.zero, - cmonoid.zero, - dmonoid.zero, - emonoid.zero, - fmonoid.zero, - gmonoid.zero, - hmonoid.zero, - imonoid.zero, - jmonoid.zero, - kmonoid.zero, - lmonoid.zero, - mmonoid.zero, - nmonoid.zero, - omonoid.zero, - pmonoid.zero, - qmonoid.zero, - rmonoid.zero, - smonoid.zero, - tmonoid.zero - ) + override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) = ( + amonoid.zero, + bmonoid.zero, + cmonoid.zero, + dmonoid.zero, + emonoid.zero, + fmonoid.zero, + gmonoid.zero, + hmonoid.zero, + imonoid.zero, + jmonoid.zero, + kmonoid.zero, + lmonoid.zero, + mmonoid.zero, + nmonoid.zero, + omonoid.zero, + pmonoid.zero, + qmonoid.zero, + rmonoid.zero, + smonoid.zero, + tmonoid.zero + ) } /** @@ -3520,55 +3410,53 @@ class Tuple20Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](i with Group[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] { override def negate( v: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7), - hgroup.negate(v._8), - igroup.negate(v._9), - jgroup.negate(v._10), - kgroup.negate(v._11), - lgroup.negate(v._12), - mgroup.negate(v._13), - ngroup.negate(v._14), - ogroup.negate(v._15), - pgroup.negate(v._16), - qgroup.negate(v._17), - rgroup.negate(v._18), - sgroup.negate(v._19), - tgroup.negate(v._20) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6), + 
ggroup.negate(v._7), + hgroup.negate(v._8), + igroup.negate(v._9), + jgroup.negate(v._10), + kgroup.negate(v._11), + lgroup.negate(v._12), + mgroup.negate(v._13), + ngroup.negate(v._14), + ogroup.negate(v._15), + pgroup.negate(v._16), + qgroup.negate(v._17), + rgroup.negate(v._18), + sgroup.negate(v._19), + tgroup.negate(v._20) + ) override def minus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6), - ggroup.minus(l._7, r._7), - hgroup.minus(l._8, r._8), - igroup.minus(l._9, r._9), - jgroup.minus(l._10, r._10), - kgroup.minus(l._11, r._11), - lgroup.minus(l._12, r._12), - mgroup.minus(l._13, r._13), - ngroup.minus(l._14, r._14), - ogroup.minus(l._15, r._15), - pgroup.minus(l._16, r._16), - qgroup.minus(l._17, r._17), - rgroup.minus(l._18, r._18), - sgroup.minus(l._19, r._19), - tgroup.minus(l._20, r._20) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6), + ggroup.minus(l._7, r._7), + hgroup.minus(l._8, r._8), + igroup.minus(l._9, r._9), + jgroup.minus(l._10, r._10), + kgroup.minus(l._11, r._11), + lgroup.minus(l._12, r._12), + mgroup.minus(l._13, r._13), + ngroup.minus(l._14, r._14), + ogroup.minus(l._15, r._15), + pgroup.minus(l._16, r._16), + qgroup.minus(l._17, r._17), + rgroup.minus(l._18, r._18), + sgroup.minus(l._19, r._19), + tgroup.minus(l._20, r._20) + ) } /** @@ -3597,55 +3485,53 @@ class Tuple20Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](im tring: Ring[T] ) extends Tuple20Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T] with Ring[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] { - override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) = - ( - aring.one, - bring.one, - cring.one, - dring.one, - ering.one, - fring.one, - gring.one, - hring.one, - iring.one, - jring.one, - kring.one, - lring.one, - mring.one, - nring.one, - oring.one, - pring.one, - qring.one, - rring.one, - sring.one, - tring.one - ) + override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) = ( + aring.one, + bring.one, + cring.one, + dring.one, + ering.one, + fring.one, + gring.one, + hring.one, + iring.one, + jring.one, + kring.one, + lring.one, + mring.one, + nring.one, + oring.one, + pring.one, + qring.one, + rring.one, + sring.one, + tring.one + ) override def times( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6), - gring.times(l._7, r._7), - hring.times(l._8, r._8), - iring.times(l._9, r._9), - jring.times(l._10, r._10), - kring.times(l._11, r._11), - lring.times(l._12, r._12), - mring.times(l._13, r._13), - nring.times(l._14, r._14), - oring.times(l._15, r._15), - pring.times(l._16, r._16), - qring.times(l._17, r._17), - rring.times(l._18, r._18), - sring.times(l._19, r._19), - tring.times(l._20, 
r._20) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6), + gring.times(l._7, r._7), + hring.times(l._8, r._8), + iring.times(l._9, r._9), + jring.times(l._10, r._10), + kring.times(l._11, r._11), + lring.times(l._12, r._12), + mring.times(l._13, r._13), + nring.times(l._14, r._14), + oring.times(l._15, r._15), + pring.times(l._16, r._16), + qring.times(l._17, r._17), + rring.times(l._18, r._18), + sring.times(l._19, r._19), + tring.times(l._20, r._20) + ) } /** @@ -3677,34 +3563,33 @@ class Tuple21Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, override def plus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - fsemigroup.plus(l._6, r._6), - gsemigroup.plus(l._7, r._7), - hsemigroup.plus(l._8, r._8), - isemigroup.plus(l._9, r._9), - jsemigroup.plus(l._10, r._10), - ksemigroup.plus(l._11, r._11), - lsemigroup.plus(l._12, r._12), - msemigroup.plus(l._13, r._13), - nsemigroup.plus(l._14, r._14), - osemigroup.plus(l._15, r._15), - psemigroup.plus(l._16, r._16), - qsemigroup.plus(l._17, r._17), - rsemigroup.plus(l._18, r._18), - ssemigroup.plus(l._19, r._19), - tsemigroup.plus(l._20, r._20), - usemigroup.plus(l._21, r._21) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6), + gsemigroup.plus(l._7, r._7), + hsemigroup.plus(l._8, r._8), + isemigroup.plus(l._9, r._9), + jsemigroup.plus(l._10, r._10), + ksemigroup.plus(l._11, r._11), + lsemigroup.plus(l._12, r._12), + msemigroup.plus(l._13, r._13), + nsemigroup.plus(l._14, r._14), + osemigroup.plus(l._15, r._15), + psemigroup.plus(l._16, r._16), + qsemigroup.plus(l._17, r._17), + rsemigroup.plus(l._18, r._18), + ssemigroup.plus(l._19, r._19), + tsemigroup.plus(l._20, r._20), + usemigroup.plus(l._21, r._21) + ) override def sumOption( to: TraversableOnce[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] ): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -3727,13 +3612,11 @@ class Tuple21Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, val bufS = fromSumOption[S](1000) val bufT = fromSumOption[T](1000) val bufU = fromSumOption[U](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); - bufJ.put(tuple._10); bufK.put(tuple._11); bufL.put(tuple._12); - bufM.put(tuple._13); bufN.put(tuple._14); bufO.put(tuple._15); - bufP.put(tuple._16); bufQ.put(tuple._17); bufR.put(tuple._18); + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7); bufH.put(tuple._8); 
bufI.put(tuple._9); bufJ.put(tuple._10); + bufK.put(tuple._11); bufL.put(tuple._12); bufM.put(tuple._13); bufN.put(tuple._14); + bufO.put(tuple._15); bufP.put(tuple._16); bufQ.put(tuple._17); bufR.put(tuple._18); bufS.put(tuple._19); bufT.put(tuple._20); bufU.put(tuple._21) } Some( @@ -3791,30 +3674,29 @@ class Tuple21Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, umonoid: Monoid[U] ) extends Tuple21Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] with Monoid[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] { - override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) = - ( - amonoid.zero, - bmonoid.zero, - cmonoid.zero, - dmonoid.zero, - emonoid.zero, - fmonoid.zero, - gmonoid.zero, - hmonoid.zero, - imonoid.zero, - jmonoid.zero, - kmonoid.zero, - lmonoid.zero, - mmonoid.zero, - nmonoid.zero, - omonoid.zero, - pmonoid.zero, - qmonoid.zero, - rmonoid.zero, - smonoid.zero, - tmonoid.zero, - umonoid.zero - ) + override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) = ( + amonoid.zero, + bmonoid.zero, + cmonoid.zero, + dmonoid.zero, + emonoid.zero, + fmonoid.zero, + gmonoid.zero, + hmonoid.zero, + imonoid.zero, + jmonoid.zero, + kmonoid.zero, + lmonoid.zero, + mmonoid.zero, + nmonoid.zero, + omonoid.zero, + pmonoid.zero, + qmonoid.zero, + rmonoid.zero, + smonoid.zero, + tmonoid.zero, + umonoid.zero + ) } /** @@ -3846,57 +3728,55 @@ class Tuple21Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U with Group[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] { override def negate( v: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7), - hgroup.negate(v._8), - igroup.negate(v._9), - jgroup.negate(v._10), - kgroup.negate(v._11), - lgroup.negate(v._12), - mgroup.negate(v._13), - ngroup.negate(v._14), - ogroup.negate(v._15), - pgroup.negate(v._16), - qgroup.negate(v._17), - rgroup.negate(v._18), - sgroup.negate(v._19), - tgroup.negate(v._20), - ugroup.negate(v._21) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6), + ggroup.negate(v._7), + hgroup.negate(v._8), + igroup.negate(v._9), + jgroup.negate(v._10), + kgroup.negate(v._11), + lgroup.negate(v._12), + mgroup.negate(v._13), + ngroup.negate(v._14), + ogroup.negate(v._15), + pgroup.negate(v._16), + qgroup.negate(v._17), + rgroup.negate(v._18), + sgroup.negate(v._19), + tgroup.negate(v._20), + ugroup.negate(v._21) + ) override def minus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6), - ggroup.minus(l._7, r._7), - hgroup.minus(l._8, r._8), - igroup.minus(l._9, r._9), - jgroup.minus(l._10, r._10), - kgroup.minus(l._11, r._11), - lgroup.minus(l._12, r._12), - mgroup.minus(l._13, r._13), - ngroup.minus(l._14, r._14), - ogroup.minus(l._15, r._15), - pgroup.minus(l._16, r._16), - 
qgroup.minus(l._17, r._17), - rgroup.minus(l._18, r._18), - sgroup.minus(l._19, r._19), - tgroup.minus(l._20, r._20), - ugroup.minus(l._21, r._21) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6), + ggroup.minus(l._7, r._7), + hgroup.minus(l._8, r._8), + igroup.minus(l._9, r._9), + jgroup.minus(l._10, r._10), + kgroup.minus(l._11, r._11), + lgroup.minus(l._12, r._12), + mgroup.minus(l._13, r._13), + ngroup.minus(l._14, r._14), + ogroup.minus(l._15, r._15), + pgroup.minus(l._16, r._16), + qgroup.minus(l._17, r._17), + rgroup.minus(l._18, r._18), + sgroup.minus(l._19, r._19), + tgroup.minus(l._20, r._20), + ugroup.minus(l._21, r._21) + ) } /** @@ -3926,57 +3806,55 @@ class Tuple21Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] uring: Ring[U] ) extends Tuple21Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U] with Ring[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] { - override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) = - ( - aring.one, - bring.one, - cring.one, - dring.one, - ering.one, - fring.one, - gring.one, - hring.one, - iring.one, - jring.one, - kring.one, - lring.one, - mring.one, - nring.one, - oring.one, - pring.one, - qring.one, - rring.one, - sring.one, - tring.one, - uring.one - ) + override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) = ( + aring.one, + bring.one, + cring.one, + dring.one, + ering.one, + fring.one, + gring.one, + hring.one, + iring.one, + jring.one, + kring.one, + lring.one, + mring.one, + nring.one, + oring.one, + pring.one, + qring.one, + rring.one, + sring.one, + tring.one, + uring.one + ) override def times( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6), - gring.times(l._7, r._7), - hring.times(l._8, r._8), - iring.times(l._9, r._9), - jring.times(l._10, r._10), - kring.times(l._11, r._11), - lring.times(l._12, r._12), - mring.times(l._13, r._13), - nring.times(l._14, r._14), - oring.times(l._15, r._15), - pring.times(l._16, r._16), - qring.times(l._17, r._17), - rring.times(l._18, r._18), - sring.times(l._19, r._19), - tring.times(l._20, r._20), - uring.times(l._21, r._21) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6), + gring.times(l._7, r._7), + hring.times(l._8, r._8), + iring.times(l._9, r._9), + jring.times(l._10, r._10), + kring.times(l._11, r._11), + lring.times(l._12, r._12), + mring.times(l._13, r._13), + nring.times(l._14, r._14), + oring.times(l._15, r._15), + pring.times(l._16, r._16), + qring.times(l._17, r._17), + rring.times(l._18, r._18), + sring.times(l._19, r._19), + tring.times(l._20, r._20), + uring.times(l._21, r._21) + ) } /** @@ -4009,35 +3887,34 @@ class Tuple22Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, override def plus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V), r: 
(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) = - ( - asemigroup.plus(l._1, r._1), - bsemigroup.plus(l._2, r._2), - csemigroup.plus(l._3, r._3), - dsemigroup.plus(l._4, r._4), - esemigroup.plus(l._5, r._5), - fsemigroup.plus(l._6, r._6), - gsemigroup.plus(l._7, r._7), - hsemigroup.plus(l._8, r._8), - isemigroup.plus(l._9, r._9), - jsemigroup.plus(l._10, r._10), - ksemigroup.plus(l._11, r._11), - lsemigroup.plus(l._12, r._12), - msemigroup.plus(l._13, r._13), - nsemigroup.plus(l._14, r._14), - osemigroup.plus(l._15, r._15), - psemigroup.plus(l._16, r._16), - qsemigroup.plus(l._17, r._17), - rsemigroup.plus(l._18, r._18), - ssemigroup.plus(l._19, r._19), - tsemigroup.plus(l._20, r._20), - usemigroup.plus(l._21, r._21), - vsemigroup.plus(l._22, r._22) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) = ( + asemigroup.plus(l._1, r._1), + bsemigroup.plus(l._2, r._2), + csemigroup.plus(l._3, r._3), + dsemigroup.plus(l._4, r._4), + esemigroup.plus(l._5, r._5), + fsemigroup.plus(l._6, r._6), + gsemigroup.plus(l._7, r._7), + hsemigroup.plus(l._8, r._8), + isemigroup.plus(l._9, r._9), + jsemigroup.plus(l._10, r._10), + ksemigroup.plus(l._11, r._11), + lsemigroup.plus(l._12, r._12), + msemigroup.plus(l._13, r._13), + nsemigroup.plus(l._14, r._14), + osemigroup.plus(l._15, r._15), + psemigroup.plus(l._16, r._16), + qsemigroup.plus(l._17, r._17), + rsemigroup.plus(l._18, r._18), + ssemigroup.plus(l._19, r._19), + tsemigroup.plus(l._20, r._20), + usemigroup.plus(l._21, r._21), + vsemigroup.plus(l._22, r._22) + ) override def sumOption( to: TraversableOnce[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] ): Option[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] = - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { val bufA = fromSumOption[A](1000) val bufB = fromSumOption[B](1000) @@ -4061,15 +3938,12 @@ class Tuple22Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, val bufT = fromSumOption[T](1000) val bufU = fromSumOption[U](1000) val bufV = fromSumOption[V](1000) - to.foreach { tuple => - bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); - bufD.put(tuple._4); bufE.put(tuple._5); bufF.put(tuple._6); - bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); - bufJ.put(tuple._10); bufK.put(tuple._11); bufL.put(tuple._12); - bufM.put(tuple._13); bufN.put(tuple._14); bufO.put(tuple._15); - bufP.put(tuple._16); bufQ.put(tuple._17); bufR.put(tuple._18); - bufS.put(tuple._19); bufT.put(tuple._20); bufU.put(tuple._21); - bufV.put(tuple._22) + to.iterator.foreach { tuple => + bufA.put(tuple._1); bufB.put(tuple._2); bufC.put(tuple._3); bufD.put(tuple._4); bufE.put(tuple._5); + bufF.put(tuple._6); bufG.put(tuple._7); bufH.put(tuple._8); bufI.put(tuple._9); bufJ.put(tuple._10); + bufK.put(tuple._11); bufL.put(tuple._12); bufM.put(tuple._13); bufN.put(tuple._14); + bufO.put(tuple._15); bufP.put(tuple._16); bufQ.put(tuple._17); bufR.put(tuple._18); + bufS.put(tuple._19); bufT.put(tuple._20); bufU.put(tuple._21); bufV.put(tuple._22) } Some( ( @@ -4128,31 +4002,30 @@ class Tuple22Monoid[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, vmonoid: Monoid[V] ) extends Tuple22Semigroup[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] with Monoid[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] { - override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) 
= - ( - amonoid.zero, - bmonoid.zero, - cmonoid.zero, - dmonoid.zero, - emonoid.zero, - fmonoid.zero, - gmonoid.zero, - hmonoid.zero, - imonoid.zero, - jmonoid.zero, - kmonoid.zero, - lmonoid.zero, - mmonoid.zero, - nmonoid.zero, - omonoid.zero, - pmonoid.zero, - qmonoid.zero, - rmonoid.zero, - smonoid.zero, - tmonoid.zero, - umonoid.zero, - vmonoid.zero - ) + override def zero: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) = ( + amonoid.zero, + bmonoid.zero, + cmonoid.zero, + dmonoid.zero, + emonoid.zero, + fmonoid.zero, + gmonoid.zero, + hmonoid.zero, + imonoid.zero, + jmonoid.zero, + kmonoid.zero, + lmonoid.zero, + mmonoid.zero, + nmonoid.zero, + omonoid.zero, + pmonoid.zero, + qmonoid.zero, + rmonoid.zero, + smonoid.zero, + tmonoid.zero, + umonoid.zero, + vmonoid.zero + ) } /** @@ -4185,59 +4058,57 @@ class Tuple22Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U with Group[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] { override def negate( v: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) = - ( - agroup.negate(v._1), - bgroup.negate(v._2), - cgroup.negate(v._3), - dgroup.negate(v._4), - egroup.negate(v._5), - fgroup.negate(v._6), - ggroup.negate(v._7), - hgroup.negate(v._8), - igroup.negate(v._9), - jgroup.negate(v._10), - kgroup.negate(v._11), - lgroup.negate(v._12), - mgroup.negate(v._13), - ngroup.negate(v._14), - ogroup.negate(v._15), - pgroup.negate(v._16), - qgroup.negate(v._17), - rgroup.negate(v._18), - sgroup.negate(v._19), - tgroup.negate(v._20), - ugroup.negate(v._21), - vgroup.negate(v._22) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) = ( + agroup.negate(v._1), + bgroup.negate(v._2), + cgroup.negate(v._3), + dgroup.negate(v._4), + egroup.negate(v._5), + fgroup.negate(v._6), + ggroup.negate(v._7), + hgroup.negate(v._8), + igroup.negate(v._9), + jgroup.negate(v._10), + kgroup.negate(v._11), + lgroup.negate(v._12), + mgroup.negate(v._13), + ngroup.negate(v._14), + ogroup.negate(v._15), + pgroup.negate(v._16), + qgroup.negate(v._17), + rgroup.negate(v._18), + sgroup.negate(v._19), + tgroup.negate(v._20), + ugroup.negate(v._21), + vgroup.negate(v._22) + ) override def minus( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) = - ( - agroup.minus(l._1, r._1), - bgroup.minus(l._2, r._2), - cgroup.minus(l._3, r._3), - dgroup.minus(l._4, r._4), - egroup.minus(l._5, r._5), - fgroup.minus(l._6, r._6), - ggroup.minus(l._7, r._7), - hgroup.minus(l._8, r._8), - igroup.minus(l._9, r._9), - jgroup.minus(l._10, r._10), - kgroup.minus(l._11, r._11), - lgroup.minus(l._12, r._12), - mgroup.minus(l._13, r._13), - ngroup.minus(l._14, r._14), - ogroup.minus(l._15, r._15), - pgroup.minus(l._16, r._16), - qgroup.minus(l._17, r._17), - rgroup.minus(l._18, r._18), - sgroup.minus(l._19, r._19), - tgroup.minus(l._20, r._20), - ugroup.minus(l._21, r._21), - vgroup.minus(l._22, r._22) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) = ( + agroup.minus(l._1, r._1), + bgroup.minus(l._2, r._2), + cgroup.minus(l._3, r._3), + dgroup.minus(l._4, r._4), + egroup.minus(l._5, r._5), + fgroup.minus(l._6, r._6), + ggroup.minus(l._7, r._7), + hgroup.minus(l._8, r._8), + igroup.minus(l._9, r._9), + jgroup.minus(l._10, r._10), + kgroup.minus(l._11, 
r._11), + lgroup.minus(l._12, r._12), + mgroup.minus(l._13, r._13), + ngroup.minus(l._14, r._14), + ogroup.minus(l._15, r._15), + pgroup.minus(l._16, r._16), + qgroup.minus(l._17, r._17), + rgroup.minus(l._18, r._18), + sgroup.minus(l._19, r._19), + tgroup.minus(l._20, r._20), + ugroup.minus(l._21, r._21), + vgroup.minus(l._22, r._22) + ) } /** @@ -4268,59 +4139,57 @@ class Tuple22Ring[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, vring: Ring[V] ) extends Tuple22Group[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V] with Ring[(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] { - override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) = - ( - aring.one, - bring.one, - cring.one, - dring.one, - ering.one, - fring.one, - gring.one, - hring.one, - iring.one, - jring.one, - kring.one, - lring.one, - mring.one, - nring.one, - oring.one, - pring.one, - qring.one, - rring.one, - sring.one, - tring.one, - uring.one, - vring.one - ) + override def one: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) = ( + aring.one, + bring.one, + cring.one, + dring.one, + ering.one, + fring.one, + gring.one, + hring.one, + iring.one, + jring.one, + kring.one, + lring.one, + mring.one, + nring.one, + oring.one, + pring.one, + qring.one, + rring.one, + sring.one, + tring.one, + uring.one, + vring.one + ) override def times( l: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V), r: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) - ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) = - ( - aring.times(l._1, r._1), - bring.times(l._2, r._2), - cring.times(l._3, r._3), - dring.times(l._4, r._4), - ering.times(l._5, r._5), - fring.times(l._6, r._6), - gring.times(l._7, r._7), - hring.times(l._8, r._8), - iring.times(l._9, r._9), - jring.times(l._10, r._10), - kring.times(l._11, r._11), - lring.times(l._12, r._12), - mring.times(l._13, r._13), - nring.times(l._14, r._14), - oring.times(l._15, r._15), - pring.times(l._16, r._16), - qring.times(l._17, r._17), - rring.times(l._18, r._18), - sring.times(l._19, r._19), - tring.times(l._20, r._20), - uring.times(l._21, r._21), - vring.times(l._22, r._22) - ) + ): (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) = ( + aring.times(l._1, r._1), + bring.times(l._2, r._2), + cring.times(l._3, r._3), + dring.times(l._4, r._4), + ering.times(l._5, r._5), + fring.times(l._6, r._6), + gring.times(l._7, r._7), + hring.times(l._8, r._8), + iring.times(l._9, r._9), + jring.times(l._10, r._10), + kring.times(l._11, r._11), + lring.times(l._12, r._12), + mring.times(l._13, r._13), + nring.times(l._14, r._14), + oring.times(l._15, r._15), + pring.times(l._16, r._16), + qring.times(l._17, r._17), + rring.times(l._18, r._18), + sring.times(l._19, r._19), + tring.times(l._20, r._20), + uring.times(l._21, r._21), + vring.times(l._22, r._22) + ) } trait GeneratedSemigroupImplicits { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala index adac1141d..8d2188d16 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLog.scala @@ -419,7 +419,7 @@ case class SparseHLL(override val bits: Int, maxRhow: Map[Int, Max[Byte]]) exten val iter: Iterator[(Int, Max[Byte])] = maxRhow.iterator while (iter.hasNext) { - val (idx, _) = iter.next + val (idx, _) = 
iter.next() val existing: Byte = newContents(idx) val other: Byte = maxRhow(idx).get @@ -575,12 +575,12 @@ class HyperLogLogMonoid(val bits: Int) extends Monoid[HLL] with BoundedSemilatti None } else { val iter = items.iterator.buffered - var curValue = iter.next + var curValue = iter.next() while (iter.hasNext) { curValue = (curValue, iter.head) match { case (DenseHLL(_, _), _) => denseUpdate(curValue, iter) case (_, DenseHLL(_, _)) => denseUpdate(curValue, iter) - case _ => curValue + iter.next + case _ => curValue + iter.next() } } Some(curValue) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala index f795b1a4c..75b5c7ccc 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/HyperLogLogSeries.scala @@ -62,7 +62,7 @@ case class HLLSeries(bits: Int, rows: Vector[Map[Int, Long]]) { while (i >= 0) { val it = rows(i).iterator while (it.hasNext) { - val (k, t) = it.next + val (k, t) = it.next() if (t >= threshold && seen.add(k)) { sum += HyperLogLog.negativePowersOfTwo(i + 1) } @@ -142,7 +142,7 @@ class HyperLogLogSeriesMonoid(val bits: Int) extends Monoid[HLLSeries] { val bldr = Vector.newBuilder[Map[Int, Long]] val lit = left.rows.iterator val rit = right.rows.iterator - while (lit.hasNext && rit.hasNext) bldr += combine(lit.next, rit.next) + while (lit.hasNext && rit.hasNext) bldr += combine(lit.next(), rit.next()) val zipped = bldr.result() HLLSeries(bits, zipped ++ right.rows.slice(ln, rn)) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala b/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala index d800ec791..5fc8d6dc4 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala @@ -25,7 +25,7 @@ import java.lang.{ } import java.util.{ArrayList => JArrayList, HashMap => JHashMap, List => JList, Map => JMap} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ object JIntRing extends Ring[JInt] { override val zero: JInt = JInt.valueOf(0) diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala b/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala index 8ee81c42d..55a9f8e54 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MapAlgebra.scala @@ -224,7 +224,7 @@ object MapAlgebra { } else oldVOpt.get bldr += v } - mutable.iterator.map { case (k, bldr) => (k, bldr.result) }.toMap + mutable.iterator.map { case (k, bldr) => (k, bldr.result()) }.toMap } // Consider this as edges from k -> v, produce a Map[K,Set[V]] diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Max.scala b/algebird-core/src/main/scala/com/twitter/algebird/Max.scala index df95c4691..6e84c7541 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Max.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Max.scala @@ -160,8 +160,8 @@ private[algebird] sealed abstract class LowPriorityMaxInstances { while (true) { if (xs.hasNext) { if (ys.hasNext) { - val x = xs.next - val y = ys.next + val x = xs.next() + val y = ys.next() val cmp = ord.compare(x, y) if (cmp != 0) return cmp } else { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala 
b/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala index ada06450b..5c6b9ebc9 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/MinHasher.scala @@ -69,7 +69,7 @@ abstract class MinHasher[H](val numHashes: Int, val numBands: Int)(implicit n: N private val hashFunctions = { val r = new scala.util.Random(seed) val numHashFunctions = math.ceil(numBytes / 16.0).toInt - (1 to numHashFunctions).map(_ => MurmurHash128(r.nextLong)) + (1 to numHashFunctions).map(_ => MurmurHash128(r.nextLong())) } /** Signature for empty set, needed to be a proper Monoid */ diff --git a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala index 2376cfbf8..c78897715 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/QTree.scala @@ -151,9 +151,9 @@ class QTreeSemigroup[A](k: Int)(implicit val underlyingMonoid: Monoid[A]) extend val batchSize = compressBatchSize var count = 1 // start at 1, so we only compress after batchSize items val iter = items.toIterator - var result = iter.next // due to not being empty, this does not throw + var result = iter.next() // due to not being empty, this does not throw while (iter.hasNext) { - result = result.merge(iter.next) + result = result.merge(iter.next()) count += 1 if (count % batchSize == 0) { result = result.compress(k) @@ -428,8 +428,8 @@ class QTree[@specialized(Int, Long, Float, Double) A] private[algebird] ( print(" (" + parentCount + ")") } println(" {" + _sum + "}") - lowerChild.foreach(_.dump) - upperChild.foreach(_.dump) + lowerChild.foreach(_.dump()) + upperChild.foreach(_.dump()) } /** diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala b/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala index ff0dce400..d1d10ced7 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Scan.scala @@ -169,9 +169,9 @@ sealed abstract class Scan[-I, +O] extends Serializable { def scanIterator(iter: Iterator[I]): Iterator[O] = new AbstractIterator[O] { override def hasNext: Boolean = iter.hasNext var state: State = initialState - override def next: O = { + override def next(): O = { val thisState = state - val thisA = iter.next + val thisA = iter.next() val (thisC, nextState) = presentAndNextState(thisA, thisState) state = nextState thisC diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala b/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala index f5973c338..e327ed57c 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SketchMap.scala @@ -145,7 +145,7 @@ case class SketchMapParams[K](seed: Int, width: Int, depth: Int, heavyHittersCou val numCounters = width (0 to (numHashes - 1)).map { _ => val smhash: SketchMapHash[K] = - SketchMapHash(CMSHash[Long](r.nextInt, 0, numCounters), seed)(serialization) + SketchMapHash(CMSHash[Long](r.nextInt(), 0, numCounters), seed)(serialization) (k: K) => smhash(k) } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala index 68830547e..d18b58dd6 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala +++ 
b/algebird-core/src/main/scala/com/twitter/algebird/SpaceSaver.scala @@ -78,7 +78,7 @@ object SpaceSaver { buff.putLong(b) buffer ++= buff.array() } - buffer.result.toArray + buffer.result().toArray } // Make sure to be reversible so fromBytes(toBytes(x)) == x diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala b/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala index 4cd9a1505..01f68d141 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SummingCache.scala @@ -22,7 +22,7 @@ package com.twitter.algebird */ import java.util.{LinkedHashMap => JLinkedHashMap, Map => JMap} import scala.collection.mutable.{Map => MMap} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ object SummingCache { def apply[K, V: Semigroup](cap: Int): SummingCache[K, V] = @@ -57,7 +57,7 @@ class SummingCache[K, V](capacity: Int)(implicit sgv: Semigroup[V]) extends Stat override def flush: Option[Map[K, V]] = { // Get a copy of the cache, since it is mutable val res = optNonEmpty(cache.iterator.toMap) - cache.clear + cache.clear() res } override def isFlushed: Boolean = cache.isEmpty diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SummingIterator.scala b/algebird-core/src/main/scala/com/twitter/algebird/SummingIterator.scala index cd9e7deaf..7644aca2e 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SummingIterator.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SummingIterator.scala @@ -49,16 +49,16 @@ class SummingIterator[V](summer: StatefulSummer[V], it: Iterator[V]) // This has to be lazy because it shouldn't be touched until the val it is exhausted protected lazy val tailIter: Iterator[V] = summer.flush.iterator override def hasNext: Boolean = it.hasNext || tailIter.hasNext - override def next: V = nextInternal + override def next(): V = nextInternal @tailrec private def nextInternal: V = if (it.hasNext) { - summer.put(it.next) match { + summer.put(it.next()) match { case None => nextInternal case Some(v) => v } } else { - tailIter.next + tailIter.next() } } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/SummingQueue.scala b/algebird-core/src/main/scala/com/twitter/algebird/SummingQueue.scala index 0717e54c1..8ff21d0a9 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/SummingQueue.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/SummingQueue.scala @@ -32,7 +32,7 @@ package com.twitter.algebird */ import java.util.concurrent.ArrayBlockingQueue -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.collection.mutable.ListBuffer object SummingQueue { diff --git a/algebird-core/src/main/scala/com/twitter/algebird/Window.scala b/algebird-core/src/main/scala/com/twitter/algebird/Window.scala index 8df431d7e..199553780 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/Window.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/Window.scala @@ -126,7 +126,7 @@ abstract class WindowMonoid[T](windowSize: Int) extends Monoid[Window[T]] { val it = ws.toIterator var queue = Queue.empty[T] while (it.hasNext) { - queue = (queue ++ it.next.items).takeRight(windowSize) + queue = (queue ++ it.next().items).takeRight(windowSize) } Some(Window(monoid.sum(queue), queue)) } @@ -140,7 +140,7 @@ abstract class WindowMonoid[T](windowSize: Int) extends Monoid[Window[T]] { while (it.hasNext) { // avoid materializing the whole 
list in memory // at one time - queue = queue :+ it.next + queue = queue :+ it.next() size = size + 1 if (size > windowSize) { queue = queue.tail diff --git a/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala b/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala index f970c43f3..c50d912d7 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/matrix/AdaptiveMatrix.scala @@ -95,7 +95,7 @@ object AdaptiveMatrix { var row = 0 val iter = storage.iterator while (iter.hasNext) { - val curRow = iter.next + val curRow = iter.next() curRow.foreach { case (col, value) => buffer(row * cols + col) = value } @@ -114,7 +114,7 @@ object AdaptiveMatrix { val sparseStorage = (0 until rows).map(_ => MMap[Int, V]()).toIndexedSeq while (iter.hasNext) { - val current = iter.next + val current = iter.next() current match { case d @ DenseMatrix(_, _, _) => return denseUpdate(d, iter) case s @ SparseColumnMatrix(_) => diff --git a/algebird-core/src/main/scala/com/twitter/algebird/matrix/SparseColumnMatrix.scala b/algebird-core/src/main/scala/com/twitter/algebird/matrix/SparseColumnMatrix.scala index 69f553360..b1f95b21a 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/matrix/SparseColumnMatrix.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/matrix/SparseColumnMatrix.scala @@ -49,7 +49,7 @@ case class SparseColumnMatrix[V: Monoid](rowsByColumns: IndexedSeq[AdaptiveVecto while (row < rows) { val iter = rowsByColumns(row).denseIterator while (iter.hasNext) { - val (col, value) = iter.next + val (col, value) = iter.next() val indx = row * lcols + col buffer(indx) = valueMonoid.plus(buffer(indx), value) } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueAggregator.scala b/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueAggregator.scala index 3a5e212b8..cc01f4e16 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueAggregator.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/mutable/PriorityQueueAggregator.scala @@ -16,7 +16,7 @@ limitations under the License. 
package com.twitter.algebird.mutable import com.twitter.algebird.MonoidAggregator -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import java.util.PriorityQueue diff --git a/algebird-core/src/main/scala/com/twitter/algebird/statistics/GaussianDistributionMonoid.scala b/algebird-core/src/main/scala/com/twitter/algebird/statistics/GaussianDistributionMonoid.scala index d4ea8f9ea..a1f9d3c7d 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/statistics/GaussianDistributionMonoid.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/statistics/GaussianDistributionMonoid.scala @@ -39,7 +39,7 @@ object GaussianDistributionMonoid extends Monoid[GaussianDistribution] { var sigma2 = 0.0 val it = its.toIterator while (it.hasNext) { - val g = it.next + val g = it.next() mean += g.mean sigma2 += g.sigma2 } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala b/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala index 5c3e4c37b..38c026937 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/statistics/IterCallStatistics.scala @@ -36,7 +36,7 @@ private class IterCallStatistics(threadSafe: Boolean) { total.add(v) // log2(v + 1) for v up to 2^maxBucket val bucket = min(64 - numberOfLeadingZeros(v), maxBucket) - distribution(bucket).increment + distribution(bucket).increment() } def count: Long = distribution.foldLeft(0L)(_ + _.get) // sum @@ -59,8 +59,8 @@ private class IterCallStatistics(threadSafe: Boolean) { private class CountingIterator[T](val i: Iterator[T]) extends Iterator[T] { private[this] final var nextCount: Long = 0 override def hasNext: Boolean = i.hasNext - override def next: T = { - val n = i.next + override def next(): T = { + val n = i.next() nextCount += 1 n } diff --git a/algebird-core/src/main/scala/com/twitter/algebird/statistics/Statistics.scala b/algebird-core/src/main/scala/com/twitter/algebird/statistics/Statistics.scala index ce166c250..3becb8b8a 100644 --- a/algebird-core/src/main/scala/com/twitter/algebird/statistics/Statistics.scala +++ b/algebird-core/src/main/scala/com/twitter/algebird/statistics/Statistics.scala @@ -37,7 +37,7 @@ class StatisticsSemigroup[T](threadSafe: Boolean = true)(implicit wrappedSemigro def getSumOptionCallTime: Long = sumOptionCallsStats.getTotalCallTime override def plus(x: T, y: T): T = { - plusCallsCount.increment + plusCallsCount.increment() Semigroup.plus(x, y) } @@ -66,7 +66,7 @@ class StatisticsMonoid[T](threadSafe: Boolean = true)(implicit wrappedMonoid: Mo def getSumCallTime: Long = sumCallsStats.getTotalCallTime override def zero: T = { - zeroCallsCount.increment + zeroCallsCount.increment() Monoid.zero } @@ -95,12 +95,12 @@ class StatisticsGroup[T](threadSafe: Boolean = true)(implicit group: Group[T]) def getMinusCallCount: Long = minusCallsCount.get override def negate(x: T): T = { - negateCallsCount.increment + negateCallsCount.increment() Group.negate(x) } override def minus(l: T, r: T): T = { - minusCallsCount.increment + minusCallsCount.increment() Group.minus(l, r) } @@ -129,12 +129,12 @@ class StatisticsRing[T](threadSafe: Boolean = true)(implicit ring: Ring[T]) def getProductCallTime: Long = productCallsStats.getTotalCallTime override def one: T = { - oneCallsCount.increment + oneCallsCount.increment() Ring.one } override def times(x: T, y: T): T = { - timesCallsCount.increment + 
timesCallsCount.increment() Ring.times(x, y) } diff --git a/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala b/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala index 990e07ee3..ab7df11f7 100644 --- a/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala +++ b/algebird-generic/src/main/scala/com/twitter/algebird/generic/Instances.scala @@ -2,6 +2,7 @@ package com.twitter.algebird.generic import shapeless._ import com.twitter.algebird._ +import scala.collection.compat._ object Shapeless extends Shapeless3 { @@ -105,12 +106,12 @@ class HConsSemigroup[A, B <: HList](protected val a: Semigroup[A], protected val a.plus(x.head, y.head) :: b.plus(x.tail, y.tail) override def sumOption(xs: TraversableOnce[A :: B]): Option[A :: B] = - if (xs.isEmpty) { + if (xs.iterator.isEmpty) { None } else { val bufA = ArrayBufferedOperation.fromSumOption[A](1000)(a) val bufB = ArrayBufferedOperation.fromSumOption[B](1000)(b) - xs.foreach { case a0 :: b0 => + xs.iterator.foreach { case a0 :: b0 => bufA.put(a0) bufB.put(b0) } diff --git a/algebird-test/src/main/scala/com/twitter/algebird/macros/ArbitraryCaseClassMacro.scala b/algebird-test/src/main/scala/com/twitter/algebird/macros/ArbitraryCaseClassMacro.scala index a061afba1..12127a770 100644 --- a/algebird-test/src/main/scala/com/twitter/algebird/macros/ArbitraryCaseClassMacro.scala +++ b/algebird-test/src/main/scala/com/twitter/algebird/macros/ArbitraryCaseClassMacro.scala @@ -2,7 +2,6 @@ package com.twitter.algebird.macros import scala.language.experimental.macros -import com.twitter.algebird._ import com.twitter.algebird.macros.MacroCompat._ import org.scalacheck.{Arbitrary, Gen} diff --git a/algebird-test/src/test/scala/com/twitter/algebird/JavaBoxedTests.scala b/algebird-test/src/test/scala/com/twitter/algebird/JavaBoxedTests.scala index 727dd80c0..192b5739b 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/JavaBoxedTests.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/JavaBoxedTests.scala @@ -5,7 +5,7 @@ import java.util.{List => JList, Map => JMap} import org.scalacheck.{Arbitrary, Gen} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import java.{util => ju} class JavaBoxedTests extends CheckProperties { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala b/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala index f1ab5b373..bca9dc748 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/TopKTests.scala @@ -22,7 +22,7 @@ import com.twitter.algebird.mutable.PriorityQueueMonoid import org.scalacheck.Arbitrary import org.scalacheck.Prop._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class TopKTests extends CheckProperties { import com.twitter.algebird.BaseProperties._ diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala index c66e7a8be..65a464201 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListMMapSum.scala @@ -18,6 +18,7 @@ package com.twitter.algebird.util.summer import com.twitter.algebird._ import com.twitter.util.{Future, FuturePool} import scala.collection.mutable.{ListBuffer, Map => MMap} +import 
scala.collection.compat._ /** * @author @@ -54,7 +55,7 @@ class AsyncListMMapSum[Key, Value]( val curData = mutex.synchronized { presentTuples = 0 val l = queueMap.toList - queueMap.clear + queueMap.clear() l } val result: Map[Key, Value] = curData.iterator.flatMap { case (k, listV) => @@ -66,11 +67,11 @@ class AsyncListMMapSum[Key, Value]( } def addAll(vals: TraversableOnce[(Key, Value)]): Future[Map[Key, Value]] = { - insertOp.incr + insertOp.incr() var newlyAddedTuples = 0 mutex.synchronized { - vals.foreach { case (k, v) => + vals.iterator.foreach { case (k, v) => val existingV = queueMap.getOrElseUpdate(k, ListBuffer[Value]()) existingV += v newlyAddedTuples += 1 @@ -79,7 +80,7 @@ class AsyncListMMapSum[Key, Value]( } if (presentTuples >= bufferSize.v) { - sizeIncr.incr + sizeIncr.incr() flush } else Future.value(emptyResult) diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala index b1236bbbe..b19704c03 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncListSum.scala @@ -20,10 +20,11 @@ import java.util.concurrent.atomic.AtomicInteger import com.twitter.algebird._ import com.twitter.util.{Future, FuturePool} +import com.twitter.algebird.util.UtilAlgebras._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.collection.mutable.{Set => MSet} -import com.twitter.algebird.util.UtilAlgebras._ +import scala.collection.compat._ /** * @author @@ -88,7 +89,7 @@ class AsyncListSum[Key, Value]( val keys = MSet[Key]() keys ++= queueMap.keySet.iterator.asScala - val lFuts = Future.collect(keys.toIterator.flatMap { k => + val lFuts = Future.collect(keys.iterator.flatMap { k => val retV = queueMap.remove(k) if (retV != null) { @@ -103,7 +104,7 @@ class AsyncListSum[Key, Value]( @annotation.tailrec private[this] final def doInsert(key: Key, value: Value): Unit = { - tuplesIn.incr + tuplesIn.incr() val (success, countChange) = if (queueMap.containsKey(key)) { val oldValue = queueMap.get(key) if (oldValue != null) { @@ -121,20 +122,20 @@ class AsyncListSum[Key, Value]( // Successful insert elementsInCache.addAndGet(countChange) } else { - insertFails.incr + insertFails.incr() return doInsert(key, value) } } def addAll(vals: TraversableOnce[(Key, Value)]): Future[Map[Key, Value]] = workPool { - insertOp.incr - vals.foreach { case (k, v) => + insertOp.incr() + vals.iterator.foreach { case (k, v) => doInsert(k, v) } if (elementsInCache.get >= innerBuffSize) { - sizeIncr.incr + sizeIncr.incr() flush } else { Future.value(Map.empty[Key, Value]) diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncMapSum.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncMapSum.scala index 731f65de6..7f613129b 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncMapSum.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncMapSum.scala @@ -19,7 +19,8 @@ import com.twitter.algebird._ import com.twitter.util.{Future, FuturePool} import java.util.concurrent.ArrayBlockingQueue import scala.collection.mutable.ArrayBuffer -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ +import scala.collection.compat._ /** * @author @@ -58,12 +59,12 @@ class AsyncMapSum[Key, Value]( } def addAll(vals: TraversableOnce[(Key, Value)]): 
Future[Map[Key, Value]] = { - insertOp.incr + insertOp.incr() - val curData = Semigroup.sumOption(vals.map(Map(_))).getOrElse(Map.empty) + val curData = Semigroup.sumOption(vals.iterator.map(Map(_))).getOrElse(Map.empty) if (!queue.offer(curData)) { flush.map { flushRes => - sizeIncr.incr // todo not sure if need to increase size + sizeIncr.incr() // todo not sure if need to increase size Semigroup.plus(flushRes, curData) } } else { diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala index 54a23b6e8..422d6c4c8 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/AsyncSummer.scala @@ -69,11 +69,11 @@ trait WithFlushConditions[T, M <: Iterable[T]] extends AsyncSummer[T, M] { def tick: Future[M] = if (timedOut) { - timeoutIncr.incr + timeoutIncr.incr() lastDump = System.currentTimeMillis // reset the timeout condition flush } else if (memoryWaterMark) { - memoryIncr.incr + memoryIncr.incr() lastDump = System.currentTimeMillis // reset the timeout condition flush } else { diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala index bd0e9e2a8..9fd752799 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/HeavyHittersCachingSummer.scala @@ -20,6 +20,8 @@ import com.twitter.util.Future import scala.collection.mutable.ListBuffer +import scala.collection.compat._ + /** * @author * Ian O Connell @@ -53,7 +55,7 @@ class ApproxHHTracker(hhPct: HeavyHittersPercent, updateFreq: UpdateFrequency, r private[this] final val hashes: IndexedSeq[CMSHash[Long]] = { val r = new scala.util.Random(5) - (0 until DEPTH).map(_ => CMSHash[Long](r.nextInt, 0, WIDTH)) + (0 until DEPTH).map(_ => CMSHash[Long](r.nextInt(), 0, WIDTH)) }.toIndexedSeq @inline @@ -91,7 +93,7 @@ class ApproxHHTracker(hhPct: HeavyHittersPercent, updateFreq: UpdateFrequency, r def pruneHH(): Unit = { val iter = hh.values.iterator while (iter.hasNext) { - val n = iter.next + val n = iter.next() if (n < hhMinReq) { iter.remove } @@ -118,7 +120,7 @@ class ApproxHHTracker(hhPct: HeavyHittersPercent, updateFreq: UpdateFrequency, r // We include the ability to reset the CMS so we can age our counters // over time private[this] def resetCMS(): Unit = { - hh.clear + hh.clear() totalCount = 0L hhMinReq = 0L countsTable = Array.fill(WIDTH * DEPTH)(0L) @@ -158,7 +160,7 @@ class ApproxHHTracker(hhPct: HeavyHittersPercent, updateFreq: UpdateFrequency, r val hh = new ListBuffer[T] val nonHH = new ListBuffer[T] - t.foreach { t => + t.iterator.foreach { t => if (hhFilter(extractor(t))) hh += t else @@ -247,7 +249,7 @@ class HeavyHittersCachingSummer[K, V]( def addAll(vals: TraversableOnce[T]): Future[Iterable[T]] = { // todo not sure if need to increment as backing summer may already be doing it - insertOp.incr + insertOp.incr() val (hh, nonHH) = approxHH.splitTraversableOnce(vals, { t: T => t._1.hashCode }) if (!hh.isEmpty) { diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/NullSummer.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/NullSummer.scala index 133f880eb..976069cea 100644 --- 
a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/NullSummer.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/NullSummer.scala @@ -18,6 +18,8 @@ package com.twitter.algebird.util.summer import com.twitter.algebird._ import com.twitter.util.Future +import scala.collection.compat._ + /** * @author * Ian O Connell @@ -30,8 +32,8 @@ class NullSummer[Key, Value](tuplesIn: Incrementor, tuplesOut: Incrementor)(impl def addAll(vals: TraversableOnce[(Key, Value)]): Future[Map[Key, Value]] = { val r = Semigroup - .sumOption(vals.map { inV => - tuplesIn.incr + .sumOption(vals.iterator.map { inV => + tuplesIn.incr() Map(inV) }) .getOrElse(Map.empty) diff --git a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/SyncSummingQueue.scala b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/SyncSummingQueue.scala index b1e268342..cefbc8d89 100644 --- a/algebird-util/src/main/scala/com/twitter/algebird/util/summer/SyncSummingQueue.scala +++ b/algebird-util/src/main/scala/com/twitter/algebird/util/summer/SyncSummingQueue.scala @@ -20,8 +20,9 @@ import java.util.concurrent.ArrayBlockingQueue import com.twitter.algebird._ import com.twitter.util.Future -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.collection.mutable.ListBuffer +import scala.collection.compat._ /** * @author @@ -56,8 +57,8 @@ case class SyncSummingQueue[Key, Value]( def addAll(vals: TraversableOnce[(Key, Value)]): Future[Map[Key, Value]] = { val outputs = squeue - .put(Monoid.sum(vals.map { i => - tuplesIn.incr + .put(Monoid.sum(vals.iterator.map { i => + tuplesIn.incr() Map(i) })) .getOrElse(Map.empty) @@ -80,10 +81,10 @@ class CustomSummingQueue[V](capacity: Int, sizeIncr: Incrementor, putCalls: Incr */ final def put(item: V): Option[V] = if (queueOption.isDefined) { - putCalls.incr + putCalls.incr() queueOption.flatMap { queue => if (!queue.offer(item)) { - sizeIncr.incr + sizeIncr.incr() // Queue is full, do the work: Monoid.plus(flush, Some(item)) } else { diff --git a/build.sbt b/build.sbt index 16e262c7a..4f58509e4 100644 --- a/build.sbt +++ b/build.sbt @@ -41,10 +41,6 @@ val sharedSettings = Seq( organization := "com.twitter", scalaVersion := "2.12.16", crossScalaVersions := Seq("2.11.12", scalaVersion.value), - resolvers ++= Seq( - Opts.resolver.sonatypeSnapshots, - Opts.resolver.sonatypeReleases - ), Test / parallelExecution := true, scalacOptions ++= Seq( "-unchecked", diff --git a/scripts/ntuple_generators.rb b/scripts/ntuple_generators.rb index 068f60e7c..678e78791 100755 --- a/scripts/ntuple_generators.rb +++ b/scripts/ntuple_generators.rb @@ -141,10 +141,10 @@ def get_sumoption(n, bufferSize) end.join(", ") "override def sumOption(#{method_params}) = { - if (to.isEmpty) None + if (to.iterator.isEmpty) None else { #{buffers} - to.foreach { tuple => #{put_statements} } + to.iterator.foreach { tuple => #{put_statements} } Some((#{gets_commaed})) } }" @@ -239,6 +239,8 @@ def print_implicit_definitions puts puts "import ArrayBufferedOperation.fromSumOption" puts +puts "import scala.collection.compat._" +puts print_class_definitions puts print_implicit_definitions From 120961796e4ede6ddd7589a5cb1bc47c2dee27f8 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sun, 5 Mar 2023 19:57:11 +0000 Subject: [PATCH 288/306] Update scala 2.12 and 2.13 versions (#1108) --- .github/workflows/ci.yml | 10 +++++----- build.sbt | 12 ++++++------ 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/ci.yml 
b/.github/workflows/ci.yml index 244a8451f..6fb0700e4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,8 +30,8 @@ jobs: - 17 scala: - 2.11.12 - - 2.12.16 - - 2.13.8 + - 2.12.17 + - 2.13.10 test-coverage: runs-on: ubuntu-latest steps: @@ -42,7 +42,7 @@ jobs: distribution: "temurin" java-version: 17 - run: | - sbt ++2.12.16 coverage test coverageReport + sbt ++2.12.17 coverage test coverageReport bash <(curl -s https://codecov.io/bash) mimaReport: runs-on: ubuntu-latest @@ -62,8 +62,8 @@ jobs: - 17 scala: - 2.11.12 - - 2.12.16 - - 2.13.8 + - 2.12.17 + - 2.13.10 microsite: runs-on: ubuntu-latest steps: diff --git a/build.sbt b/build.sbt index 4f58509e4..bb188b9cd 100644 --- a/build.sbt +++ b/build.sbt @@ -39,7 +39,7 @@ crossScalaVersions := Nil val sharedSettings = Seq( organization := "com.twitter", - scalaVersion := "2.12.16", + scalaVersion := "2.12.17", crossScalaVersions := Seq("2.11.12", scalaVersion.value), Test / parallelExecution := true, scalacOptions ++= Seq( @@ -205,7 +205,7 @@ def module(name: String) = { } lazy val algebirdCore = module("core").settings( - crossScalaVersions += "2.13.8", + crossScalaVersions += "2.13.10", initialCommands := """ import com.twitter.algebird._ """.stripMargin('|'), @@ -235,7 +235,7 @@ lazy val algebirdCore = module("core").settings( lazy val algebirdTest = module("test") .settings( Test / testOptions ++= Seq(Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "4")), - crossScalaVersions += "2.13.8", + crossScalaVersions += "2.13.10", libraryDependencies ++= Seq( "org.scalacheck" %% "scalacheck" % scalacheckVersion, @@ -266,14 +266,14 @@ lazy val algebirdBenchmark = module("benchmark") lazy val algebirdUtil = module("util") .settings( - crossScalaVersions += "2.13.8", + crossScalaVersions += "2.13.10", libraryDependencies ++= Seq("com.twitter" %% "util-core" % utilVersion) ) .dependsOn(algebirdCore, algebirdTest % "test->test") lazy val algebirdBijection = module("bijection") .settings( - crossScalaVersions += "2.13.8", + crossScalaVersions += "2.13.10", libraryDependencies += "com.twitter" %% "bijection-core" % bijectionVersion ) .dependsOn(algebirdCore, algebirdTest % "test->test") @@ -290,7 +290,7 @@ lazy val algebirdSpark = module("spark") lazy val algebirdGeneric = module("generic") .settings( - crossScalaVersions += "2.13.8", + crossScalaVersions += "2.13.10", libraryDependencies ++= Seq( "com.chuusai" %% "shapeless" % "2.3.10", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" From 19f30c230092c4950ff7242faf87e2579d579231 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 16 Mar 2023 11:05:14 +0100 Subject: [PATCH 289/306] Update JavaEWAH to 1.2.3 (#1110) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index bb188b9cd..ce803439b 100644 --- a/build.sbt +++ b/build.sbt @@ -4,7 +4,7 @@ import pl.project13.scala.sbt.JmhPlugin val algebraVersion = "2.0.0" val bijectionVersion = "0.9.7" -val javaEwahVersion = "1.1.13" +val javaEwahVersion = "1.2.3" val kindProjectorVersion = "0.13.2" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" From 1b02953f29c9047998e50e7ff96eb3952a6c4dc6 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 14 Jun 2023 23:20:50 +0200 Subject: [PATCH 290/306] Update sbt-jmh to 0.4.5 (#1120) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt 
b/project/plugins.sbt index 9162c53a6..5bff27a4b 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -10,7 +10,7 @@ addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.1") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.7") -addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.4") +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.5") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.4") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.11") From 97eacd0692aae09aea15e711d329d9d08af6418d Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 15 Jun 2023 00:07:52 +0200 Subject: [PATCH 291/306] Update sbt-scoverage to 2.0.8 (#1117) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 5bff27a4b..4f482167c 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -9,7 +9,7 @@ addSbtPlugin("com.47deg" % "sbt-microsites" % "1.4.2") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.1") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.7") +addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.8") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.5") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.4") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.11") From 77b08f95d43558c33a432ada64894878e8209155 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 15 Jun 2023 00:08:11 +0200 Subject: [PATCH 292/306] Update sbt-scalafix to 0.11.0 (#1116) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 4f482167c..4ac14cfc1 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -11,7 +11,7 @@ addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.1") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.8") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.5") -addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.4") +addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.11.0") addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.11") dependencyOverrides += "org.scala-lang.modules" %% "scala-xml" % "2.1.0" From 8c4b5bc46870595731e790910f9d36d1707a41e9 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 15 Jun 2023 00:08:27 +0200 Subject: [PATCH 293/306] Update scalatest to 3.2.16 (#1115) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index ce803439b..2ae86956a 100644 --- a/build.sbt +++ b/build.sbt @@ -8,7 +8,7 @@ val javaEwahVersion = "1.2.3" val kindProjectorVersion = "0.13.2" val paradiseVersion = "2.1.1" val quasiquotesVersion = "2.1.0" -val scalaTestVersion = "3.2.15" +val scalaTestVersion = "3.2.16" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" val scalaCollectionCompat = "2.9.0" From f644ba451a9892858df1b1c57fa530a843e9bf20 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Thu, 15 Jun 2023 00:08:52 +0200 Subject: [PATCH 294/306] Update scala-collection-compat to 2.10.0 (#1113) --- build.sbt | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 2ae86956a..f713fea62 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.16" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" -val scalaCollectionCompat = "2.9.0" +val scalaCollectionCompat = "2.10.0" val utilVersion = "21.2.0" val sparkVersion = "2.4.8" From 3bafa68210392182ce767af324691691be7cdba4 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 1 Jul 2023 20:13:22 +0200 Subject: [PATCH 295/306] Update sbt-mima-plugin to 1.1.2 (#1112) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 4ac14cfc1..a3b52b249 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -8,7 +8,7 @@ resolvers ++= Seq( addSbtPlugin("com.47deg" % "sbt-microsites" % "1.4.2") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.1") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.8") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.5") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.11.0") From ac7ee767ec9d0838994d6cc248c39305f241128a Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 1 Jul 2023 20:46:33 +0200 Subject: [PATCH 296/306] Update sbt-ci-release to 1.5.12 (#1114) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index a3b52b249..271de7e51 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -12,6 +12,6 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.8") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.5") addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.11.0") -addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.11") +addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.12") dependencyOverrides += "org.scala-lang.modules" %% "scala-xml" % "2.1.0" From 6b7deac7fe391b11d823a54b190c812cfef18eab Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Sat, 1 Jul 2023 20:48:47 +0200 Subject: [PATCH 297/306] Update scala-collection-compat to 2.11.0 (#1121) --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index f713fea62..3122a94c4 100644 --- a/build.sbt +++ b/build.sbt @@ -11,7 +11,7 @@ val quasiquotesVersion = "2.1.0" val scalaTestVersion = "3.2.16" val scalaTestPlusVersion = "3.1.0.0-RC2" val scalacheckVersion = "1.15.2" -val scalaCollectionCompat = "2.10.0" +val scalaCollectionCompat = "2.11.0" val utilVersion = "21.2.0" val sparkVersion = "2.4.8" From 165e517937af0b396369e0a1d547d5802c72859e Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sat, 1 Jul 2023 15:29:38 -0400 Subject: [PATCH 298/306] Update scala 2.13 to v2.13.11 (#1124) Co-authored-by: Scala Steward --- .github/workflows/ci.yml | 4 ++-- build.sbt | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6fb0700e4..0372c6616 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -31,7 +31,7 @@ jobs: scala: - 2.11.12 - 2.12.17 - - 
2.13.10 + - 2.13.11 test-coverage: runs-on: ubuntu-latest steps: @@ -63,7 +63,7 @@ jobs: scala: - 2.11.12 - 2.12.17 - - 2.13.10 + - 2.13.11 microsite: runs-on: ubuntu-latest steps: diff --git a/build.sbt b/build.sbt index 3122a94c4..0ba21d001 100644 --- a/build.sbt +++ b/build.sbt @@ -205,7 +205,7 @@ def module(name: String) = { } lazy val algebirdCore = module("core").settings( - crossScalaVersions += "2.13.10", + crossScalaVersions += "2.13.11", initialCommands := """ import com.twitter.algebird._ """.stripMargin('|'), @@ -235,7 +235,7 @@ lazy val algebirdCore = module("core").settings( lazy val algebirdTest = module("test") .settings( Test / testOptions ++= Seq(Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "4")), - crossScalaVersions += "2.13.10", + crossScalaVersions += "2.13.11", libraryDependencies ++= Seq( "org.scalacheck" %% "scalacheck" % scalacheckVersion, @@ -266,14 +266,14 @@ lazy val algebirdBenchmark = module("benchmark") lazy val algebirdUtil = module("util") .settings( - crossScalaVersions += "2.13.10", + crossScalaVersions += "2.13.11", libraryDependencies ++= Seq("com.twitter" %% "util-core" % utilVersion) ) .dependsOn(algebirdCore, algebirdTest % "test->test") lazy val algebirdBijection = module("bijection") .settings( - crossScalaVersions += "2.13.10", + crossScalaVersions += "2.13.11", libraryDependencies += "com.twitter" %% "bijection-core" % bijectionVersion ) .dependsOn(algebirdCore, algebirdTest % "test->test") @@ -290,7 +290,7 @@ lazy val algebirdSpark = module("spark") lazy val algebirdGeneric = module("generic") .settings( - crossScalaVersions += "2.13.10", + crossScalaVersions += "2.13.11", libraryDependencies ++= Seq( "com.chuusai" %% "shapeless" % "2.3.10", "com.github.alexarchambault" %% "scalacheck-shapeless_1.14" % "1.2.5" From ebba4a8414b5c64c7161bdf7772ca8788c16606d Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sun, 2 Jul 2023 21:09:17 -0400 Subject: [PATCH 299/306] Update scalafmt to v3.7.5 (#1125) --- .scalafmt.conf | 2 +- .../com/twitter/algebird/CombinatorTest.scala | 2 +- .../algebird/TupleAggregatorsTest.scala | 1610 +++++++++-------- .../algebird/immutable/BitSetTest.scala | 4 +- 4 files changed, 853 insertions(+), 765 deletions(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index c9f903c4f..f8dafaf16 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.6.0 +version=3.7.5 runner.dialect = scala212 fileOverride { "glob:**/scala-2.13*/**" { diff --git a/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala index ed54d0671..0ab10af5d 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/CombinatorTest.scala @@ -58,7 +58,7 @@ class CombinatorTest extends CheckProperties { val lc = m(l) val rc = m(r) if (lc == rc) l > r else lc > rc - // Probably only approximately true with this cut-off + // Probably only approximately true with this cut-off } .take(40) .toSet diff --git a/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala index 3867b66c9..e5c780cbd 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/TupleAggregatorsTest.scala @@ -321,25 +321,29 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create an aggregator from a 
tuple of 17 aggregators" in { - val agg: Aggregator[Int, Tuple17[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ], Tuple17[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]] = + val agg: Aggregator[ + Int, + Tuple17[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ], + Tuple17[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int] + ] = Tuple17( MinAgg, MinAgg, @@ -363,26 +367,30 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create an aggregator from a tuple of 18 aggregators" in { - val agg: Aggregator[Int, Tuple18[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ], Tuple18[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]] = + val agg: Aggregator[ + Int, + Tuple18[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ], + Tuple18[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int] + ] = Tuple18( MinAgg, MinAgg, @@ -407,47 +415,51 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create an aggregator from a tuple of 19 aggregators" in { - val agg: Aggregator[Int, Tuple19[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ], Tuple19[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, + val agg: Aggregator[ Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ]] = Tuple19( + Tuple19[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ], + Tuple19[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ] + ] = Tuple19( MinAgg, MinAgg, MinAgg, @@ -472,49 +484,53 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create an aggregator from a tuple of 20 aggregators" in { - val agg: Aggregator[Int, Tuple20[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ], Tuple20[ + val agg: Aggregator[ Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ]] = Tuple20( + Tuple20[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ], + Tuple20[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ] + ] = Tuple20( MinAgg, MinAgg, MinAgg, @@ -540,51 +556,55 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create an aggregator from a tuple of 21 aggregators" in { - val agg: Aggregator[Int, Tuple21[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, + val agg: Aggregator[ Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ], Tuple21[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ]] = Tuple21( + Tuple21[ + Int, + Int, + Int, + Int, + Int, + 
Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ], + Tuple21[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ] + ] = Tuple21( MinAgg, MinAgg, MinAgg, @@ -611,53 +631,57 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create an aggregator from a tuple of 22 aggregators" in { - val agg: Aggregator[Int, Tuple22[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ], Tuple22[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, + val agg: Aggregator[ Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ]] = Tuple22( + Tuple22[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ], + Tuple22[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ] + ] = Tuple22( MinAgg, MinAgg, MinAgg, @@ -993,25 +1017,29 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create an aggregator from a tuple of 17 aggregators" in { - val agg: Aggregator[Int, Tuple17[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ], Tuple17[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]] = + val agg: Aggregator[ + Int, + Tuple17[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ], + Tuple17[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int] + ] = MultiAggregator( ( MinAgg, @@ -1037,26 +1065,30 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create an aggregator from a tuple of 18 aggregators" in { - val agg: Aggregator[Int, Tuple18[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ], Tuple18[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]] = + val agg: Aggregator[ + Int, + Tuple18[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ], + Tuple18[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int] + ] = MultiAggregator( ( MinAgg, @@ -1083,47 +1115,51 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create an aggregator from a tuple of 19 aggregators" in { - val agg: Aggregator[Int, Tuple19[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ], Tuple19[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ]] = MultiAggregator( + val agg: Aggregator[ + Int, + Tuple19[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ], + Tuple19[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ] + ] = MultiAggregator( ( MinAgg, MinAgg, @@ -1150,49 +1186,53 @@ class TupleAggregatorsTest extends 
AnyWordSpec with Matchers { } "Create an aggregator from a tuple of 20 aggregators" in { - val agg: Aggregator[Int, Tuple20[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ], Tuple20[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ]] = MultiAggregator( + val agg: Aggregator[ + Int, + Tuple20[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ], + Tuple20[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ] + ] = MultiAggregator( ( MinAgg, MinAgg, @@ -1220,51 +1260,55 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create an aggregator from a tuple of 21 aggregators" in { - val agg: Aggregator[Int, Tuple21[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ], Tuple21[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ]] = MultiAggregator( + val agg: Aggregator[ + Int, + Tuple21[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ], + Tuple21[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ] + ] = MultiAggregator( ( MinAgg, MinAgg, @@ -1293,53 +1337,57 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create an aggregator from a tuple of 22 aggregators" in { - val agg: Aggregator[Int, Tuple22[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ], Tuple22[ - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int, - Int - ]] = MultiAggregator( + val agg: Aggregator[ + Int, + Tuple22[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ], + Tuple22[ + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int, + Int + ] + ] = MultiAggregator( ( MinAgg, MinAgg, @@ -1541,21 +1589,25 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create a MonoidAggregator from a tuple of 13 MonoidAggregators" in { - val agg: MonoidAggregator[Long, Tuple13[ - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ], Tuple13[Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long]] = + val agg: MonoidAggregator[ + Long, + Tuple13[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ], + Tuple13[Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long] + ] = MultiAggregator( ( SizeAgg, @@ -1577,22 +1629,26 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create a MonoidAggregator from a tuple of 14 MonoidAggregators" in { - val agg: MonoidAggregator[Long, Tuple14[ - Long, - 
Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ], Tuple14[Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long]] = + val agg: MonoidAggregator[ + Long, + Tuple14[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ], + Tuple14[Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long] + ] = MultiAggregator( ( SizeAgg, @@ -1615,23 +1671,27 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create a MonoidAggregator from a tuple of 15 MonoidAggregators" in { - val agg: MonoidAggregator[Long, Tuple15[ - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ], Tuple15[Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long]] = + val agg: MonoidAggregator[ + Long, + Tuple15[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ], + Tuple15[Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long, Long] + ] = MultiAggregator( ( SizeAgg, @@ -1655,41 +1715,45 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create a MonoidAggregator from a tuple of 16 MonoidAggregators" in { - val agg: MonoidAggregator[Long, Tuple16[ - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ], Tuple16[ - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ]] = MultiAggregator( + val agg: MonoidAggregator[ + Long, + Tuple16[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ], + Tuple16[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ] + ] = MultiAggregator( ( SizeAgg, SizeAgg, @@ -1713,43 +1777,47 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create a MonoidAggregator from a tuple of 17 MonoidAggregators" in { - val agg: MonoidAggregator[Long, Tuple17[ - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ], Tuple17[ - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ]] = MultiAggregator( + val agg: MonoidAggregator[ + Long, + Tuple17[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ], + Tuple17[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ] + ] = MultiAggregator( ( SizeAgg, SizeAgg, @@ -1774,45 +1842,49 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create a MonoidAggregator from a tuple of 18 MonoidAggregators" in { - val agg: MonoidAggregator[Long, Tuple18[ - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ], Tuple18[ - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ]] = MultiAggregator( + val agg: MonoidAggregator[ + Long, + Tuple18[ + 
Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ], + Tuple18[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ] + ] = MultiAggregator( ( SizeAgg, SizeAgg, @@ -1838,47 +1910,51 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create a MonoidAggregator from a tuple of 19 MonoidAggregators" in { - val agg: MonoidAggregator[Long, Tuple19[ - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ], Tuple19[ - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ]] = MultiAggregator( + val agg: MonoidAggregator[ + Long, + Tuple19[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ], + Tuple19[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ] + ] = MultiAggregator( ( SizeAgg, SizeAgg, @@ -1905,49 +1981,53 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create a MonoidAggregator from a tuple of 20 MonoidAggregators" in { - val agg: MonoidAggregator[Long, Tuple20[ - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ], Tuple20[ - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ]] = MultiAggregator( + val agg: MonoidAggregator[ + Long, + Tuple20[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ], + Tuple20[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ] + ] = MultiAggregator( ( SizeAgg, SizeAgg, @@ -1975,51 +2055,55 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create a MonoidAggregator from a tuple of 21 MonoidAggregators" in { - val agg: MonoidAggregator[Long, Tuple21[ - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ], Tuple21[ - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ]] = MultiAggregator( + val agg: MonoidAggregator[ + Long, + Tuple21[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ], + Tuple21[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ] + ] = MultiAggregator( ( SizeAgg, SizeAgg, @@ -2048,53 +2132,57 @@ class TupleAggregatorsTest extends AnyWordSpec with Matchers { } "Create a MonoidAggregator from a tuple of 22 MonoidAggregators" in { - val agg: MonoidAggregator[Long, Tuple22[ - Long, - Long, - Long, - Long, - Long, - 
Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ], Tuple22[ - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long, - Long - ]] = MultiAggregator( + val agg: MonoidAggregator[ + Long, + Tuple22[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ], + Tuple22[ + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long, + Long + ] + ] = MultiAggregator( ( SizeAgg, SizeAgg, diff --git a/algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala b/algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala index bfc13fb67..aa0dd3e72 100644 --- a/algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala +++ b/algebird-test/src/test/scala/com/twitter/algebird/immutable/BitSetTest.scala @@ -177,7 +177,7 @@ object BitSetTest extends Properties("BitSet") { property("(x | y)(z) == x(z) || y(z)") = forAll { (x: BitSet, y: BitSet, z: Int) => // do apply first in case we mutate erroneously def law(z: Int): Boolean = - (x(z) || y(z)) == (x | y) (z) + (x(z) || y(z)) == (x | y)(z) law(z) && x.iterator.forall(law) && y.iterator.forall(law) } @@ -202,7 +202,7 @@ object BitSetTest extends Properties("BitSet") { property("(x & y)(z) == x(z) && y(z)") = forAll { (x: BitSet, y: BitSet, z: Int) => // do apply first in case we mutate erroneously def law(z: Int): Boolean = - (x(z) && y(z)) == (x & y) (z) + (x(z) && y(z)) == (x & y)(z) law(z) && x.iterator.forall(law) && y.iterator.forall(law) } From 910f47196c0ebb9c289d9c5296ec50b9d5744005 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Mon, 3 Jul 2023 10:34:29 -0400 Subject: [PATCH 300/306] Update scala-library, scala-reflect to 2.12.18 (#1126) * Update scala-library, scala-reflect to 2.12.18 * Update CI --------- Co-authored-by: Scala Steward --- .github/workflows/ci.yml | 6 +++--- build.sbt | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0372c6616..54c3b68ee 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,7 +30,7 @@ jobs: - 17 scala: - 2.11.12 - - 2.12.17 + - 2.12.18 - 2.13.11 test-coverage: runs-on: ubuntu-latest @@ -42,7 +42,7 @@ jobs: distribution: "temurin" java-version: 17 - run: | - sbt ++2.12.17 coverage test coverageReport + sbt ++2.12.18 coverage test coverageReport bash <(curl -s https://codecov.io/bash) mimaReport: runs-on: ubuntu-latest @@ -62,7 +62,7 @@ jobs: - 17 scala: - 2.11.12 - - 2.12.17 + - 2.12.18 - 2.13.11 microsite: runs-on: ubuntu-latest diff --git a/build.sbt b/build.sbt index 0ba21d001..7f41a314b 100644 --- a/build.sbt +++ b/build.sbt @@ -39,7 +39,7 @@ crossScalaVersions := Nil val sharedSettings = Seq( organization := "com.twitter", - scalaVersion := "2.12.17", + scalaVersion := "2.12.18", crossScalaVersions := Seq("2.11.12", scalaVersion.value), Test / parallelExecution := true, scalacOptions ++= Seq( From d7871798155345baed511537bae8b35d30ff2e47 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 5 Jul 2023 10:46:39 +0200 Subject: [PATCH 301/306] Update sbt to 1.7.3 (#1090) --- 
project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 22af2628c..6a9f03889 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.7.1 +sbt.version=1.7.3 From d26af9f861ab4461412a1554aa2272a8a0c499db Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 5 Jul 2023 13:18:03 +0200 Subject: [PATCH 302/306] Update scalafmt-core to 3.7.6 (#1128) --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index f8dafaf16..e0b86e549 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.7.5 +version=3.7.6 runner.dialect = scala212 fileOverride { "glob:**/scala-2.13*/**" { From fdcad883157700190d4af67cfbbe9269c2eb0fce Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 5 Jul 2023 13:20:43 +0200 Subject: [PATCH 303/306] Update sbt-microsites to 1.4.3 (#1111) --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 271de7e51..fbbc86a0c 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,7 +5,7 @@ resolvers ++= Seq( ) ) -addSbtPlugin("com.47deg" % "sbt-microsites" % "1.4.2") +addSbtPlugin("com.47deg" % "sbt-microsites" % "1.4.3") addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.2") From 6cf45abd018e2180c85368fcb40d9ab42c4f5ca4 Mon Sep 17 00:00:00 2001 From: Filipe Regadas Date: Sun, 9 Jul 2023 23:20:24 +0100 Subject: [PATCH 304/306] Update sbt-microsites to 1.4.3 (#1129) Co-authored-by: Scala Steward From 028606349ecd18a3e7dae0bc043330b4cd5894e5 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 10 Jul 2023 00:20:37 +0200 Subject: [PATCH 305/306] Update sbt to 1.9.1 (#1130) --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 6a9f03889..3c0b78a7c 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.7.3 +sbt.version=1.9.1 From faf123ba59f62d1d7b344336a80e09804f26bd21 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Mon, 10 Jul 2023 00:20:57 +0200 Subject: [PATCH 306/306] Update scalafmt-core to 3.7.7 (#1131) --- .scalafmt.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scalafmt.conf b/.scalafmt.conf index e0b86e549..394f11ac7 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,4 @@ -version=3.7.6 +version=3.7.7 runner.dialect = scala212 fileOverride { "glob:**/scala-2.13*/**" {