From ff2ea592b435eedec6c7112239e4bf82e486e0fe Mon Sep 17 00:00:00 2001 From: Yannick Heiber Date: Sun, 30 Jun 2019 13:08:17 +0200 Subject: [PATCH 1/8] Make IndexedReaderWriterStateT.flatMap stack-safe when F is --- .../cats/data/IndexedReaderWriterStateT.scala | 21 +++++++++++++++++-- .../IndexedReaderWriterStateTSuite.scala | 16 ++++++++++++++ 2 files changed, 35 insertions(+), 2 deletions(-) diff --git a/core/src/main/scala/cats/data/IndexedReaderWriterStateT.scala b/core/src/main/scala/cats/data/IndexedReaderWriterStateT.scala index 8d05f1f1aa..4e07f6b043 100644 --- a/core/src/main/scala/cats/data/IndexedReaderWriterStateT.scala +++ b/core/src/main/scala/cats/data/IndexedReaderWriterStateT.scala @@ -90,7 +90,7 @@ final class IndexedReaderWriterStateT[F[_], E, L, SA, SB, A](val runF: F[(E, SA) def flatMap[SC, B]( f: A => IndexedReaderWriterStateT[F, E, L, SB, SC, B] )(implicit F: FlatMap[F], L: Semigroup[L]): IndexedReaderWriterStateT[F, E, L, SA, SC, B] = - IndexedReaderWriterStateT.applyF { + IndexedReaderWriterStateT.shift { F.map(runF) { rwsfa => (e: E, sa: SA) => F.flatMap(rwsfa(e, sa)) { case (la, sb, a) => @@ -108,7 +108,7 @@ final class IndexedReaderWriterStateT[F[_], E, L, SA, SB, A](val runF: F[(E, SA) * Like [[map]], but allows the mapping function to return an effectful value. */ def flatMapF[B](faf: A => F[B])(implicit F: FlatMap[F]): IndexedReaderWriterStateT[F, E, L, SA, SB, B] = - IndexedReaderWriterStateT.applyF { + IndexedReaderWriterStateT.shift { F.map(runF) { rwsfa => (e: E, sa: SA) => F.flatMap(rwsfa(e, sa)) { case (l, sb, a) => @@ -390,6 +390,23 @@ object IndexedReaderWriterStateT extends IRWSTInstances with CommonIRWSTConstruc def modifyF[F[_], E, L, SA, SB](f: SA => F[SB])(implicit F: Applicative[F], L: Monoid[L]): IndexedReaderWriterStateT[F, E, L, SA, SB, Unit] = IndexedReaderWriterStateT((_, s) => F.map(f(s))((L.empty, _, ()))) + + /** + * Internal API — shifts the execution of `run` in the `F` context. 
+ * + * Used to build IndexedReaderWriterStateT values for `F[_]` data types that implement `Monad`, + * in which case it is safer to trigger the `F[_]` context earlier. + * + * This is needed for [[IndexedReaderWriterStateT.flatMap]] to be stack-safe when the underlying F[_] is, + * for further explanation see [[Kleisli.shift]]. + */ + private[data] def shift[F[_], E, L, SA, SB, A](runF: F[(E, SA) => F[(L, SB, A)]])(implicit F: FlatMap[F]): IndexedReaderWriterStateT[F, E, L, SA, SB, A] = + F match { + case ap: Applicative[F] @unchecked => + IndexedReaderWriterStateT.apply[F, E, L, SA, SB, A]((e: E, sa: SA) => F.flatMap(runF)(f => f(e, sa)))(ap) + case _ => + IndexedReaderWriterStateT.applyF(runF) + } } abstract private[data] class RWSTFunctions extends CommonIRWSTConstructors { diff --git a/tests/src/test/scala/cats/tests/IndexedReaderWriterStateTSuite.scala b/tests/src/test/scala/cats/tests/IndexedReaderWriterStateTSuite.scala index b2d258ea05..acd8ef2864 100644 --- a/tests/src/test/scala/cats/tests/IndexedReaderWriterStateTSuite.scala +++ b/tests/src/test/scala/cats/tests/IndexedReaderWriterStateTSuite.scala @@ -29,6 +29,22 @@ class ReaderWriterStateTSuite extends CatsSuite { rws.runS("context", 0).value should ===(70001) } + test("flatMap is stack-safe on repeated left binds when F is") { + val ns = (0 to 70000).toList + val one = addLogUnit(1) + val rws = ns.foldLeft(one)((acc, _) => acc.flatMap(_ => one)) + + rws.runS("context", 0).value should ===(70002) + } + + test("flatMap is stack-safe on repeated right binds when F is") { + val ns = (0 to 70000).toList + val one = addLogUnit(1) + val rws = ns.foldLeft(one)((acc, _) => one.flatMap(_ => acc)) + + rws.runS("context", 0).value should ===(70002) + } + test("map2 combines logs") { forAll { (rwsa: ReaderWriterState[String, Vector[Int], Int, Int], From 78bad91db4d11a85672b3c1d9bb6f22d6788b296 Mon Sep 17 00:00:00 2001 From: Yannick Heiber Date: Sun, 30 Jun 2019 13:10:44 +0200 Subject: [PATCH 2/8] Make 
IndexedStateT.map stack-safe by using AndThen --- core/src/main/scala/cats/data/IndexedStateT.scala | 2 +- tests/src/test/scala/cats/tests/IndexedStateTSuite.scala | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/core/src/main/scala/cats/data/IndexedStateT.scala b/core/src/main/scala/cats/data/IndexedStateT.scala index 2610d0259e..b9159a52db 100644 --- a/core/src/main/scala/cats/data/IndexedStateT.scala +++ b/core/src/main/scala/cats/data/IndexedStateT.scala @@ -101,7 +101,7 @@ final class IndexedStateT[F[_], SA, SB, A](val runF: F[SA => F[(SB, A)]]) extend */ def transform[B, SC](f: (SB, A) => (SC, B))(implicit F: Functor[F]): IndexedStateT[F, SA, SC, B] = IndexedStateT.applyF(F.map(runF) { sfsa => - sfsa.andThen { fsa => + AndThen(sfsa).andThen { fsa => F.map(fsa) { case (s, a) => f(s, a) } } }) diff --git a/tests/src/test/scala/cats/tests/IndexedStateTSuite.scala b/tests/src/test/scala/cats/tests/IndexedStateTSuite.scala index b2193d04c5..9bc8a971da 100644 --- a/tests/src/test/scala/cats/tests/IndexedStateTSuite.scala +++ b/tests/src/test/scala/cats/tests/IndexedStateTSuite.scala @@ -263,6 +263,15 @@ class IndexedStateTSuite extends CatsSuite { private val stackSafeTestSize = if (Platform.isJvm) 100000 else 100 + test("repeated map is stack safe") { + val unit = StateT.pure[Eval, Unit, Int](0) + val count = stackSafeTestSize + val result = (0 until count).foldLeft(unit) { (acc, _) => + acc.map(_ + 1) + } + result.run(()).value should ===(((), count)) + } + test("flatMap is stack safe on repeated left binds when F is") { val unit = StateT.pure[Eval, Unit, Unit](()) val count = stackSafeTestSize From 3c671ad368b02e26eab02e8fa8101bcae4dc609e Mon Sep 17 00:00:00 2001 From: Yannick Heiber Date: Sun, 30 Jun 2019 13:23:35 +0200 Subject: [PATCH 3/8] Reformatting from prePR --- .../cats/data/IndexedReaderWriterStateT.scala | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git 
a/core/src/main/scala/cats/data/IndexedReaderWriterStateT.scala b/core/src/main/scala/cats/data/IndexedReaderWriterStateT.scala index 4e07f6b043..9667d43019 100644 --- a/core/src/main/scala/cats/data/IndexedReaderWriterStateT.scala +++ b/core/src/main/scala/cats/data/IndexedReaderWriterStateT.scala @@ -392,15 +392,17 @@ object IndexedReaderWriterStateT extends IRWSTInstances with CommonIRWSTConstruc IndexedReaderWriterStateT((_, s) => F.map(f(s))((L.empty, _, ()))) /** - * Internal API — shifts the execution of `run` in the `F` context. - * - * Used to build IndexedReaderWriterStateT values for `F[_]` data types that implement `Monad`, - * in which case it is safer to trigger the `F[_]` context earlier. - * - * This is needed for [[IndexedReaderWriterStateT.flatMap]] to be stack-safe when the underlying F[_] is, - * for further explanation see [[Kleisli.shift]]. - */ - private[data] def shift[F[_], E, L, SA, SB, A](runF: F[(E, SA) => F[(L, SB, A)]])(implicit F: FlatMap[F]): IndexedReaderWriterStateT[F, E, L, SA, SB, A] = + * Internal API — shifts the execution of `run` in the `F` context. + * + * Used to build IndexedReaderWriterStateT values for `F[_]` data types that implement `Monad`, + * in which case it is safer to trigger the `F[_]` context earlier. + * + * This is needed for [[IndexedReaderWriterStateT.flatMap]] to be stack-safe when the underlying F[_] is, + * for further explanation see [[Kleisli.shift]]. 
+ */ + private[data] def shift[F[_], E, L, SA, SB, A]( + runF: F[(E, SA) => F[(L, SB, A)]] + )(implicit F: FlatMap[F]): IndexedReaderWriterStateT[F, E, L, SA, SB, A] = F match { case ap: Applicative[F] @unchecked => IndexedReaderWriterStateT.apply[F, E, L, SA, SB, A]((e: E, sa: SA) => F.flatMap(runF)(f => f(e, sa)))(ap) From e131e535d04378c2657e4d68e81a657705406dd1 Mon Sep 17 00:00:00 2001 From: Aleksey Troitskiy Date: Fri, 12 Jul 2019 20:46:12 +0300 Subject: [PATCH 4/8] fix #2940 (#2942) --- free/src/main/scala/cats/free/Cofree.scala | 2 +- free/src/test/scala/cats/free/CofreeSuite.scala | 13 +++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/free/src/main/scala/cats/free/Cofree.scala b/free/src/main/scala/cats/free/Cofree.scala index f56ef00885..b2e9e95e44 100644 --- a/free/src/main/scala/cats/free/Cofree.scala +++ b/free/src/main/scala/cats/free/Cofree.scala @@ -80,7 +80,7 @@ object Cofree extends CofreeInstances { * A stack-safe algebraic recursive fold out of the cofree comonad. */ def cata[F[_], A, B](cof: Cofree[F, A])(folder: (A, F[B]) => Eval[B])(implicit F: Traverse[F]): Eval[B] = - F.traverse(cof.tailForced)(cata(_)(folder)).flatMap(folder(cof.head, _)) + F.traverse(cof.tailForced)(c => Eval.defer(cata(c)(folder))).flatMap(folder(cof.head, _)) /** * A monadic recursive fold out of the cofree comonad into a monad which can express Eval's stack-safety. 
diff --git a/free/src/test/scala/cats/free/CofreeSuite.scala b/free/src/test/scala/cats/free/CofreeSuite.scala index e54db3ff7b..eb2f7810a9 100644 --- a/free/src/test/scala/cats/free/CofreeSuite.scala +++ b/free/src/test/scala/cats/free/CofreeSuite.scala @@ -108,6 +108,19 @@ class CofreeSuite extends CatsSuite { cata should ===(nelUnfoldedHundred) } + test("Cofree.cata is stack-safe") { + val unfolded = Cofree.unfold[Option, Int](0)(i => if (i == 50000) None else Some(i + 1)) + val sum = List.tabulate(50000)(identity).sum + val cata = + Cofree + .cata[Option, Int, Int](unfolded)( + (i, lb) => Eval.now(lb.fold(0)(_ + i)) + ) + .value + + cata should ===(sum) + } + test("Cofree.cataM") { type EvalOption[A] = OptionT[Eval, A] From e00b22488ee6700f603c21bcf50297879c75ea91 Mon Sep 17 00:00:00 2001 From: Alireza Meskin Date: Fri, 12 Jul 2019 22:02:49 +0200 Subject: [PATCH 5/8] Update README.md to add phony library (#2944) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 882f49817e..ebdce36c3a 100644 --- a/README.md +++ b/README.md @@ -170,6 +170,7 @@ By sharing the same set of type classes, instances and data types provided by Ca * [log4cats](https://github.com/ChristopherDavenport/log4cats): functional logging * [monadic-html](https://github.com/OlivierBlanvillain/monadic-html): Tiny DOM binding library for Scala.js * [Monix](https://github.com/monix/monix): high-performance library for composing asynchronous and event-based programs + * [phony](https://github.com/alirezameskin/phony): Fake data generator * [pureconfig](https://github.com/pureconfig/pureconfig): A boilerplate-free library for loading configuration files * [rainier](https://github.com/stripe/rainier): Bayesian inference in Scala * [scala-forex](https://github.com/snowplow/scala-forex): exchange rate lookups From aaed15c2a540a798f0e3a12712417e704f27171c Mon Sep 17 00:00:00 2001 From: Sean Daru Date: Fri, 19 Jul 2019 17:50:29 +0900 Subject: 
[PATCH 6/8] =?UTF-8?q?Update=20"README.md"=20for=20Scala=202.13.0?= =?UTF-8?q?=20compiler=20specification=20changing(a=E2=80=A6=20(#2949)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update "README.md" for Scala 2.13.0 compiler specification changing(about partial unification). #2948 * Correct the sentence. #2948 --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index ebdce36c3a..e4aef812a8 100644 --- a/README.md +++ b/README.md @@ -67,12 +67,14 @@ Silver Sponsors are those who have pledged $2,000 to $5,000. Cats is currently available for Scala 2.10 (up to 1.2.x), 2.11, 2.12, 2.13.0, and [Scala.js](http://www.scala-js.org/). -Cats relies on improved type inference via the fix for [SI-2712](https://github.com/scala/bug/issues/2712), which is not enabled by default. For **Scala 2.11.9 or later** you should add the following to your `build.sbt`: +Cats relies on improved type inference via the fix for [SI-2712](https://github.com/scala/bug/issues/2712), which is not enabled by default. For **Scala 2.11.9+ or 2.12** you should add the following to your `build.sbt`: ```scala scalacOptions += "-Ypartial-unification" ``` +(Partial unification is on by default since Scala 2.13, the compiler no longer accepts `-Ypartial-unification`) + **Or**, if you need to support older versions of Scala you can use the [sbt-partial-unification](https://github.com/fiadliel/sbt-partial-unification#sbt-partial-unification) plugin which extends support back through **Scala 2.10.6 or later**, to add it, simply add this line to your `plugins.sbt`: ```scala From c40c0a379118f63cc19ec66257978e0fbc17ee05 Mon Sep 17 00:00:00 2001 From: tanaka takaya Date: Sat, 20 Jul 2019 04:51:31 +0900 Subject: [PATCH 7/8] Add `init` and `last` to `NonEmptyChain` like `NonEmptyList`. 
(#2953) --- core/src/main/scala/cats/data/Chain.scala | 36 +++++++++++++++++++ .../main/scala/cats/data/NonEmptyChain.scala | 19 ++++++++-- .../test/scala/cats/tests/ChainSuite.scala | 6 ++++ .../scala/cats/tests/NonEmptyChainSuite.scala | 12 +++++++ 4 files changed, 71 insertions(+), 2 deletions(-) diff --git a/core/src/main/scala/cats/data/Chain.scala b/core/src/main/scala/cats/data/Chain.scala index fb1594a588..0333ac404a 100644 --- a/core/src/main/scala/cats/data/Chain.scala +++ b/core/src/main/scala/cats/data/Chain.scala @@ -47,11 +47,47 @@ sealed abstract class Chain[+A] { result } + /** + * Returns the init and last of this Chain if non empty, none otherwise. Amortized O(1). + */ + final def initLast: Option[(Chain[A], A)] = { + var c: Chain[A] = this + val lefts = new collection.mutable.ArrayBuffer[Chain[A]] + // scalastyle:off null + var result: Option[(Chain[A], A)] = null + while (result eq null) { + c match { + case Singleton(a) => + val pre = + if (lefts.isEmpty) nil + else lefts.reduceLeft((x, y) => Append(x, y)) + result = Some(pre -> a) + case Append(l, r) => c = r; lefts += l + case Wrap(seq) => + val init = fromSeq(seq.init) + val pre = + if (lefts.isEmpty) init + else lefts.reduceLeft((x, y) => Append(x, y)) ++ init + result = Some((pre, seq.last)) + case Empty => + // Empty is only top level, it is never internal to an Append + result = None + } + } + // scalastyle:on null + result + } + /** * Returns the head of this Chain if non empty, none otherwise. Amortized O(1). */ def headOption: Option[A] = uncons.map(_._1) + /** + * Returns the last of this Chain if non empty, none otherwise. Amortized O(1). + */ + final def lastOption: Option[A] = initLast.map(_._2) + /** * Returns true if there are no elements in this collection. 
*/ diff --git a/core/src/main/scala/cats/data/NonEmptyChain.scala b/core/src/main/scala/cats/data/NonEmptyChain.scala index bce726b7b4..85f66f8d60 100644 --- a/core/src/main/scala/cats/data/NonEmptyChain.scala +++ b/core/src/main/scala/cats/data/NonEmptyChain.scala @@ -191,15 +191,30 @@ class NonEmptyChainOps[A](private val value: NonEmptyChain[A]) extends AnyVal { final def uncons: (A, Chain[A]) = toChain.uncons.get /** - * Returns the first element of this chain. + * Returns the init and last of this NonEmptyChain. Amortized O(1). + */ + final def initLast: (Chain[A], A) = toChain.initLast.get + + /** + * Returns the first element of this NonEmptyChain. Amortized O(1). */ final def head: A = uncons._1 /** - * Returns all but the first element of this chain. + * Returns all but the first element of this NonEmptyChain. Amortized O(1). */ final def tail: Chain[A] = uncons._2 + /** + * Returns all but the last element of this NonEmptyChain. Amortized O(1). + */ + final def init: Chain[A] = initLast._1 + + /** + * Returns the last element of this NonEmptyChain. Amortized O(1). + */ + final def last: A = initLast._2 + /** * Tests if some element is contained in this chain. 
* {{{ diff --git a/tests/src/test/scala/cats/tests/ChainSuite.scala b/tests/src/test/scala/cats/tests/ChainSuite.scala index c6069efd8e..2b870fa18c 100644 --- a/tests/src/test/scala/cats/tests/ChainSuite.scala +++ b/tests/src/test/scala/cats/tests/ChainSuite.scala @@ -68,6 +68,12 @@ class ChainSuite extends CatsSuite { } } + test("lastOption") { + forAll { (c: Chain[Int]) => + c.lastOption should ===(c.toList.lastOption) + } + } + test("size is consistent with toList.size") { forAll { (ci: Chain[Int]) => ci.size.toInt should ===(ci.toList.size) diff --git a/tests/src/test/scala/cats/tests/NonEmptyChainSuite.scala b/tests/src/test/scala/cats/tests/NonEmptyChainSuite.scala index b9939ee25c..5a2ea55e28 100644 --- a/tests/src/test/scala/cats/tests/NonEmptyChainSuite.scala +++ b/tests/src/test/scala/cats/tests/NonEmptyChainSuite.scala @@ -139,4 +139,16 @@ class NonEmptyChainSuite extends CatsSuite { ci.distinct.toList should ===(ci.toList.distinct) } } + + test("init") { + forAll { ci: NonEmptyChain[Int] => + ci.init.toList should ===(ci.toList.init) + } + } + + test("last") { + forAll { ci: NonEmptyChain[Int] => + ci.last should ===(ci.toList.last) + } + } } From 7e94169bbd842e7fe541385ac9651fbd8702d86f Mon Sep 17 00:00:00 2001 From: "Kai(luo) Wang" Date: Fri, 19 Jul 2019 22:39:42 +0200 Subject: [PATCH 8/8] Added `NonEmptyLazyList` to replace `NonEmptyStream` (#2941) * make NonEmptyStreamSuite 2.12- specific * reformat * partially converted NonEmptyLazyList from NonEmptyList * better TODOs and replaced occurrances of Nil * first pass at intended impl * fixed imports * removed collectFirstSome. 
Defers to default instance impl * reduceLeftTo and reduceRightTo now uses same impl from NonEmptyChain * refactored instances to pull from existing LazyList instances * fix compilation * added missing instances for NonEmptyLazyList * moved NonEmptyChain to new structure * NonEmptyVector is moved to new structure * refactor tests * refactor use a single base trait * moving NonEmptyList to new structure * reformat * maintain BC * Update NonEmptyStreamSuite.scala * fix new kind projector * minor rename to make it more consistent * remove unused file * Update ScalaVersionSpecific.scala * Update ScalaVersionSpecific.scala * correct filename * restore NonEmptyList and NonEmptyVector * more reversal * refactor shared code between NonEmptyChain and NonEmptyVector * fix cross compile for NonEmptyVector * reformat * rename * fix BC and compiler warning * make pacakge more restrictive. * make package private more restrictive * remove old work around no longer needed * removed another work around for 2.10 no longer needed * added link to original newtype lib * added more comments * reformat --- .../data/ScalaVersionSpecificPackage.scala | 5 + .../cats/data/NonEmptyLazyList.scala | 384 ++++++++++++++++++ .../data/ScalaVersionSpecificPackage.scala | 7 + .../cats/data/AbstractNonEmptyInstances.scala | 81 ++++ core/src/main/scala/cats/data/Newtype.scala | 2 + .../main/scala/cats/data/NonEmptyChain.scala | 123 ++---- .../main/scala/cats/data/NonEmptyList.scala | 1 + .../scala/cats/data/NonEmptyMapImpl.scala | 4 +- .../main/scala/cats/data/NonEmptySet.scala | 5 +- .../main/scala/cats/data/NonEmptyVector.scala | 3 +- core/src/main/scala/cats/data/package.scala | 2 +- .../compat/scalaVersionMoreSpecific.scala | 1 + .../kernel/instances/StreamInstances.scala | 1 + .../discipline/ScalaVersionSpecific.scala | 7 + .../discipline/ScalaVersionSpecific.scala | 19 + .../{Arbitrary.scala => arbitrary.scala} | 3 +- .../cats/tests/NonEmptyStreamSuite.scala | 169 ++++++++ 
.../cats/tests/NonEmptyLazyListSuite.scala | 137 +++++++ .../scala/cats/tests/NonEmptyChainSuite.scala | 7 + .../test/scala/cats/tests/OneAndSuite.scala | 183 +-------- .../test/scala/cats/tests/ParallelSuite.scala | 3 + 21 files changed, 863 insertions(+), 284 deletions(-) create mode 100644 core/src/main/scala-2.12-/cats/data/ScalaVersionSpecificPackage.scala create mode 100644 core/src/main/scala-2.13+/cats/data/NonEmptyLazyList.scala create mode 100644 core/src/main/scala-2.13+/cats/data/ScalaVersionSpecificPackage.scala create mode 100644 core/src/main/scala/cats/data/AbstractNonEmptyInstances.scala create mode 100644 laws/src/main/scala-2.12-/cats/laws/discipline/ScalaVersionSpecific.scala create mode 100644 laws/src/main/scala-2.13+/cats/laws/discipline/ScalaVersionSpecific.scala rename laws/src/main/scala/cats/laws/discipline/{Arbitrary.scala => arbitrary.scala} (99%) create mode 100644 tests/src/test/scala-2.12-/cats/tests/NonEmptyStreamSuite.scala create mode 100644 tests/src/test/scala-2.13+/cats/tests/NonEmptyLazyListSuite.scala diff --git a/core/src/main/scala-2.12-/cats/data/ScalaVersionSpecificPackage.scala b/core/src/main/scala-2.12-/cats/data/ScalaVersionSpecificPackage.scala new file mode 100644 index 0000000000..24a811e068 --- /dev/null +++ b/core/src/main/scala-2.12-/cats/data/ScalaVersionSpecificPackage.scala @@ -0,0 +1,5 @@ +package cats + +package data + +abstract private[data] class ScalaVersionSpecificPackage diff --git a/core/src/main/scala-2.13+/cats/data/NonEmptyLazyList.scala b/core/src/main/scala-2.13+/cats/data/NonEmptyLazyList.scala new file mode 100644 index 0000000000..351d456165 --- /dev/null +++ b/core/src/main/scala-2.13+/cats/data/NonEmptyLazyList.scala @@ -0,0 +1,384 @@ +package cats +package data + +import NonEmptyLazyList.create +import kernel.PartialOrder +import instances.lazyList._ + +import scala.collection.immutable.TreeSet + +object NonEmptyLazyList extends NonEmptyLazyListInstances { + + // The following 3 types are 
components of a technique to + // create a no-boxing newtype. It's coped from the + // newtypes lib by @alexknvl + // For more detail see https://github.com/alexknvl/newtypes + private[data] type Base + private[data] trait Tag extends Any + /* aliased in data package as NonEmptyLazyList */ + type Type[+A] <: Base with Tag + + private[data] def create[A](s: LazyList[A]): Type[A] = + s.asInstanceOf[Type[A]] + + private[data] def unwrap[A](s: Type[A]): LazyList[A] = + s.asInstanceOf[LazyList[A]] + + def fromLazyList[A](as: LazyList[A]): Option[NonEmptyLazyList[A]] = + if (as.nonEmpty) Option(create(as)) else None + + def fromLazyListUnsafe[A](ll: LazyList[A]): NonEmptyLazyList[A] = + if (ll.nonEmpty) create(ll) + else throw new IllegalArgumentException("Cannot create NonEmptyLazyList from empty LazyList") + + def fromNonEmptyList[A](as: NonEmptyList[A]): NonEmptyLazyList[A] = + create(LazyList.from(as.toList)) + + def fromNonEmptyVector[A](as: NonEmptyVector[A]): NonEmptyLazyList[A] = + create(LazyList.from(as.toVector)) + + def fromSeq[A](as: Seq[A]): Option[NonEmptyLazyList[A]] = + if (as.nonEmpty) Option(create(LazyList.from(as))) else None + + def fromLazyListPrepend[A](a: A, ca: LazyList[A]): NonEmptyLazyList[A] = + create(a +: ca) + + def fromLazyListAppend[A](ca: LazyList[A], a: A): NonEmptyLazyList[A] = + create(ca :+ a) + + def apply[A](a: => A, as: A*): NonEmptyLazyList[A] = + create(LazyList.concat(LazyList(a), LazyList.from(as))) + + implicit def catsNonEmptyLazyListOps[A](value: NonEmptyLazyList[A]): NonEmptyLazyListOps[A] = + new NonEmptyLazyListOps(value) +} + +class NonEmptyLazyListOps[A](private val value: NonEmptyLazyList[A]) extends AnyVal { + + /** + * Converts this NonEmptyLazyList to a `LazyList` + */ + final def toLazyList: LazyList[A] = NonEmptyLazyList.unwrap(value) + + final def map[B](f: A => B): NonEmptyLazyList[B] = create(toLazyList.map(f)) + + /** + * Returns the last element + */ + final def last: A = toLazyList.last + + /** + * 
Returns all elements but the last + */ + final def init: LazyList[A] = toLazyList.init + + /** + * Returns the size of this NonEmptyLazyList + */ + final def size: Int = toLazyList.size + + /** + * Returns the length of this NonEmptyLazyList + */ + final def length: Int = toLazyList.length + + /** + * Returns a new NonEmptyLazyList consisting of `a` followed by this + */ + final def prepend[AA >: A](a: AA): NonEmptyLazyList[AA] = + create(a #:: toLazyList) + + /** + * Alias for [[prepend]]. + */ + final def +:[AA >: A](a: AA): NonEmptyLazyList[AA] = + prepend(a) + + /** + * Alias for [[prepend]]. + */ + final def #::[AA >: A](a: AA): NonEmptyLazyList[AA] = + prepend(a) + + /** + * Returns a new NonEmptyLazyList consisting of this followed by `a` + */ + final def append[AA >: A](a: AA): NonEmptyLazyList[AA] = + create(toLazyList :+ a) + + /** + * Alias for [[append]]. + */ + final def :+[AA >: A](a: AA): NonEmptyLazyList[AA] = + append(a) + + /** + * concatenates this with `ll` + */ + final def concat[AA >: A](ll: LazyList[AA]): NonEmptyLazyList[AA] = + create(toLazyList ++ ll) + + /** + * Concatenates this with `nell` + */ + final def concatNell[AA >: A](nell: NonEmptyLazyList[AA]): NonEmptyLazyList[AA] = + create(toLazyList ++ nell.toLazyList) + + /** + * Alias for concatNell + */ + final def ++[AA >: A](nell: NonEmptyLazyList[AA]): NonEmptyLazyList[AA] = + concatNell(nell) + + /** + * Appends the given LazyList + */ + final def appendLazyList[AA >: A](nell: LazyList[AA]): NonEmptyLazyList[AA] = + if (nell.isEmpty) value + else create(toLazyList ++ nell) + + /** + * Alias for `appendLazyList` + */ + final def :++[AA >: A](c: LazyList[AA]): NonEmptyLazyList[AA] = + appendLazyList(c) + + /** + * Prepends the given LazyList + */ + final def prependLazyList[AA >: A](c: LazyList[AA]): NonEmptyLazyList[AA] = + if (c.isEmpty) value + else create(c ++ toLazyList) + + /** + * Prepends the given NonEmptyLazyList + */ + final def prependNell[AA >: A](c: 
NonEmptyLazyList[AA]): NonEmptyLazyList[AA] = + create(c.toLazyList ++ toLazyList) + + /** + * Alias for `prependNell` + */ + final def ++:[AA >: A](c: NonEmptyLazyList[AA]): NonEmptyLazyList[AA] = + prependNell(c) + + /** + * Converts this NonEmptyLazyList to a `NonEmptyList`. + */ // TODO also add toNonEmptyLazyList to NonEmptyList? + final def toNonEmptyList: NonEmptyList[A] = + NonEmptyList.fromListUnsafe(toLazyList.toList) + + /** + * Converts this LazyList to a `NonEmptyVector`. + */ + final def toNonEmptyVector: NonEmptyVector[A] = + NonEmptyVector.fromVectorUnsafe(toLazyList.toVector) + + /** + * Returns the first element + */ + final def head: A = toLazyList.head + + /** + * Returns all but the first element + */ + final def tail: LazyList[A] = toLazyList.tail + + /** + * Tests if some element is contained in this NonEmptyLazyList + */ + final def contains(a: A)(implicit A: Eq[A]): Boolean = + toLazyList.contains(a) + + /** + * Tests whether a predicate holds for all elements + */ + final def forall(p: A => Boolean): Boolean = + toLazyList.forall(p) + + /** + * Tests whether a predicate holds for at least one element of this LazyList + */ + final def exists(f: A => Boolean): Boolean = + toLazyList.exists(f) + + /** + * Returns the first value that matches the given predicate. + */ + final def find(f: A => Boolean): Option[A] = + toLazyList.find(f) + + /** + * Returns a new `LazyList` containing all elements where the result of `pf` is final defined. + */ + final def collect[B](pf: PartialFunction[A, B]): LazyList[B] = + toLazyList.collect(pf) + + /** + * Finds the first element of this `NonEmptyLazyList` for which the given partial + * function is defined, and applies the partial function to it. + */ + final def collectLazyList[B](pf: PartialFunction[A, B]): Option[B] = toLazyList.collectFirst(pf) + + /** + * Filters all elements of this NonEmptyLazyList that do not satisfy the given predicate. 
+ */ + final def filter(p: A => Boolean): LazyList[A] = toLazyList.filter(p) + + /** + * Filters all elements of this NonEmptyLazyList that satisfy the given predicate. + */ + final def filterNot(p: A => Boolean): LazyList[A] = filter(t => !p(t)) + + /** + * Left-associative fold using f. + */ + final def foldLeft[B](b: B)(f: (B, A) => B): B = + toLazyList.foldLeft(b)(f) + + /** + * Right-associative fold using f. + */ + final def foldRight[B](z: B)(f: (A, B) => B): B = + toLazyList.foldRight(z)(f) + + /** + * Left-associative reduce using f. + */ + final def reduceLeft(f: (A, A) => A): A = + toLazyList.reduceLeft(f) + + /** + * Apply `f` to the "initial element" of this LazyList and lazily combine it + * with every other value using the given function `g`. + */ + final def reduceLeftTo[B](f: A => B)(g: (B, A) => B): B = { + val iter = toLazyList.iterator + var result = f(iter.next) + while (iter.hasNext) { result = g(result, iter.next) } + result + } + + /** + * Right-associative reduce using f. + */ + final def reduceRight[AA >: A](f: (A, AA) => AA): AA = + toLazyList.reduceRight(f) + + /** + * Apply `f` to the "initial element" of this NonEmptyLazyList and + * lazily combine it with every other value using the given function `g`. 
+ */ + final def reduceRightTo[B](f: A => B)(g: (A, B) => B): B = { + val iter = toLazyList.reverseIterator + var result = f(iter.next) + while (iter.hasNext) { result = g(iter.next, result) } + result + } + + /** + * Reduce using the Semigroup of A + */ + final def reduce[AA >: A](implicit S: Semigroup[AA]): AA = + S.combineAllOption(iterator).get + + /** + * Applies the supplied function to each element and returns a new NonEmptyLazyList from the concatenated results + */ + final def flatMap[B](f: A => NonEmptyLazyList[B]): NonEmptyLazyList[B] = + create(toLazyList.flatMap(f.andThen(_.toLazyList))) + + /** + * Zips this `NonEmptyLazyList` with another `NonEmptyLazyList` and applies a function for each pair of elements + */ + final def zipWith[B, C](b: NonEmptyLazyList[B])(f: (A, B) => C): NonEmptyLazyList[C] = + create(toLazyList.zip(b.toLazyList).map { case (a, b) => f(a, b) }) + + /** + * Zips each element of this `NonEmptyLazyList` with its index + */ + final def zipWithIndex: NonEmptyLazyList[(A, Int)] = + create(toLazyList.zipWithIndex) + + final def iterator: Iterator[A] = toLazyList.iterator + + final def reverseIterator: Iterator[A] = toLazyList.reverseIterator + + /** + * Reverses this `NonEmptyLazyList` + */ + final def reverse: NonEmptyLazyList[A] = + create(toLazyList.reverse) + + /** + * Remove duplicates. Duplicates are checked using `Order[_]` instance. 
+ */ + def distinct[AA >: A](implicit O: Order[AA]): NonEmptyLazyList[AA] = { + implicit val ord = O.toOrdering + + val buf = LazyList.newBuilder[AA] + toLazyList.foldLeft(TreeSet.empty[AA]) { (elementsSoFar, a) => + if (elementsSoFar(a)) elementsSoFar + else { + buf += a; elementsSoFar + a + } + } + + create(buf.result()) + } +} + +sealed abstract private[data] class NonEmptyLazyListInstances extends NonEmptyLazyListInstances1 { + + implicit val catsDataInstancesForNonEmptyLazyList + : Bimonad[NonEmptyLazyList] with NonEmptyTraverse[NonEmptyLazyList] with SemigroupK[NonEmptyLazyList] = + new AbstractNonEmptyInstances[LazyList, NonEmptyLazyList] { + + def extract[A](fa: NonEmptyLazyList[A]): A = fa.head + + def nonEmptyTraverse[G[_]: Apply, A, B](fa: NonEmptyLazyList[A])(f: A => G[B]): G[NonEmptyLazyList[B]] = + Foldable[LazyList] + .reduceRightToOption[A, G[LazyList[B]]](fa.tail)(a => Apply[G].map(f(a))(LazyList.apply(_))) { (a, lglb) => + Apply[G].map2Eval(f(a), lglb)(_ +: _) + } + .map { + case None => Apply[G].map(f(fa.head))(h => create(LazyList(h))) + case Some(gtail) => Apply[G].map2(f(fa.head), gtail)((h, t) => create(LazyList(h) ++ t)) + } + .value + + def reduceLeftTo[A, B](fa: NonEmptyLazyList[A])(f: A => B)(g: (B, A) => B): B = fa.reduceLeftTo(f)(g) + + def reduceRightTo[A, B](fa: NonEmptyLazyList[A])(f: A => B)(g: (A, cats.Eval[B]) => cats.Eval[B]): cats.Eval[B] = + Eval.defer(fa.reduceRightTo(a => Eval.now(f(a))) { (a, b) => + Eval.defer(g(a, b)) + }) + } + + implicit def catsDataOrderForNonEmptyLazyList[A: Order]: Order[NonEmptyLazyList[A]] = + Order[LazyList[A]].asInstanceOf[Order[NonEmptyLazyList[A]]] + + implicit def catsDataSemigroupForNonEmptyLazyList[A]: Semigroup[NonEmptyLazyList[A]] = + Semigroup[LazyList[A]].asInstanceOf[Semigroup[NonEmptyLazyList[A]]] + + implicit def catsDataShowForNonEmptyLazyList[A](implicit A: Show[A]): Show[NonEmptyLazyList[A]] = + Show.show[NonEmptyLazyList[A]](nec => 
s"NonEmpty${Show[LazyList[A]].show(nec.toLazyList)}") + +} + +sealed abstract private[data] class NonEmptyLazyListInstances1 extends NonEmptyLazyListInstances2 { + + implicit def catsDataHashForNonEmptyLazyList[A: Hash]: Hash[NonEmptyLazyList[A]] = + Hash[LazyList[A]].asInstanceOf[Hash[NonEmptyLazyList[A]]] + +} + +sealed abstract private[data] class NonEmptyLazyListInstances2 extends NonEmptyLazyListInstances3 { + implicit def catsDataPartialOrderForNonEmptyLazyList[A: PartialOrder]: PartialOrder[NonEmptyLazyList[A]] = + PartialOrder[LazyList[A]].asInstanceOf[PartialOrder[NonEmptyLazyList[A]]] +} + +sealed abstract private[data] class NonEmptyLazyListInstances3 { + implicit def catsDataEqForNonEmptyLazyList[A: Eq]: Eq[NonEmptyLazyList[A]] = + Eq[LazyList[A]].asInstanceOf[Eq[NonEmptyLazyList[A]]] +} diff --git a/core/src/main/scala-2.13+/cats/data/ScalaVersionSpecificPackage.scala b/core/src/main/scala-2.13+/cats/data/ScalaVersionSpecificPackage.scala new file mode 100644 index 0000000000..6ca3d1911e --- /dev/null +++ b/core/src/main/scala-2.13+/cats/data/ScalaVersionSpecificPackage.scala @@ -0,0 +1,7 @@ +package cats + +package data + +abstract private[data] class ScalaVersionSpecificPackage { + type NonEmptyLazyList[+A] = NonEmptyLazyList.Type[A] +} diff --git a/core/src/main/scala/cats/data/AbstractNonEmptyInstances.scala b/core/src/main/scala/cats/data/AbstractNonEmptyInstances.scala new file mode 100644 index 0000000000..57a65ecf42 --- /dev/null +++ b/core/src/main/scala/cats/data/AbstractNonEmptyInstances.scala @@ -0,0 +1,81 @@ +package cats +package data + +abstract private[data] class AbstractNonEmptyInstances[F[_], NonEmptyF[_]](implicit MF: Monad[F], + CF: CoflatMap[F], + TF: Traverse[F], + SF: SemigroupK[F]) + extends Bimonad[NonEmptyF] + with NonEmptyTraverse[NonEmptyF] + with SemigroupK[NonEmptyF] { + val monadInstance = MF.asInstanceOf[Monad[NonEmptyF]] + val coflatMapInstance = CF.asInstanceOf[CoflatMap[NonEmptyF]] + val traverseInstance = 
Traverse[F].asInstanceOf[Traverse[NonEmptyF]] + val semiGroupKInstance = SemigroupK[F].asInstanceOf[SemigroupK[NonEmptyF]] + + def combineK[A](a: NonEmptyF[A], b: NonEmptyF[A]): NonEmptyF[A] = + semiGroupKInstance.combineK(a, b) + + def pure[A](x: A): NonEmptyF[A] = monadInstance.pure(x) + + override def map[A, B](fa: NonEmptyF[A])(f: A => B): NonEmptyF[B] = monadInstance.map(fa)(f) + + def flatMap[A, B](fa: NonEmptyF[A])(f: A => NonEmptyF[B]): NonEmptyF[B] = + monadInstance.flatMap(fa)(f) + + override def map2[A, B, Z](fa: NonEmptyF[A], fb: NonEmptyF[B])(f: (A, B) => Z): NonEmptyF[Z] = + monadInstance.map2(fa, fb)(f) + + override def map2Eval[A, B, Z](fa: NonEmptyF[A], fb: Eval[NonEmptyF[B]])(f: (A, B) => Z): Eval[NonEmptyF[Z]] = + monadInstance.map2Eval(fa, fb)(f) + + def coflatMap[A, B](fa: NonEmptyF[A])(f: NonEmptyF[A] => B): NonEmptyF[B] = + coflatMapInstance.coflatMap(fa)(f) + + def tailRecM[A, B](a: A)(f: A => NonEmptyF[Either[A, B]]): NonEmptyF[B] = + monadInstance.tailRecM(a)(f) + + def foldLeft[A, B](fa: NonEmptyF[A], b: B)(f: (B, A) => B): B = + traverseInstance.foldLeft(fa, b)(f) + + def foldRight[A, B](fa: NonEmptyF[A], lb: Eval[B])(f: (A, Eval[B]) => Eval[B]): Eval[B] = + traverseInstance.foldRight(fa, lb)(f) + + override def foldMap[A, B](fa: NonEmptyF[A])(f: A => B)(implicit B: Monoid[B]): B = + traverseInstance.foldMap(fa)(f) + + override def traverse[G[_], A, B](fa: NonEmptyF[A])(f: A => G[B])(implicit G: Applicative[G]): G[NonEmptyF[B]] = + traverseInstance.traverse(fa)(f) + + override def mapWithIndex[A, B](fa: NonEmptyF[A])(f: (A, Int) => B): NonEmptyF[B] = + traverseInstance.mapWithIndex(fa)(f) + + override def zipWithIndex[A](fa: NonEmptyF[A]): NonEmptyF[(A, Int)] = traverseInstance.zipWithIndex(fa) + + override def exists[A](fa: NonEmptyF[A])(p: A => Boolean): Boolean = traverseInstance.exists(fa)(p) + + override def forall[A](fa: NonEmptyF[A])(p: A => Boolean): Boolean = traverseInstance.forall(fa)(p) + + override def get[A](fa: 
NonEmptyF[A])(idx: Long): Option[A] = traverseInstance.get(fa)(idx) + + override def isEmpty[A](fa: NonEmptyF[A]): Boolean = false + + override def foldM[G[_], A, B](fa: NonEmptyF[A], z: B)(f: (B, A) => G[B])(implicit G: Monad[G]): G[B] = + traverseInstance.foldM(fa, z)(f) + + override def fold[A](fa: NonEmptyF[A])(implicit A: Monoid[A]): A = traverseInstance.fold(fa) + + override def toList[A](fa: NonEmptyF[A]): List[A] = traverseInstance.toList(fa) + + override def reduceLeftOption[A](fa: NonEmptyF[A])(f: (A, A) => A): Option[A] = + traverseInstance.reduceLeftOption(fa)(f) + + override def find[A](fa: NonEmptyF[A])(f: A => Boolean): Option[A] = traverseInstance.find(fa)(f) + + override def collectFirst[A, B](fa: NonEmptyF[A])(pf: PartialFunction[A, B]): Option[B] = + traverseInstance.collectFirst(fa)(pf) + + override def collectFirstSome[A, B](fa: NonEmptyF[A])(f: A => Option[B]): Option[B] = + traverseInstance.collectFirstSome(fa)(f) + +} diff --git a/core/src/main/scala/cats/data/Newtype.scala b/core/src/main/scala/cats/data/Newtype.scala index ff8c0236c8..3aa8566357 100644 --- a/core/src/main/scala/cats/data/Newtype.scala +++ b/core/src/main/scala/cats/data/Newtype.scala @@ -4,6 +4,8 @@ package data /** * Helper trait for `newtype`s. These allow you to create a zero-allocation wrapper around a specific type. * Similar to `AnyVal` value classes, but never have any runtime overhead. 
+ * It's copied from the newtypes lib by @alexknvl + * For more detail see https://github.com/alexknvl/newtypes */ private[data] trait Newtype { self => private[data] type Base diff --git a/core/src/main/scala/cats/data/NonEmptyChain.scala b/core/src/main/scala/cats/data/NonEmptyChain.scala index 85f66f8d60..2c6709ee37 100644 --- a/core/src/main/scala/cats/data/NonEmptyChain.scala +++ b/core/src/main/scala/cats/data/NonEmptyChain.scala @@ -17,24 +17,26 @@ package cats package data -import NonEmptyChainImpl.{create, unwrap} -import cats.Order +import NonEmptyChainImpl.create +import cats.{Order, Semigroup} import cats.kernel._ -import scala.annotation.tailrec import scala.collection.immutable._ -import scala.collection.mutable.ListBuffer private[data] object NonEmptyChainImpl extends NonEmptyChainInstances { - + // The following 3 types are components of a technique to + // create a no-boxing newtype. It's copied from the + // newtypes lib by @alexknvl + // For more detail see https://github.com/alexknvl/newtypes private[data] type Base private[data] trait Tag extends Any + /* aliased in data package as NonEmptyChain */ type Type[+A] <: Base with Tag - private[cats] def create[A](s: Chain[A]): Type[A] = + private[data] def create[A](s: Chain[A]): Type[A] = s.asInstanceOf[Type[A]] - private[cats] def unwrap[A](s: Type[A]): Chain[A] = + private[data] def unwrap[A](s: Type[A]): Chain[A] = s.asInstanceOf[Chain[A]] def fromChain[A](as: Chain[A]): Option[NonEmptyChain[A]] = @@ -403,48 +405,17 @@ class NonEmptyChainOps[A](private val value: NonEmptyChain[A]) extends AnyVal { /** * Remove duplicates. Duplicates are checked using `Order[_]` instance. 
*/ - final def distinct[AA >: A](implicit O: Order[AA]): NonEmptyChain[AA] = { - implicit val ord = O.toOrdering - - var alreadyIn = TreeSet(head: AA) + final def distinct[AA >: A](implicit O: Order[AA]): NonEmptyChain[AA] = + create(toChain.distinct[AA]) - foldLeft(NonEmptyChain(head: AA)) { (elementsSoFar, b) => - if (alreadyIn.contains(b)) { - elementsSoFar - } else { - alreadyIn += b - elementsSoFar :+ b - } - } - } } sealed abstract private[data] class NonEmptyChainInstances extends NonEmptyChainInstances1 { + implicit val catsDataInstancesForNonEmptyChain : SemigroupK[NonEmptyChain] with NonEmptyTraverse[NonEmptyChain] with Bimonad[NonEmptyChain] = - new SemigroupK[NonEmptyChain] with NonEmptyTraverse[NonEmptyChain] with Bimonad[NonEmptyChain] { - - def combineK[A](a: NonEmptyChain[A], b: NonEmptyChain[A]): NonEmptyChain[A] = - a ++ b - - def pure[A](x: A): NonEmptyChain[A] = NonEmptyChain.one(x) - - def flatMap[A, B](fa: NonEmptyChain[A])(f: A => NonEmptyChain[B]): NonEmptyChain[B] = - fa.flatMap(f) - - def tailRecM[A, B](a: A)(f: A => NonEmptyChain[Either[A, B]]): NonEmptyChain[B] = - create(Monad[Chain].tailRecM(a)(a => unwrap(f(a)))) - - def extract[A](x: NonEmptyChain[A]): A = x.head - - def coflatMap[A, B](fa: NonEmptyChain[A])(f: NonEmptyChain[A] => B): NonEmptyChain[B] = { - @tailrec def go(as: Chain[A], res: ListBuffer[B]): Chain[B] = - as.uncons match { - case Some((h, t)) => go(t, res += f(NonEmptyChain.fromChainPrepend(h, t))) - case None => Chain.fromSeq(res.result()) - } - NonEmptyChain.fromChainPrepend(f(fa), go(fa.tail, ListBuffer.empty)) - } + new AbstractNonEmptyInstances[Chain, NonEmptyChain] { + def extract[A](fa: NonEmptyChain[A]): A = fa.head def nonEmptyTraverse[G[_]: Apply, A, B](fa: NonEmptyChain[A])(f: A => G[B]): G[NonEmptyChain[B]] = Foldable[Chain] @@ -457,76 +428,36 @@ sealed abstract private[data] class NonEmptyChainInstances extends NonEmptyChain } .value - override def map[A, B](fa: NonEmptyChain[A])(f: A => B): 
NonEmptyChain[B] = - create(fa.toChain.map(f)) - - override def size[A](fa: NonEmptyChain[A]): Long = fa.length - - override def reduceLeft[A](fa: NonEmptyChain[A])(f: (A, A) => A): A = - fa.reduceLeft(f) - - override def reduce[A](fa: NonEmptyChain[A])(implicit A: Semigroup[A]): A = - fa.reduce - def reduceLeftTo[A, B](fa: NonEmptyChain[A])(f: A => B)(g: (B, A) => B): B = fa.reduceLeftTo(f)(g) - def reduceRightTo[A, B](fa: NonEmptyChain[A])(f: A => B)(g: (A, Eval[B]) => Eval[B]): Eval[B] = + def reduceRightTo[A, B](fa: NonEmptyChain[A])(f: A => B)(g: (A, cats.Eval[B]) => cats.Eval[B]): cats.Eval[B] = Eval.defer(fa.reduceRightTo(a => Eval.now(f(a))) { (a, b) => Eval.defer(g(a, b)) }) - - override def foldLeft[A, B](fa: NonEmptyChain[A], b: B)(f: (B, A) => B): B = - fa.foldLeft(b)(f) - - override def foldRight[A, B](fa: NonEmptyChain[A], lb: Eval[B])(f: (A, Eval[B]) => Eval[B]): Eval[B] = - Foldable[Chain].foldRight(fa.toChain, lb)(f) - - override def foldMap[A, B](fa: NonEmptyChain[A])(f: A => B)(implicit B: Monoid[B]): B = - B.combineAll(fa.toChain.iterator.map(f)) - - override def fold[A](fa: NonEmptyChain[A])(implicit A: Monoid[A]): A = - fa.reduce - - override def find[A](fa: NonEmptyChain[A])(f: A => Boolean): Option[A] = - fa.find(f) - - override def forall[A](fa: NonEmptyChain[A])(p: A => Boolean): Boolean = - fa.forall(p) - - override def exists[A](fa: NonEmptyChain[A])(p: A => Boolean): Boolean = - fa.exists(p) - - override def toList[A](fa: NonEmptyChain[A]): List[A] = fa.toChain.toList - - override def toNonEmptyList[A](fa: NonEmptyChain[A]): NonEmptyList[A] = - fa.toNonEmptyList - - override def collectFirst[A, B](fa: NonEmptyChain[A])(pf: PartialFunction[A, B]): Option[B] = - fa.collectFirst(pf) - - override def collectFirstSome[A, B](fa: NonEmptyChain[A])(f: A => Option[B]): Option[B] = - fa.collectFirstSome(f) } implicit def catsDataOrderForNonEmptyChain[A: Order]: Order[NonEmptyChain[A]] = - Order.by[NonEmptyChain[A], Chain[A]](_.toChain) + 
Order[Chain[A]].asInstanceOf[Order[NonEmptyChain[A]]] + + implicit def catsDataSemigroupForNonEmptyChain[A]: Semigroup[NonEmptyChain[A]] = + Semigroup[Chain[A]].asInstanceOf[Semigroup[NonEmptyChain[A]]] implicit def catsDataShowForNonEmptyChain[A](implicit A: Show[A]): Show[NonEmptyChain[A]] = Show.show[NonEmptyChain[A]](nec => s"NonEmpty${Show[Chain[A]].show(nec.toChain)}") - implicit def catsDataSemigroupForNonEmptyChain[A]: Semigroup[NonEmptyChain[A]] = new Semigroup[NonEmptyChain[A]] { - def combine(x: NonEmptyChain[A], y: NonEmptyChain[A]): NonEmptyChain[A] = x ++ y - } } sealed abstract private[data] class NonEmptyChainInstances1 extends NonEmptyChainInstances2 { + implicit def catsDataHashForNonEmptyChain[A: Hash]: Hash[NonEmptyChain[A]] = + Hash[Chain[A]].asInstanceOf[Hash[NonEmptyChain[A]]] +} + +sealed abstract private[data] class NonEmptyChainInstances2 extends NonEmptyChainInstances3 { implicit def catsDataPartialOrderForNonEmptyChain[A: PartialOrder]: PartialOrder[NonEmptyChain[A]] = - PartialOrder.by[NonEmptyChain[A], Chain[A]](_.toChain) + PartialOrder[Chain[A]].asInstanceOf[PartialOrder[NonEmptyChain[A]]] } -sealed abstract private[data] class NonEmptyChainInstances2 { +sealed abstract private[data] class NonEmptyChainInstances3 { implicit def catsDataEqForNonEmptyChain[A: Eq]: Eq[NonEmptyChain[A]] = - new Eq[NonEmptyChain[A]] { - def eqv(x: NonEmptyChain[A], y: NonEmptyChain[A]): Boolean = x.toChain === y.toChain - } + Eq[Chain[A]].asInstanceOf[Eq[NonEmptyChain[A]]] } diff --git a/core/src/main/scala/cats/data/NonEmptyList.scala b/core/src/main/scala/cats/data/NonEmptyList.scala index 51922ca89f..89ebe57cd3 100644 --- a/core/src/main/scala/cats/data/NonEmptyList.scala +++ b/core/src/main/scala/cats/data/NonEmptyList.scala @@ -4,6 +4,7 @@ package data import cats.data.NonEmptyList.ZipNonEmptyList import cats.instances.list._ import cats.syntax.order._ + import scala.annotation.tailrec import scala.collection.immutable.{SortedMap, TreeMap, TreeSet} 
import scala.collection.mutable diff --git a/core/src/main/scala/cats/data/NonEmptyMapImpl.scala b/core/src/main/scala/cats/data/NonEmptyMapImpl.scala index 898b6b9bec..aae059c1c0 100644 --- a/core/src/main/scala/cats/data/NonEmptyMapImpl.scala +++ b/core/src/main/scala/cats/data/NonEmptyMapImpl.scala @@ -25,10 +25,10 @@ import scala.collection.immutable._ private[data] object NonEmptyMapImpl extends NonEmptyMapInstances with Newtype2 { - private[cats] def create[K, A](m: SortedMap[K, A]): Type[K, A] = + private[data] def create[K, A](m: SortedMap[K, A]): Type[K, A] = m.asInstanceOf[Type[K, A]] - private[cats] def unwrap[K, A](m: Type[K, A]): SortedMap[K, A] = + private[data] def unwrap[K, A](m: Type[K, A]): SortedMap[K, A] = m.asInstanceOf[SortedMap[K, A]] def fromMap[K: Order, A](as: SortedMap[K, A]): Option[NonEmptyMap[K, A]] = diff --git a/core/src/main/scala/cats/data/NonEmptySet.scala b/core/src/main/scala/cats/data/NonEmptySet.scala index e5961907d6..4e26fdfff0 100644 --- a/core/src/main/scala/cats/data/NonEmptySet.scala +++ b/core/src/main/scala/cats/data/NonEmptySet.scala @@ -26,10 +26,10 @@ import kernel.compat.scalaVersionSpecific._ private[data] object NonEmptySetImpl extends NonEmptySetInstances with Newtype { - private[cats] def create[A](s: SortedSet[A]): Type[A] = + private[data] def create[A](s: SortedSet[A]): Type[A] = s.asInstanceOf[Type[A]] - private[cats] def unwrap[A](s: Type[A]): SortedSet[A] = + private[data] def unwrap[A](s: Type[A]): SortedSet[A] = s.asInstanceOf[SortedSet[A]] def fromSet[A](as: SortedSet[A]): Option[NonEmptySet[A]] = @@ -41,6 +41,7 @@ private[data] object NonEmptySetImpl extends NonEmptySetInstances with Newtype { def of[A](a: A, as: A*)(implicit A: Order[A]): NonEmptySet[A] = create(SortedSet(a +: as: _*)(A.toOrdering)) + def apply[A](head: A, tail: SortedSet[A])(implicit A: Order[A]): NonEmptySet[A] = create(SortedSet(head)(A.toOrdering) ++ tail) def one[A](a: A)(implicit A: Order[A]): NonEmptySet[A] = 
create(SortedSet(a)(A.toOrdering)) diff --git a/core/src/main/scala/cats/data/NonEmptyVector.scala b/core/src/main/scala/cats/data/NonEmptyVector.scala index eda86274f5..d0d94b3e69 100644 --- a/core/src/main/scala/cats/data/NonEmptyVector.scala +++ b/core/src/main/scala/cats/data/NonEmptyVector.scala @@ -2,9 +2,10 @@ package cats package data import cats.data.NonEmptyVector.ZipNonEmptyVector +import cats.instances.vector._ + import scala.annotation.tailrec import scala.collection.immutable.{TreeSet, VectorBuilder} -import cats.instances.vector._ import kernel.compat.scalaVersionSpecific._ /** diff --git a/core/src/main/scala/cats/data/package.scala b/core/src/main/scala/cats/data/package.scala index 2832468de0..30619dcdfb 100644 --- a/core/src/main/scala/cats/data/package.scala +++ b/core/src/main/scala/cats/data/package.scala @@ -2,7 +2,7 @@ package cats import kernel.compat.scalaVersionSpecific._ import compat.lazyList.toLazyList -package object data { +package object data extends ScalaVersionSpecificPackage { type NonEmptyStream[A] = OneAnd[LazyList, A] type ValidatedNel[+E, +A] = Validated[NonEmptyList[E], A] diff --git a/kernel/src/main/scala-2.13+/cats/kernel/compat/scalaVersionMoreSpecific.scala b/kernel/src/main/scala-2.13+/cats/kernel/compat/scalaVersionMoreSpecific.scala index f8b39f6853..5f8c6aec2a 100644 --- a/kernel/src/main/scala-2.13+/cats/kernel/compat/scalaVersionMoreSpecific.scala +++ b/kernel/src/main/scala-2.13+/cats/kernel/compat/scalaVersionMoreSpecific.scala @@ -2,6 +2,7 @@ package cats.kernel.compat import scala.annotation.{Annotation, StaticAnnotation} private[cats] object scalaVersionMoreSpecific { + /** * a trick to suppress unused import warning for this object */ diff --git a/kernel/src/main/scala/cats/kernel/instances/StreamInstances.scala b/kernel/src/main/scala/cats/kernel/instances/StreamInstances.scala index dcdde43a8e..e8efde0b10 100644 --- a/kernel/src/main/scala/cats/kernel/instances/StreamInstances.scala +++ 
b/kernel/src/main/scala/cats/kernel/instances/StreamInstances.scala @@ -1,6 +1,7 @@ package cats.kernel package instances import compat.scalaVersionSpecific._ + @suppressUnusedImportWarningForScalaVersionSpecific trait StreamInstances extends StreamInstances1 { implicit def catsKernelStdOrderForStream[A: Order]: Order[LazyList[A]] = diff --git a/laws/src/main/scala-2.12-/cats/laws/discipline/ScalaVersionSpecific.scala b/laws/src/main/scala-2.12-/cats/laws/discipline/ScalaVersionSpecific.scala new file mode 100644 index 0000000000..09bd2d0daa --- /dev/null +++ b/laws/src/main/scala-2.12-/cats/laws/discipline/ScalaVersionSpecific.scala @@ -0,0 +1,7 @@ +package cats +package laws +package discipline + +private[discipline] object ScalaVersionSpecific { + trait ArbitraryInstances +} diff --git a/laws/src/main/scala-2.13+/cats/laws/discipline/ScalaVersionSpecific.scala b/laws/src/main/scala-2.13+/cats/laws/discipline/ScalaVersionSpecific.scala new file mode 100644 index 0000000000..26eaa00240 --- /dev/null +++ b/laws/src/main/scala-2.13+/cats/laws/discipline/ScalaVersionSpecific.scala @@ -0,0 +1,19 @@ +package cats.laws.discipline + +import cats.data.NonEmptyLazyList +import org.scalacheck.{Arbitrary, Cogen} + +private[discipline] object ScalaVersionSpecific { + + trait ArbitraryInstances { + + implicit def catsLawsArbitraryForNonEmptyLazyList[A](implicit A: Arbitrary[A]): Arbitrary[NonEmptyLazyList[A]] = + Arbitrary( + implicitly[Arbitrary[LazyList[A]]].arbitrary + .flatMap(fa => A.arbitrary.map(a => NonEmptyLazyList.fromLazyListPrepend(a, fa))) + ) + implicit def catsLawsCogenForNonEmptyLazyList[A](implicit A: Cogen[A]): Cogen[NonEmptyLazyList[A]] = + Cogen[LazyList[A]].contramap(_.toLazyList) + + } +} diff --git a/laws/src/main/scala/cats/laws/discipline/Arbitrary.scala b/laws/src/main/scala/cats/laws/discipline/arbitrary.scala similarity index 99% rename from laws/src/main/scala/cats/laws/discipline/Arbitrary.scala rename to 
laws/src/main/scala/cats/laws/discipline/arbitrary.scala index d805e72ad6..1772fec0a4 100644 --- a/laws/src/main/scala/cats/laws/discipline/Arbitrary.scala +++ b/laws/src/main/scala/cats/laws/discipline/arbitrary.scala @@ -4,6 +4,7 @@ package discipline import kernel.compat.scalaVersionSpecific._ import cats.data.NonEmptyList.ZipNonEmptyList import cats.data.NonEmptyVector.ZipNonEmptyVector + import scala.util.{Failure, Success, Try} import scala.collection.immutable.{SortedMap, SortedSet} import cats.data._ @@ -14,7 +15,7 @@ import org.scalacheck.Arbitrary.{arbitrary => getArbitrary} * Arbitrary instances for cats.data */ @suppressUnusedImportWarningForScalaVersionSpecific -object arbitrary extends ArbitraryInstances0 { +object arbitrary extends ArbitraryInstances0 with ScalaVersionSpecific.ArbitraryInstances { // this instance is not available in ScalaCheck 1.13.2. // remove this once a newer version is available. diff --git a/tests/src/test/scala-2.12-/cats/tests/NonEmptyStreamSuite.scala b/tests/src/test/scala-2.12-/cats/tests/NonEmptyStreamSuite.scala new file mode 100644 index 0000000000..33774472eb --- /dev/null +++ b/tests/src/test/scala-2.12-/cats/tests/NonEmptyStreamSuite.scala @@ -0,0 +1,169 @@ +package cats +package tests + +import cats.data.{NonEmptyStream, OneAnd} +import cats.instances.stream._ +import cats.kernel.laws.discipline.{EqTests, SemigroupTests} +import cats.laws.discipline.arbitrary._ +import cats.laws.discipline._ + +class NonEmptyStreamSuite extends CatsSuite { + // Lots of collections here.. 
telling ScalaCheck to calm down a bit + implicit override val generatorDrivenConfig: PropertyCheckConfiguration = + PropertyCheckConfiguration(minSuccessful = 20, sizeRange = 5) + + checkAll("NonEmptyStream[Int]", EqTests[NonEmptyStream[Int]].eqv) + + checkAll("NonEmptyStream[Int] with Option", + NonEmptyTraverseTests[NonEmptyStream].nonEmptyTraverse[Option, Int, Int, Int, Int, Option, Option]) + checkAll("NonEmptyTraverse[NonEmptyStream[A]]", SerializableTests.serializable(NonEmptyTraverse[NonEmptyStream[*]])) + + checkAll("NonEmptyStream[Int]", ReducibleTests[NonEmptyStream].reducible[Option, Int, Int]) + checkAll("Reducible[NonEmptyStream]", SerializableTests.serializable(Reducible[NonEmptyStream])) + + checkAll("NonEmptyStream[Int]", SemigroupTests[NonEmptyStream[Int]].semigroup) + checkAll("Semigroup[NonEmptyStream[Int]]", SerializableTests.serializable(Semigroup[NonEmptyStream[Int]])) + + { + // Test functor and subclasses don't have implicit conflicts + implicitly[Functor[NonEmptyStream]] + implicitly[Monad[NonEmptyStream]] + implicitly[Comonad[NonEmptyStream]] + } + + implicit val iso2 = SemigroupalTests.Isomorphisms.invariant[NonEmptyStream] + + checkAll("NonEmptyStream[Int]", MonadTests[NonEmptyStream].monad[Int, Int, Int]) + checkAll("Monad[NonEmptyStream[A]]", SerializableTests.serializable(Monad[NonEmptyStream])) + + checkAll("NonEmptyStream[Int]", ComonadTests[NonEmptyStream].comonad[Int, Int, Int]) + checkAll("Comonad[NonEmptyStream[A]]", SerializableTests.serializable(Comonad[NonEmptyStream])) + + test("Show is not empty and is formatted as expected") { + forAll { (nel: NonEmptyStream[Int]) => + nel.show.nonEmpty should ===(true) + nel.show.startsWith("OneAnd(") should ===(true) + nel.show should ===(implicitly[Show[NonEmptyStream[Int]]].show(nel)) + nel.show.contains(nel.head.show) should ===(true) + } + } + + test("Show is formatted correctly") { + val oneAnd = NonEmptyStream("Test") + oneAnd.show should ===(s"OneAnd(Test, Stream())") + } + + 
test("Creating OneAnd + unwrap is identity") { + forAll { (i: Int, tail: Stream[Int]) => + val stream = i #:: tail + val oneAnd = NonEmptyStream(i, tail: _*) + stream should ===(oneAnd.unwrap) + } + } + + test("NonEmptyStream#find is consistent with Stream#find") { + forAll { (nel: NonEmptyStream[Int], p: Int => Boolean) => + val stream = nel.unwrap + nel.find(p) should ===(stream.find(p)) + } + } + + test("NonEmptyStream#exists is consistent with Stream#exists") { + forAll { (nel: NonEmptyStream[Int], p: Int => Boolean) => + val stream = nel.unwrap + nel.exists(p) should ===(stream.exists(p)) + } + } + + test("NonEmptyStream#forall is consistent with Stream#forall") { + forAll { (nel: NonEmptyStream[Int], p: Int => Boolean) => + val stream = nel.unwrap + nel.forall(p) should ===(stream.forall(p)) + } + } + + test("NonEmptyStream#map is consistent with Stream#map") { + forAll { (nel: NonEmptyStream[Int], p: Int => String) => + val stream = nel.unwrap + nel.map(p).unwrap should ===(stream.map(p)) + } + } + + test("NonEmptyStream#nonEmptyPartition remains sorted") { + forAll { (nes: NonEmptyStream[Int], f: Int => Either[String, String]) => + val nesf = nes.map(f) + val sortedStream = (nesf.head #:: nesf.tail).sorted + val sortedNes = OneAnd(sortedStream.head, sortedStream.tail) + val ior = Reducible[NonEmptyStream].nonEmptyPartition(sortedNes)(identity) + + ior.left.map(xs => xs.sorted should ===(xs)) + ior.right.map(xs => xs.sorted should ===(xs)) + } + } + + test("reduceLeft consistent with foldLeft") { + forAll { (nel: NonEmptyStream[Int], f: (Int, Int) => Int) => + nel.reduceLeft(f) should ===(nel.tail.foldLeft(nel.head)(f)) + } + } + + test("reduceRight consistent with foldRight") { + forAll { (nel: NonEmptyStream[Int], f: (Int, Eval[Int]) => Eval[Int]) => + val got = nel.reduceRight(f).value + val last :: rev = nel.unwrap.toList.reverse + val expected = rev.reverse.foldRight(last)((a, b) => f(a, Now(b)).value) + got should ===(expected) + } + } + + test("reduce 
consistent with fold") { + forAll { (nel: NonEmptyStream[Int]) => + nel.reduce should ===(nel.fold) + } + } + + test("reduce consistent with reduceK") { + forAll { (nel: NonEmptyStream[Option[Int]]) => + nel.reduce(SemigroupK[Option].algebra[Int]) should ===(nel.reduceK) + } + } + + test("reduceLeftToOption consistent with foldLeft + Option") { + forAll { (nel: NonEmptyStream[Int], f: Int => String, g: (String, Int) => String) => + val expected = nel.tail.foldLeft(Option(f(nel.head))) { (opt, i) => + opt.map(s => g(s, i)) + } + nel.reduceLeftToOption(f)(g) should ===(expected) + } + } + + test("reduceRightToOption consistent with foldRight + Option") { + forAll { (nel: NonEmptyStream[Int], f: Int => String, g: (Int, Eval[String]) => Eval[String]) => + val got = nel.reduceRightToOption(f)(g).value + val last :: rev = nel.unwrap.toList.reverse + val expected = rev.reverse.foldRight(Option(f(last))) { (i, opt) => + opt.map(s => g(i, Now(s)).value) + } + got should ===(expected) + } + } + + test("filter includes elements based on a predicate") { + forAll { (nes: NonEmptyStream[Int], pred: Int => Boolean) => + nes.filter(pred) should ===(nes.unwrap.filter(pred)) + } + } + +} + +class ReducibleNonEmptyStreamSuite extends ReducibleSuite[NonEmptyStream]("NonEmptyStream") { + def iterator[T](nes: NonEmptyStream[T]): Iterator[T] = + (nes.head #:: nes.tail).iterator + + def range(start: Long, endInclusive: Long): NonEmptyStream[Long] = { + // if we inline this we get a bewildering implicit numeric widening + // error message in Scala 2.10 + val tailStart: Long = start + 1L + NonEmptyStream(start, tailStart.to(endInclusive).toStream) + } +} diff --git a/tests/src/test/scala-2.13+/cats/tests/NonEmptyLazyListSuite.scala b/tests/src/test/scala-2.13+/cats/tests/NonEmptyLazyListSuite.scala new file mode 100644 index 0000000000..26edbf10cd --- /dev/null +++ b/tests/src/test/scala-2.13+/cats/tests/NonEmptyLazyListSuite.scala @@ -0,0 +1,137 @@ +package cats +package tests + +import 
cats.data.NonEmptyLazyList +import cats.kernel.laws.discipline.{EqTests, HashTests, OrderTests, PartialOrderTests, SemigroupTests} +import cats.laws.discipline.{BimonadTests, NonEmptyTraverseTests, SemigroupKTests, SerializableTests} +import cats.laws.discipline.arbitrary._ + +class NonEmptyLazyListSuite extends CatsSuite { + + checkAll("NonEmptyLazyList[Int]", SemigroupTests[NonEmptyLazyList[Int]].semigroup) + checkAll(s"Semigroup[NonEmptyLazyList]", SerializableTests.serializable(Semigroup[NonEmptyLazyList[Int]])) + + checkAll(s"NonEmptyLazyList[Int]", HashTests[NonEmptyLazyList[Int]].hash) + checkAll(s"Hash[NonEmptyLazyList[Int]]", SerializableTests.serializable(Hash[NonEmptyLazyList[Int]])) + + checkAll("NonEmptyLazyList[Int]", SemigroupKTests[NonEmptyLazyList].semigroupK[Int]) + checkAll("SemigroupK[NonEmptyLazyList]", SerializableTests.serializable(SemigroupK[NonEmptyLazyList])) + + checkAll("NonEmptyLazyList[Int] with Option", + NonEmptyTraverseTests[NonEmptyLazyList].nonEmptyTraverse[Option, Int, Int, Int, Int, Option, Option]) + checkAll("NonEmptyTraverse[NonEmptyLazyList]", SerializableTests.serializable(Traverse[NonEmptyLazyList])) + + checkAll("NonEmptyLazyList[Int]", BimonadTests[NonEmptyLazyList].bimonad[Int, Int, Int]) + checkAll("Bimonad[NonEmptyLazyList]", SerializableTests.serializable(Bimonad[NonEmptyLazyList])) + + checkAll("NonEmptyLazyList[Int]", OrderTests[NonEmptyLazyList[Int]].order) + checkAll("Order[NonEmptyLazyList[Int]", SerializableTests.serializable(Order[NonEmptyLazyList[Int]])) + + test("show") { + Show[NonEmptyLazyList[Int]].show(NonEmptyLazyList(1, 2, 3)) should ===("NonEmptyLazyList(1, ?)") + } + checkAll("Show[NonEmptyLazyList[Int]]", SerializableTests.serializable(Show[NonEmptyLazyList[Int]])) + + { + implicit val partialOrder = ListWrapper.partialOrder[Int] + checkAll("NonEmptyLazyList[ListWrapper[Int]]", PartialOrderTests[NonEmptyLazyList[ListWrapper[Int]]].partialOrder) + 
checkAll("PartialOrder[NonEmptyLazyList[ListWrapper[Int]]", + SerializableTests.serializable(PartialOrder[NonEmptyLazyList[ListWrapper[Int]]])) + } + + { + implicit val eqv = ListWrapper.eqv[Int] + checkAll("NonEmptyLazyList[ListWrapper[Int]]", EqTests[NonEmptyLazyList[ListWrapper[Int]]].eqv) + checkAll("Eq[NonEmptyLazyList[ListWrapper[Int]]", + SerializableTests.serializable(Eq[NonEmptyLazyList[ListWrapper[Int]]])) + } + + test("size is consistent with toLazyList.size") { + forAll { (ci: NonEmptyLazyList[Int]) => + ci.size should ===(ci.toLazyList.size.toLong) + } + } + + test("filterNot and then exists should always be false") { + forAll { (ci: NonEmptyLazyList[Int], f: Int => Boolean) => + ci.filterNot(f).exists(f) should ===(false) + } + } + + test("filter and then forall should always be true") { + forAll { (ci: NonEmptyLazyList[Int], f: Int => Boolean) => + ci.filter(f).forall(f) should ===(true) + } + } + + test("exists should be consistent with find + isDefined") { + forAll { (ci: NonEmptyLazyList[Int], f: Int => Boolean) => + ci.exists(f) should ===(ci.find(f).isDefined) + } + } + + test("filterNot element and then contains should be false") { + forAll { (ci: NonEmptyLazyList[Int], i: Int) => + ci.filterNot(_ === i).contains(i) should ===(false) + } + } + + test("fromNonEmptyVector . toNonEmptyVector is id") { + forAll { (ci: NonEmptyLazyList[Int]) => + NonEmptyLazyList.fromNonEmptyVector(ci.toNonEmptyVector) should ===(ci) + } + } + + test("fromNonEmptyList . toNonEmptyList is id") { + forAll { (ci: NonEmptyLazyList[Int]) => + NonEmptyLazyList.fromNonEmptyList(ci.toNonEmptyList) should ===(ci) + } + } + + test("fromLazyList . 
toLazyList is Option.some") { + forAll { (ci: NonEmptyLazyList[Int]) => + NonEmptyLazyList.fromLazyList(ci.toLazyList) should ===(Some(ci)) + } + } + + test("fromLazyListUnsafe throws exception when used with empty LazyList") { + Either.catchNonFatal(NonEmptyLazyList.fromLazyListUnsafe(LazyList.empty[Int])).isLeft should ===(true) + } + + test("fromSeq . toList . iterator is id") { + forAll { (ci: NonEmptyLazyList[Int]) => + NonEmptyLazyList.fromSeq(ci.iterator.toList) should ===(Option(ci)) + } + } + + test("zipWith consistent with List#zip and then List#map") { + forAll { (a: NonEmptyLazyList[String], b: NonEmptyLazyList[Int], f: (String, Int) => Int) => + a.zipWith(b)(f).toList should ===(a.toList.zip(b.toList).map { case (x, y) => f(x, y) }) + } + } + + test("reverse . reverse is id") { + forAll { (ci: NonEmptyLazyList[Int]) => + ci.reverse.reverse should ===(ci) + } + } + + test("reverse consistent with LazyList#reverse") { + forAll { (ci: NonEmptyLazyList[Int]) => + ci.reverse.toLazyList should ===(ci.toLazyList.reverse) + } + } + + test("NonEmptyLazyList#distinct is consistent with List#distinct") { + forAll { ci: NonEmptyLazyList[Int] => + ci.distinct.toList should ===(ci.toList.distinct) + } + } +} + +class ReducibleNonEmptyLazyListSuite extends ReducibleSuite[NonEmptyLazyList]("NonEmptyLazyList") { + def iterator[T](nel: NonEmptyLazyList[T]): Iterator[T] = nel.toLazyList.iterator + + def range(start: Long, endInclusive: Long): NonEmptyLazyList[Long] = + NonEmptyLazyList(start, (start + 1L).to(endInclusive): _*) +} diff --git a/tests/src/test/scala/cats/tests/NonEmptyChainSuite.scala b/tests/src/test/scala/cats/tests/NonEmptyChainSuite.scala index 5a2ea55e28..2d76aa261b 100644 --- a/tests/src/test/scala/cats/tests/NonEmptyChainSuite.scala +++ b/tests/src/test/scala/cats/tests/NonEmptyChainSuite.scala @@ -152,3 +152,10 @@ class NonEmptyChainSuite extends CatsSuite { } } } + +class ReducibleNonEmptyChainSuite extends 
ReducibleSuite[NonEmptyChain]("NonEmptyChain") { + def iterator[T](nel: NonEmptyChain[T]): Iterator[T] = nel.toChain.iterator + + def range(start: Long, endInclusive: Long): NonEmptyChain[Long] = + NonEmptyChain(start, (start + 1L).to(endInclusive): _*) +} diff --git a/tests/src/test/scala/cats/tests/OneAndSuite.scala b/tests/src/test/scala/cats/tests/OneAndSuite.scala index 401fca5ba9..62a6c03aa1 100644 --- a/tests/src/test/scala/cats/tests/OneAndSuite.scala +++ b/tests/src/test/scala/cats/tests/OneAndSuite.scala @@ -1,39 +1,15 @@ package cats package tests -import cats.kernel.laws.discipline.{EqTests, SemigroupTests} - -import cats.instances.stream._ -import cats.data.{NonEmptyStream, OneAnd} -import cats.laws.discipline.{ - ApplicativeTests, - ComonadTests, - FoldableTests, - FunctorTests, - MonadTests, - NonEmptyTraverseTests, - ReducibleTests, - SemigroupKTests, - SemigroupalTests, - SerializableTests, - TraverseTests -} +import cats.data.OneAnd +import cats.laws.discipline._ import cats.laws.discipline.arbitrary._ -import kernel.compat.scalaVersionSpecific._ -import compat.lazyList.toLazyList -@suppressUnusedImportWarningForScalaVersionSpecific class OneAndSuite extends CatsSuite { // Lots of collections here.. 
telling ScalaCheck to calm down a bit implicit override val generatorDrivenConfig: PropertyCheckConfiguration = PropertyCheckConfiguration(minSuccessful = 20, sizeRange = 5) - checkAll("OneAnd[Stream, Int]", EqTests[OneAnd[LazyList, Int]].eqv) - - checkAll("OneAnd[Stream, Int] with Option", - NonEmptyTraverseTests[OneAnd[LazyList, *]].nonEmptyTraverse[Option, Int, Int, Int, Int, Option, Option]) - checkAll("NonEmptyTraverse[OneAnd[Stream, A]]", SerializableTests.serializable(NonEmptyTraverse[OneAnd[LazyList, *]])) - { implicit val traverse = OneAnd.catsDataTraverseForOneAnd(ListWrapper.traverse) checkAll("OneAnd[ListWrapper, Int] with Option", @@ -41,9 +17,6 @@ class OneAndSuite extends CatsSuite { checkAll("Traverse[OneAnd[ListWrapper, A]]", SerializableTests.serializable(Traverse[OneAnd[ListWrapper, *]])) } - checkAll("OneAnd[Stream, Int]", ReducibleTests[OneAnd[LazyList, *]].reducible[Option, Int, Int]) - checkAll("Reducible[OneAnd[Stream, *]]", SerializableTests.serializable(Reducible[OneAnd[LazyList, *]])) - implicit val iso = SemigroupalTests.Isomorphisms .invariant[OneAnd[ListWrapper, *]](OneAnd.catsDataFunctorForOneAnd(ListWrapper.functor)) @@ -70,9 +43,7 @@ class OneAndSuite extends CatsSuite { { implicit val alternative = ListWrapper.alternative checkAll("OneAnd[ListWrapper, Int]", SemigroupKTests[OneAnd[ListWrapper, *]].semigroupK[Int]) - checkAll("OneAnd[Stream, Int]", SemigroupTests[OneAnd[LazyList, Int]].semigroup) checkAll("SemigroupK[OneAnd[ListWrapper, A]]", SerializableTests.serializable(SemigroupK[OneAnd[ListWrapper, *]])) - checkAll("Semigroup[NonEmptyStream[Int]]", SerializableTests.serializable(Semigroup[OneAnd[LazyList, Int]])) } { @@ -81,160 +52,10 @@ class OneAndSuite extends CatsSuite { checkAll("Foldable[OneAnd[ListWrapper, A]]", SerializableTests.serializable(Foldable[OneAnd[ListWrapper, *]])) } - { - // Test functor and subclasses don't have implicit conflicts - implicitly[Functor[NonEmptyStream]] - implicitly[Monad[NonEmptyStream]] - 
implicitly[Comonad[NonEmptyStream]] - } - - implicit val iso2 = SemigroupalTests.Isomorphisms.invariant[OneAnd[LazyList, *]] - - //OneAnd's tailRecM fails on LazyList due to the fact that. todo: replace NonEmptyStream with NonEmptyLazyList using newtype https://github.com/typelevel/cats/issues/2903 - checkAll("NonEmptyStream[Int]", MonadTests[NonEmptyStream].stackUnsafeMonad[Int, Int, Int]) - checkAll("Monad[NonEmptyStream[A]]", SerializableTests.serializable(Monad[NonEmptyStream])) - - checkAll("NonEmptyStream[Int]", ComonadTests[NonEmptyStream].comonad[Int, Int, Int]) - checkAll("Comonad[NonEmptyStream[A]]", SerializableTests.serializable(Comonad[NonEmptyStream])) - test("size is consistent with toList.size") { forAll { (oa: OneAnd[Vector, Int]) => oa.size should ===(oa.toList.size.toLong) } } - test("Show is not empty and is formatted as expected") { - forAll { (nel: NonEmptyStream[Int]) => - nel.show.nonEmpty should ===(true) - nel.show.startsWith("OneAnd(") should ===(true) - nel.show should ===(implicitly[Show[NonEmptyStream[Int]]].show(nel)) - nel.show.contains(nel.head.show) should ===(true) - } - } - - test("Show is formatted correctly") { - val oneAnd = NonEmptyStream("Test") - oneAnd.show should ===(s"OneAnd(Test, ${compat.lazyList.lazyListString}())") - } - - test("Creating OneAnd + unwrap is identity") { - forAll { (i: Int, tail: LazyList[Int]) => - val stream = i #:: tail - val oneAnd = NonEmptyStream(i, tail: _*) - stream should ===(oneAnd.unwrap) - } - } - - test("NonEmptyStream#find is consistent with Stream#find") { - forAll { (nel: NonEmptyStream[Int], p: Int => Boolean) => - val stream = nel.unwrap - nel.find(p) should ===(stream.find(p)) - } - } - - test("NonEmptyStream#exists is consistent with Stream#exists") { - forAll { (nel: NonEmptyStream[Int], p: Int => Boolean) => - val stream = nel.unwrap - nel.exists(p) should ===(stream.exists(p)) - } - } - - test("NonEmptyStream#forall is consistent with Stream#forall") { - forAll { (nel: 
NonEmptyStream[Int], p: Int => Boolean) => - val stream = nel.unwrap - nel.forall(p) should ===(stream.forall(p)) - } - } - - test("NonEmptyStream#map is consistent with Stream#map") { - forAll { (nel: NonEmptyStream[Int], p: Int => String) => - val stream = nel.unwrap - nel.map(p).unwrap should ===(stream.map(p)) - } - } - - test("NonEmptyStream#nonEmptyPartition remains sorted") { - forAll { (nes: NonEmptyStream[Int], f: Int => Either[String, String]) => - val nesf = nes.map(f) - val sortedStream = (nesf.head #:: nesf.tail).sorted - val sortedNes = OneAnd(sortedStream.head, sortedStream.tail) - val ior = Reducible[NonEmptyStream].nonEmptyPartition(sortedNes)(identity) - - ior.left.map(xs => xs.sorted should ===(xs)) - ior.right.map(xs => xs.sorted should ===(xs)) - } - } - - test("reduceLeft consistent with foldLeft") { - forAll { (nel: NonEmptyStream[Int], f: (Int, Int) => Int) => - nel.reduceLeft(f) should ===(nel.tail.foldLeft(nel.head)(f)) - } - } - - test("reduceRight consistent with foldRight") { - forAll { (nel: NonEmptyStream[Int], f: (Int, Eval[Int]) => Eval[Int]) => - val got = nel.reduceRight(f).value - nel.unwrap.toList.reverse match { - case last :: rev => - val expected = rev.reverse.foldRight(last)((a, b) => f(a, Now(b)).value) - got should ===(expected) - case _ => fail("nonempty turns out to be empty") - } - - } - } - - test("reduce consistent with fold") { - forAll { (nel: NonEmptyStream[Int]) => - nel.reduce should ===(nel.fold) - } - } - - test("reduce consistent with reduceK") { - forAll { (nel: NonEmptyStream[Option[Int]]) => - nel.reduce(SemigroupK[Option].algebra[Int]) should ===(nel.reduceK) - } - } - - test("reduceLeftToOption consistent with foldLeft + Option") { - forAll { (nel: NonEmptyStream[Int], f: Int => String, g: (String, Int) => String) => - val expected = nel.tail.foldLeft(Option(f(nel.head))) { (opt, i) => - opt.map(s => g(s, i)) - } - nel.reduceLeftToOption(f)(g) should ===(expected) - } - } - - test("reduceRightToOption 
consistent with foldRight + Option") { - forAll { (nel: NonEmptyStream[Int], f: Int => String, g: (Int, Eval[String]) => Eval[String]) => - val got = nel.reduceRightToOption(f)(g).value - nel.unwrap.toList.reverse match { - case last :: rev => - val expected = rev.reverse.foldRight(Option(f(last))) { (i, opt) => - opt.map(s => g(i, Now(s)).value) - } - got should ===(expected) - case _ => fail("nonempty turns out to be empty") - } - } - } - - test("filter includes elements based on a predicate") { - forAll { (nes: NonEmptyStream[Int], pred: Int => Boolean) => - nes.filter(pred) should ===(nes.unwrap.filter(pred)) - } - } - -} - -class ReducibleNonEmptyStreamSuite extends ReducibleSuite[NonEmptyStream]("NonEmptyStream") { - def iterator[T](nes: NonEmptyStream[T]): Iterator[T] = - (nes.head #:: nes.tail).iterator - - def range(start: Long, endInclusive: Long): NonEmptyStream[Long] = { - // if we inline this we get a bewildering implicit numeric widening - // error message in Scala 2.10 - val tailStart: Long = start + 1L - NonEmptyStream(start, toLazyList(tailStart.to(endInclusive))) - } } diff --git a/tests/src/test/scala/cats/tests/ParallelSuite.scala b/tests/src/test/scala/cats/tests/ParallelSuite.scala index 16a7a2b020..a755c510a6 100644 --- a/tests/src/test/scala/cats/tests/ParallelSuite.scala +++ b/tests/src/test/scala/cats/tests/ParallelSuite.scala @@ -447,10 +447,13 @@ class ParallelSuite extends CatsSuite with ApplicativeErrorForEitherTest { checkAll("NonEmptyParallel[List, ZipList]", NonEmptyParallelTests[List, ZipList].nonEmptyParallel[Int, String]) // Can't test Parallel here, as Applicative[ZipStream].pure doesn't terminate checkAll("Parallel[Stream, ZipStream]", NonEmptyParallelTests[LazyList, ZipStream].nonEmptyParallel[Int, String]) + checkAll("NonEmptyParallel[NonEmptyVector, ZipNonEmptyVector]", NonEmptyParallelTests[NonEmptyVector, ZipNonEmptyVector].nonEmptyParallel[Int, String]) + checkAll("NonEmptyParallel[NonEmptyList, ZipNonEmptyList]", 
NonEmptyParallelTests[NonEmptyList, ZipNonEmptyList].nonEmptyParallel[Int, String]) + checkAll("Parallel[NonEmptyStream, OneAnd[ZipStream, *]]", ParallelTests[NonEmptyStream, OneAnd[ZipStream, *]].parallel[Int, String])